blob: 1a6f863c5feaf9e40b657db26380f9ecb481a32c [file] [log] [blame]
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00001// Copyright 2013 the V8 project authors. All rights reserved.
2// Use of this source code is governed by a BSD-style license that can be
3// found in the LICENSE file.
4
5#include "src/crankshaft/hydrogen.h"
6
7#include <sstream>
8
9#include "src/allocation-site-scopes.h"
10#include "src/ast/ast-numbering.h"
11#include "src/ast/scopeinfo.h"
12#include "src/code-factory.h"
13#include "src/crankshaft/hydrogen-bce.h"
Ben Murdoch4a90d5f2016-03-22 12:00:34 +000014#include "src/crankshaft/hydrogen-canonicalize.h"
15#include "src/crankshaft/hydrogen-check-elimination.h"
16#include "src/crankshaft/hydrogen-dce.h"
17#include "src/crankshaft/hydrogen-dehoist.h"
18#include "src/crankshaft/hydrogen-environment-liveness.h"
19#include "src/crankshaft/hydrogen-escape-analysis.h"
20#include "src/crankshaft/hydrogen-gvn.h"
21#include "src/crankshaft/hydrogen-infer-representation.h"
22#include "src/crankshaft/hydrogen-infer-types.h"
23#include "src/crankshaft/hydrogen-load-elimination.h"
24#include "src/crankshaft/hydrogen-mark-deoptimize.h"
25#include "src/crankshaft/hydrogen-mark-unreachable.h"
26#include "src/crankshaft/hydrogen-osr.h"
27#include "src/crankshaft/hydrogen-range-analysis.h"
28#include "src/crankshaft/hydrogen-redundant-phi.h"
29#include "src/crankshaft/hydrogen-removable-simulates.h"
30#include "src/crankshaft/hydrogen-representation-changes.h"
31#include "src/crankshaft/hydrogen-sce.h"
32#include "src/crankshaft/hydrogen-store-elimination.h"
33#include "src/crankshaft/hydrogen-uint32-analysis.h"
34#include "src/crankshaft/lithium-allocator.h"
35#include "src/crankshaft/typing.h"
Ben Murdoch097c5b22016-05-18 11:27:45 +010036#include "src/field-type.h"
Ben Murdoch4a90d5f2016-03-22 12:00:34 +000037#include "src/full-codegen/full-codegen.h"
38#include "src/ic/call-optimization.h"
39#include "src/ic/ic.h"
40// GetRootConstructor
41#include "src/ic/ic-inl.h"
42#include "src/isolate-inl.h"
43#include "src/parsing/parser.h"
44#include "src/runtime/runtime.h"
45
46#if V8_TARGET_ARCH_IA32
47#include "src/crankshaft/ia32/lithium-codegen-ia32.h" // NOLINT
48#elif V8_TARGET_ARCH_X64
49#include "src/crankshaft/x64/lithium-codegen-x64.h" // NOLINT
50#elif V8_TARGET_ARCH_ARM64
51#include "src/crankshaft/arm64/lithium-codegen-arm64.h" // NOLINT
52#elif V8_TARGET_ARCH_ARM
53#include "src/crankshaft/arm/lithium-codegen-arm.h" // NOLINT
54#elif V8_TARGET_ARCH_PPC
55#include "src/crankshaft/ppc/lithium-codegen-ppc.h" // NOLINT
56#elif V8_TARGET_ARCH_MIPS
57#include "src/crankshaft/mips/lithium-codegen-mips.h" // NOLINT
58#elif V8_TARGET_ARCH_MIPS64
59#include "src/crankshaft/mips64/lithium-codegen-mips64.h" // NOLINT
Ben Murdochda12d292016-06-02 14:46:10 +010060#elif V8_TARGET_ARCH_S390
61#include "src/crankshaft/s390/lithium-codegen-s390.h" // NOLINT
Ben Murdoch4a90d5f2016-03-22 12:00:34 +000062#elif V8_TARGET_ARCH_X87
63#include "src/crankshaft/x87/lithium-codegen-x87.h" // NOLINT
64#else
65#error Unsupported target architecture.
66#endif
67
68namespace v8 {
69namespace internal {
70
// Crankshaft uses its own register configuration, distinct from the one used
// by other compiler tiers.
const auto GetRegConfig = RegisterConfiguration::Crankshaft;
72
// Graph builder variant that tracks source positions: before visiting each
// AST node it installs the node's position as the current source position and
// restores the previous one afterwards. Selected in CreateGraphImpl() when
// position tracking or --trace-ic is enabled.
class HOptimizedGraphBuilderWithPositions : public HOptimizedGraphBuilder {
 public:
  explicit HOptimizedGraphBuilderWithPositions(CompilationInfo* info)
      : HOptimizedGraphBuilder(info) {}

  // Expression visitors: save/set/restore the source position around the base
  // class visit. Nodes without a position (RelocInfo::kNoPosition) leave the
  // current position untouched.
#define DEF_VISIT(type)                                      \
  void Visit##type(type* node) override {                    \
    SourcePosition old_position = SourcePosition::Unknown(); \
    if (node->position() != RelocInfo::kNoPosition) {        \
      old_position = source_position();                      \
      SetSourcePosition(node->position());                   \
    }                                                        \
    HOptimizedGraphBuilder::Visit##type(node);               \
    if (!old_position.IsUnknown()) {                         \
      set_source_position(old_position);                     \
    }                                                        \
  }
  EXPRESSION_NODE_LIST(DEF_VISIT)
#undef DEF_VISIT

  // Statement visitors: same save/set/restore protocol as expressions.
#define DEF_VISIT(type)                                      \
  void Visit##type(type* node) override {                    \
    SourcePosition old_position = SourcePosition::Unknown(); \
    if (node->position() != RelocInfo::kNoPosition) {        \
      old_position = source_position();                      \
    }                                                        \
    if (node->position() != RelocInfo::kNoPosition) {        \
      SetSourcePosition(node->position());                   \
    }                                                        \
    HOptimizedGraphBuilder::Visit##type(node);               \
    if (!old_position.IsUnknown()) {                         \
      set_source_position(old_position);                     \
    }                                                        \
  }
  STATEMENT_NODE_LIST(DEF_VISIT)
#undef DEF_VISIT

  // Declarations: no position bookkeeping, just delegate to the base class.
#define DEF_VISIT(type)                        \
  void Visit##type(type* node) override {      \
    HOptimizedGraphBuilder::Visit##type(node); \
  }
  DECLARATION_NODE_LIST(DEF_VISIT)
#undef DEF_VISIT
};
115
// First phase of a Crankshaft compilation job: makes sure fullcode with
// deoptimization support exists, runs the pre-flight bailout checks,
// type-checks the function with AstTyper and builds the Hydrogen graph
// into graph_. Returns SUCCEEDED/FAILED or an abort/retry status.
HCompilationJob::Status HCompilationJob::CreateGraphImpl() {
  bool dont_crankshaft = info()->shared_info()->dont_crankshaft();

  // Optimization requires a version of fullcode with deoptimization support.
  // Recompile the unoptimized version of the code if the current version
  // doesn't have deoptimization support already.
  // Otherwise, if we are gathering compilation time and space statistics
  // for hydrogen, gather baseline statistics for a fullcode compilation.
  bool should_recompile = !info()->shared_info()->has_deoptimization_support();
  if (should_recompile || FLAG_hydrogen_stats) {
    base::ElapsedTimer timer;
    if (FLAG_hydrogen_stats) {
      timer.Start();
    }
    if (!Compiler::EnsureDeoptimizationSupport(info())) {
      return FAILED;
    }
    if (FLAG_hydrogen_stats) {
      // Attribute the time spent above to the baseline (fullcode) bucket.
      isolate()->GetHStatistics()->IncrementFullCodeGen(timer.Elapsed());
    }
  }
  DCHECK(info()->shared_info()->has_deoptimization_support());
  DCHECK(!info()->shared_info()->never_compiled());

  if (!isolate()->use_crankshaft() || dont_crankshaft) {
    // Crankshaft is entirely disabled.
    return FAILED;
  }

  // Check the whitelist for Crankshaft.
  if (!info()->shared_info()->PassesFilter(FLAG_hydrogen_filter)) {
    return AbortOptimization(kHydrogenFilter);
  }

  Scope* scope = info()->scope();
  if (LUnallocated::TooManyParameters(scope->num_parameters())) {
    // Crankshaft would require too many Lithium operands.
    return AbortOptimization(kTooManyParameters);
  }

  if (info()->is_osr() &&
      LUnallocated::TooManyParametersOrStackSlots(scope->num_parameters(),
                                                  scope->num_stack_slots())) {
    // Crankshaft would require too many Lithium operands.
    return AbortOptimization(kTooManyParametersLocals);
  }

  if (IsGeneratorFunction(info()->shared_info()->kind())) {
    // Crankshaft does not support generators.
    return AbortOptimization(kGenerator);
  }

  if (FLAG_trace_hydrogen) {
    isolate()->GetHTracer()->TraceCompilation(info());
  }

  // Optimization could have been disabled by the parser. Note that this check
  // is only needed because the Hydrogen graph builder is missing some bailouts.
  if (info()->shared_info()->optimization_disabled()) {
    return AbortOptimization(
        info()->shared_info()->disable_optimization_reason());
  }

  // Use the position-tracking builder only when positions are actually
  // needed; the plain builder avoids the per-node bookkeeping.
  HOptimizedGraphBuilder* graph_builder =
      (info()->is_tracking_positions() || FLAG_trace_ic)
          ? new (info()->zone()) HOptimizedGraphBuilderWithPositions(info())
          : new (info()->zone()) HOptimizedGraphBuilder(info());

  // Type-check the function.
  AstTyper(info()->isolate(), info()->zone(), info()->closure(),
           info()->scope(), info()->osr_ast_id(), info()->literal(),
           graph_builder->bounds())
      .Run();

  graph_ = graph_builder->CreateGraph();

  if (isolate()->has_pending_exception()) {
    return FAILED;
  }

  if (graph_ == NULL) return FAILED;

  if (info()->dependencies()->HasAborted()) {
    // Dependency has changed during graph creation. Let's try again later.
    return RetryOptimization(kBailedOutDueToDependencyChange);
  }

  return SUCCEEDED;
}
205
206HCompilationJob::Status HCompilationJob::OptimizeGraphImpl() {
207 DCHECK(graph_ != NULL);
208 BailoutReason bailout_reason = kNoReason;
209
210 if (graph_->Optimize(&bailout_reason)) {
211 chunk_ = LChunk::NewChunk(graph_);
212 if (chunk_ != NULL) return SUCCEEDED;
213 } else if (bailout_reason != kNoReason) {
214 info()->AbortOptimization(bailout_reason);
215 }
216
217 return FAILED;
218}
219
// Final phase of a Crankshaft compilation job: generates machine code from
// the Lithium chunk, registers weak objects, stores the code on the
// CompilationInfo and links it into the native context's optimized code list.
HCompilationJob::Status HCompilationJob::GenerateCodeImpl() {
  DCHECK(chunk_ != NULL);
  DCHECK(graph_ != NULL);
  {
    // Deferred handles reference objects that were accessible during
    // graph creation. To make sure that we don't encounter inconsistencies
    // between graph creation and code generation, we disallow accessing
    // objects through deferred handles during the latter, with exceptions.
    DisallowDeferredHandleDereference no_deferred_handle_deref;
    Handle<Code> optimized_code = chunk_->Codegen();
    if (optimized_code.is_null()) {
      // Distinguish "codegen itself failed" from an earlier recorded bailout.
      if (info()->bailout_reason() == kNoReason) {
        return AbortOptimization(kCodeGenerationFailed);
      }
      return FAILED;
    }
    RegisterWeakObjectsInOptimizedCode(optimized_code);
    info()->SetCode(optimized_code);
  }
  // Add to the weak list of optimized code objects.
  info()->context()->native_context()->AddOptimizedCode(*info()->code());
  return SUCCEEDED;
}
243
// Constructs an empty basic block with a fresh id from the graph. The block
// starts reachable, with no instructions, no environment and no dominator;
// everything else is filled in as the graph is built and analyzed.
HBasicBlock::HBasicBlock(HGraph* graph)
    : block_id_(graph->GetNextBlockID()),
      graph_(graph),
      phis_(4, graph->zone()),
      first_(NULL),
      last_(NULL),
      end_(NULL),
      loop_information_(NULL),
      predecessors_(2, graph->zone()),
      dominator_(NULL),
      dominated_blocks_(4, graph->zone()),
      last_environment_(NULL),
      argument_count_(-1),  // -1 means "not yet computed".
      first_instruction_index_(-1),
      last_instruction_index_(-1),
      deleted_phis_(4, graph->zone()),
      parent_loop_header_(NULL),
      inlined_entry_block_(NULL),
      is_inline_return_target_(false),
      is_reachable_(true),
      dominates_loop_successors_(false),
      is_osr_entry_(false),
      is_ordered_(false) { }
267
268
269Isolate* HBasicBlock::isolate() const {
270 return graph_->isolate();
271}
272
273
// Flags this block as unreachable; consumed by the mark-unreachable phase
// and by reachability checks elsewhere in the pipeline.
void HBasicBlock::MarkUnreachable() {
  is_reachable_ = false;
}
277
278
// Turns this block into a loop header by attaching fresh loop information.
// Must not already be a loop header.
void HBasicBlock::AttachLoopInformation() {
  DCHECK(!IsLoopHeader());
  loop_information_ = new(zone()) HLoopInformation(this, zone());
}
283
284
// Demotes this loop header back to an ordinary block (used for degenerated
// loops with a single predecessor; see PostProcessLoopHeader).
void HBasicBlock::DetachLoopInformation() {
  DCHECK(IsLoopHeader());
  loop_information_ = NULL;
}
289
290
// Appends |phi| to this block's phi list and makes this block its owner.
// The start block never merges values, so it can have no phis.
void HBasicBlock::AddPhi(HPhi* phi) {
  DCHECK(!IsStartBlock());
  phis_.Add(phi, zone());
  phi->SetBlock(this);
}
296
297
// Kills |phi| (clearing its operands) and detaches it from this block.
void HBasicBlock::RemovePhi(HPhi* phi) {
  DCHECK(phi->block() == this);
  DCHECK(phis_.Contains(phi));
  phi->Kill();
  phis_.RemoveElement(phi);
  phi->SetBlock(NULL);
}
305
306
// Appends |instr| to the end of this block, lazily creating the HBlockEntry
// marker the first time an instruction is added. |position| is attached to
// the instruction (and the entry) when known.
void HBasicBlock::AddInstruction(HInstruction* instr, SourcePosition position) {
  DCHECK(!IsStartBlock() || !IsFinished());
  DCHECK(!instr->IsLinked());
  DCHECK(!IsFinished());

  if (!position.IsUnknown()) {
    instr->set_position(position);
  }
  if (first_ == NULL) {
    // First instruction of the block: synthesize the block-entry marker.
    DCHECK(last_environment() != NULL);
    DCHECK(!last_environment()->ast_id().IsNone());
    HBlockEntry* entry = new(zone()) HBlockEntry();
    entry->InitializeAsFirst(this);
    if (!position.IsUnknown()) {
      entry->set_position(position);
    } else {
      // Only abnormal exits may legitimately lack a position when positions
      // are being tracked for an optimizing compile.
      DCHECK(!FLAG_hydrogen_track_positions ||
             !graph()->info()->IsOptimizing() || instr->IsAbnormalExit());
    }
    first_ = last_ = entry;
  }
  instr->InsertAfter(last_);
}
330
331
332HPhi* HBasicBlock::AddNewPhi(int merged_index) {
333 if (graph()->IsInsideNoSideEffectsScope()) {
334 merged_index = HPhi::kInvalidMergedIndex;
335 }
336 HPhi* phi = new(zone()) HPhi(merged_index, zone());
337 AddPhi(phi);
338 return phi;
339}
340
341
// Builds an HSimulate capturing the current environment's pending pushes,
// pops and variable assignments for deopt bookkeeping at |ast_id|, then
// clears that history from the environment. Does not insert the instruction.
HSimulate* HBasicBlock::CreateSimulate(BailoutId ast_id,
                                       RemovableSimulate removable) {
  DCHECK(HasEnvironment());
  HEnvironment* environment = last_environment();
  DCHECK(ast_id.IsNone() ||
         ast_id == BailoutId::StubEntry() ||
         environment->closure()->shared()->VerifyBailoutId(ast_id));

  int push_count = environment->push_count();
  int pop_count = environment->pop_count();

  HSimulate* instr =
      new(zone()) HSimulate(ast_id, pop_count, zone(), removable);
#ifdef DEBUG
  instr->set_closure(environment->closure());
#endif
  // Order of pushed values: newest (top of stack) first. This allows
  // HSimulate::MergeWith() to easily append additional pushed values
  // that are older (from further down the stack).
  for (int i = 0; i < push_count; ++i) {
    instr->AddPushedValue(environment->ExpressionStackAt(i));
  }
  // Record every variable slot assigned since the last simulate.
  for (GrowableBitVector::Iterator it(environment->assigned_variables(),
                                      zone());
       !it.Done();
       it.Advance()) {
    int index = it.Current();
    instr->AddAssignedValue(index, environment->Lookup(index));
  }
  environment->ClearHistory();
  return instr;
}
374
375
// Terminates the block with the control instruction |end| and registers this
// block as a predecessor of each of |end|'s successors.
void HBasicBlock::Finish(HControlInstruction* end, SourcePosition position) {
  DCHECK(!IsFinished());
  AddInstruction(end, position);
  end_ = end;
  for (HSuccessorIterator it(end); !it.Done(); it.Advance()) {
    it.Current()->RegisterPredecessor(this);
  }
}
384
385
// Ends this block with an unconditional jump to |block|. If the target is an
// inline-return target, first emits HLeaveInlined and pops the inlined
// environment (dropping the extra value for NORMAL_RETURN inlining).
// Optionally emits a simulate before the goto.
void HBasicBlock::Goto(HBasicBlock* block, SourcePosition position,
                       FunctionState* state, bool add_simulate) {
  bool drop_extra = state != NULL &&
      state->inlining_kind() == NORMAL_RETURN;

  if (block->IsInlineReturnTarget()) {
    HEnvironment* env = last_environment();
    int argument_count = env->arguments_environment()->parameter_count();
    AddInstruction(new(zone())
                   HLeaveInlined(state->entry(), argument_count),
                   position);
    UpdateEnvironment(last_environment()->DiscardInlined(drop_extra));
  }

  if (add_simulate) AddNewSimulate(BailoutId::None(), position);
  HGoto* instr = new(zone()) HGoto(block);
  Finish(instr, position);
}
404
405
// Returns |return_value| from an inlined function: emits HLeaveInlined,
// discards the inlined environment, pushes the return value for the caller,
// simulates, and jumps to the function-return join block.
void HBasicBlock::AddLeaveInlined(HValue* return_value, FunctionState* state,
                                  SourcePosition position) {
  HBasicBlock* target = state->function_return();
  bool drop_extra = state->inlining_kind() == NORMAL_RETURN;

  DCHECK(target->IsInlineReturnTarget());
  DCHECK(return_value != NULL);
  HEnvironment* env = last_environment();
  int argument_count = env->arguments_environment()->parameter_count();
  AddInstruction(new(zone()) HLeaveInlined(state->entry(), argument_count),
                 position);
  UpdateEnvironment(last_environment()->DiscardInlined(drop_extra));
  last_environment()->Push(return_value);
  AddNewSimulate(BailoutId::None(), position);
  HGoto* instr = new(zone()) HGoto(target);
  Finish(instr, position);
}
423
424
// Installs the first environment of this block; only legal before any
// environment or instruction exists.
void HBasicBlock::SetInitialEnvironment(HEnvironment* env) {
  DCHECK(!HasEnvironment());
  DCHECK(first() == NULL);
  UpdateEnvironment(env);
}
430
431
// Replaces the block's current environment and keeps the graph's maximum
// environment size up to date for later stack-slot accounting.
void HBasicBlock::UpdateEnvironment(HEnvironment* env) {
  last_environment_ = env;
  graph()->update_maximum_environment_size(env->first_expression_index());
}
436
437
// Stamps |ast_id| on the simulate preceding each predecessor's goto and on
// each predecessor's environment, so all incoming edges of this join agree
// on the bailout point.
void HBasicBlock::SetJoinId(BailoutId ast_id) {
  int length = predecessors_.length();
  DCHECK(length > 0);
  for (int i = 0; i < length; i++) {
    HBasicBlock* predecessor = predecessors_[i];
    DCHECK(predecessor->end()->IsGoto());
    // The simulate is the instruction immediately before the goto.
    HSimulate* simulate = HSimulate::cast(predecessor->end()->previous());
    DCHECK(i != 0 ||
           (predecessor->last_environment()->closure().is_null() ||
            predecessor->last_environment()->closure()->shared()
                ->VerifyBailoutId(ast_id)));
    simulate->set_ast_id(ast_id);
    predecessor->last_environment()->set_ast_id(ast_id);
  }
}
453
454
455bool HBasicBlock::Dominates(HBasicBlock* other) const {
456 HBasicBlock* current = other->dominator();
457 while (current != NULL) {
458 if (current == this) return true;
459 current = current->dominator();
460 }
461 return false;
462}
463
464
465bool HBasicBlock::EqualToOrDominates(HBasicBlock* other) const {
466 if (this == other) return true;
467 return Dominates(other);
468}
469
470
471int HBasicBlock::LoopNestingDepth() const {
472 const HBasicBlock* current = this;
473 int result = (current->IsLoopHeader()) ? 1 : 0;
474 while (current->parent_loop_header() != NULL) {
475 current = current->parent_loop_header();
476 result++;
477 }
478 return result;
479}
480
481
// Finalizes a loop header once all its predecessors are known: sets the join
// id from the loop's entry, demotes single-predecessor (degenerated) loops,
// and registers every predecessor beyond the first as a back edge.
void HBasicBlock::PostProcessLoopHeader(IterationStatement* stmt) {
  DCHECK(IsLoopHeader());

  SetJoinId(stmt->EntryId());
  if (predecessors()->length() == 1) {
    // This is a degenerated loop.
    DetachLoopInformation();
    return;
  }

  // Only the first entry into the loop is from outside the loop. All other
  // entries must be back edges.
  for (int i = 1; i < predecessors()->length(); ++i) {
    loop_information()->RegisterBackEdge(predecessors()->at(i));
  }
}
498
499
// Marks the |succ|-th successor block unreachable. Valid only when that
// successor has no other way to be reached (a single predecessor).
void HBasicBlock::MarkSuccEdgeUnreachable(int succ) {
  DCHECK(IsFinished());
  HBasicBlock* succ_block = end()->SuccessorAt(succ);

  DCHECK(succ_block->predecessors()->length() == 1);
  succ_block->MarkUnreachable();
}
507
508
// Records |pred| as a predecessor and merges its environment into this
// block: loop headers feed phi inputs, ordinary joins merge environments,
// and a block seeing its first predecessor copies the environment outright.
void HBasicBlock::RegisterPredecessor(HBasicBlock* pred) {
  if (HasPredecessor()) {
    // Only loop header blocks can have a predecessor added after
    // instructions have been added to the block (they have phis for all
    // values in the environment, these phis may be eliminated later).
    DCHECK(IsLoopHeader() || first_ == NULL);
    HEnvironment* incoming_env = pred->last_environment();
    if (IsLoopHeader()) {
      DCHECK_EQ(phis()->length(), incoming_env->length());
      for (int i = 0; i < phis_.length(); ++i) {
        phis_[i]->AddInput(incoming_env->values()->at(i));
      }
    } else {
      last_environment()->AddIncomingEdge(this, pred->last_environment());
    }
  } else if (!HasEnvironment() && !IsFinished()) {
    DCHECK(!IsLoopHeader());
    SetInitialEnvironment(pred->last_environment()->Copy());
  }

  predecessors_.Add(pred, zone());
}
531
532
// Inserts |block| into the list of blocks immediately dominated by this one.
void HBasicBlock::AddDominatedBlock(HBasicBlock* block) {
  DCHECK(!dominated_blocks_.Contains(block));
  // Keep the list of dominated blocks sorted by block id, so that for any
  // two succeeding blocks in this list the predecessor comes before the
  // successor.
  int index = 0;
  while (index < dominated_blocks_.length() &&
         dominated_blocks_[index]->block_id() < block->block_id()) {
    ++index;
  }
  dominated_blocks_.InsertAt(index, block, zone());
}
544
545
// Updates this block's immediate dominator to the common dominator of its
// current dominator and |other|, re-parenting it in the dominator tree if
// the result changed. A block with no dominator yet simply adopts |other|.
void HBasicBlock::AssignCommonDominator(HBasicBlock* other) {
  if (dominator_ == NULL) {
    dominator_ = other;
    other->AddDominatedBlock(this);
  } else if (other->dominator() != NULL) {
    HBasicBlock* first = dominator_;
    HBasicBlock* second = other;

    // Walk both candidates up the dominator tree; higher block ids are
    // deeper in reverse post order, so always advance the deeper one.
    while (first != second) {
      if (first->block_id() > second->block_id()) {
        first = first->dominator();
      } else {
        second = second->dominator();
      }
      DCHECK(first != NULL && second != NULL);
    }

    if (dominator_ != first) {
      // The common dominator changed: move this block under it.
      DCHECK(dominator_->dominated_blocks_.Contains(this));
      dominator_->dominated_blocks_.RemoveElement(this);
      dominator_ = first;
      first->AddDominatedBlock(this);
    }
  }
}
571
572
// For each block of the loop headed by this block, decides whether it
// dominates all subsequent reachable blocks inside the loop, using an
// edge-counting argument over the reverse-post-order block sequence.
void HBasicBlock::AssignLoopSuccessorDominators() {
  // Mark blocks that dominate all subsequent reachable blocks inside their
  // loop. Exploit the fact that blocks are sorted in reverse post order. When
  // the loop is visited in increasing block id order, if the number of
  // non-loop-exiting successor edges at the dominator_candidate block doesn't
  // exceed the number of previously encountered predecessor edges, there is no
  // path from the loop header to any block with higher id that doesn't go
  // through the dominator_candidate block. In this case, the
  // dominator_candidate block is guaranteed to dominate all blocks reachable
  // from it with higher ids.
  HBasicBlock* last = loop_information()->GetLastBackEdge();
  int outstanding_successors = 1;  // one edge from the pre-header
  // Header always dominates everything.
  MarkAsLoopSuccessorDominator();
  for (int j = block_id(); j <= last->block_id(); ++j) {
    HBasicBlock* dominator_candidate = graph_->blocks()->at(j);
    for (HPredecessorIterator it(dominator_candidate); !it.Done();
         it.Advance()) {
      HBasicBlock* predecessor = it.Current();
      // Don't count back edges.
      if (predecessor->block_id() < dominator_candidate->block_id()) {
        outstanding_successors--;
      }
    }

    // If more successors than predecessors have been seen in the loop up to
    // now, it's not possible to guarantee that the current block dominates
    // all of the blocks with higher IDs. In this case, assume conservatively
    // that those paths through loop that don't go through the current block
    // contain all of the loop's dependencies. Also be careful to record
    // dominator information about the current loop that's being processed,
    // and not nested loops, which will be processed when
    // AssignLoopSuccessorDominators gets called on their header.
    DCHECK(outstanding_successors >= 0);
    HBasicBlock* parent_loop_header = dominator_candidate->parent_loop_header();
    if (outstanding_successors == 0 &&
        (parent_loop_header == this && !dominator_candidate->IsLoopHeader())) {
      dominator_candidate->MarkAsLoopSuccessorDominator();
    }
    HControlInstruction* end = dominator_candidate->end();
    for (HSuccessorIterator it(end); !it.Done(); it.Advance()) {
      HBasicBlock* successor = it.Current();
      // Only count successors that remain inside the loop and don't loop back
      // to a loop header.
      if (successor->block_id() > dominator_candidate->block_id() &&
          successor->block_id() <= last->block_id()) {
        // Backwards edges must land on loop headers.
        // NOTE(review): the first disjunct repeats the enclosing if-condition,
        // so this DCHECK is trivially satisfied — presumably intended to
        // assert IsLoopHeader() for backward edges; confirm against upstream.
        DCHECK(successor->block_id() > dominator_candidate->block_id() ||
               successor->IsLoopHeader());
        outstanding_successors++;
      }
    }
  }
}
627
628
629int HBasicBlock::PredecessorIndexOf(HBasicBlock* predecessor) const {
630 for (int i = 0; i < predecessors_.length(); ++i) {
631 if (predecessors_[i] == predecessor) return i;
632 }
633 UNREACHABLE();
634 return -1;
635}
636
637
#ifdef DEBUG
// Debug-only self-check: the block must be finished, have a valid id, and
// multi-predecessor joins must be in edge-split form (no predecessor ends in
// a branch).
void HBasicBlock::Verify() {
  // Check that every block is finished.
  DCHECK(IsFinished());
  DCHECK(block_id() >= 0);

  // Check that the incoming edges are in edge split form.
  if (predecessors_.length() > 1) {
    for (int i = 0; i < predecessors_.length(); ++i) {
      DCHECK(predecessors_[i]->end()->SecondSuccessor() == NULL);
    }
  }
}
#endif
652
653
// Records |block| as a back edge of this loop and pulls it (and everything
// on paths to it) into the loop's block set.
void HLoopInformation::RegisterBackEdge(HBasicBlock* block) {
  this->back_edges_.Add(block, block->zone());
  AddBlock(block);
}
658
659
660HBasicBlock* HLoopInformation::GetLastBackEdge() const {
661 int max_id = -1;
662 HBasicBlock* result = NULL;
663 for (int i = 0; i < back_edges_.length(); ++i) {
664 HBasicBlock* cur = back_edges_[i];
665 if (cur->block_id() > max_id) {
666 max_id = cur->block_id();
667 result = cur;
668 }
669 }
670 return result;
671}
672
673
// Adds |block| to this loop. Blocks already owned by this loop are skipped;
// blocks belonging to a nested loop are represented by adding that loop's
// header instead; otherwise the block is claimed for this loop and its
// predecessors are added recursively (walking back toward the header).
void HLoopInformation::AddBlock(HBasicBlock* block) {
  if (block == loop_header()) return;
  if (block->parent_loop_header() == loop_header()) return;
  if (block->parent_loop_header() != NULL) {
    AddBlock(block->parent_loop_header());
  } else {
    block->set_parent_loop_header(loop_header());
    blocks_.Add(block, block->zone());
    for (int i = 0; i < block->predecessors()->length(); ++i) {
      AddBlock(block->predecessors()->at(i));
    }
  }
}
687
688
689#ifdef DEBUG
690
// Checks reachability of the blocks in this graph and stores a bit in
// the BitVector "reachable()" for every block that can be reached
// from the start block of the graph. If "dont_visit" is non-null, the given
// block is treated as if it would not be part of the graph. "visited_count()"
// returns the number of reachable blocks.
class ReachabilityAnalyzer BASE_EMBEDDED {
 public:
  // Runs the analysis eagerly in the constructor; results are available
  // immediately via visited_count() and reachable().
  ReachabilityAnalyzer(HBasicBlock* entry_block,
                       int block_count,
                       HBasicBlock* dont_visit)
      : visited_count_(0),
        stack_(16, entry_block->zone()),
        reachable_(block_count, entry_block->zone()),
        dont_visit_(dont_visit) {
    PushBlock(entry_block);
    Analyze();
  }

  int visited_count() const { return visited_count_; }
  const BitVector* reachable() const { return &reachable_; }

 private:
  // Marks |block| reachable and queues it, unless it is null, excluded, or
  // already visited.
  void PushBlock(HBasicBlock* block) {
    if (block != NULL && block != dont_visit_ &&
        !reachable_.Contains(block->block_id())) {
      reachable_.Add(block->block_id());
      stack_.Add(block, block->zone());
      visited_count_++;
    }
  }

  // Iterative depth-first traversal over successor edges.
  void Analyze() {
    while (!stack_.is_empty()) {
      HControlInstruction* end = stack_.RemoveLast()->end();
      for (HSuccessorIterator it(end); !it.Done(); it.Advance()) {
        PushBlock(it.Current());
      }
    }
  }

  int visited_count_;              // number of blocks marked reachable
  ZoneList<HBasicBlock*> stack_;   // DFS work list
  BitVector reachable_;            // bit per block id
  HBasicBlock* dont_visit_;        // block to pretend is absent (may be NULL)
};
736
737
// Debug-only whole-graph consistency check: per-block invariants, successor/
// predecessor symmetry, phi well-formedness and join-block agreement. With
// |do_full_verify| it additionally checks connectivity and that each block's
// recorded dominator is on every path to it.
void HGraph::Verify(bool do_full_verify) const {
  Heap::RelocationLock relocation_lock(isolate()->heap());
  AllowHandleDereference allow_deref;
  AllowDeferredHandleDereference allow_deferred_deref;
  for (int i = 0; i < blocks_.length(); i++) {
    HBasicBlock* block = blocks_.at(i);

    block->Verify();

    // Check that every block contains at least one node and that only the last
    // node is a control instruction.
    HInstruction* current = block->first();
    DCHECK(current != NULL && current->IsBlockEntry());
    while (current != NULL) {
      DCHECK((current->next() == NULL) == current->IsControlInstruction());
      DCHECK(current->block() == block);
      current->Verify();
      current = current->next();
    }

    // Check that successors are correctly set.
    HBasicBlock* first = block->end()->FirstSuccessor();
    HBasicBlock* second = block->end()->SecondSuccessor();
    DCHECK(second == NULL || first != NULL);

    // Check that the predecessor array is correct.
    if (first != NULL) {
      DCHECK(first->predecessors()->Contains(block));
      if (second != NULL) {
        DCHECK(second->predecessors()->Contains(block));
      }
    }

    // Check that phis have correct arguments.
    for (int j = 0; j < block->phis()->length(); j++) {
      HPhi* phi = block->phis()->at(j);
      phi->Verify();
    }

    // Check that all join blocks have predecessors that end with an
    // unconditional goto and agree on their environment node id.
    if (block->predecessors()->length() >= 2) {
      BailoutId id =
          block->predecessors()->first()->last_environment()->ast_id();
      for (int k = 0; k < block->predecessors()->length(); k++) {
        HBasicBlock* predecessor = block->predecessors()->at(k);
        DCHECK(predecessor->end()->IsGoto() ||
               predecessor->end()->IsDeoptimize());
        DCHECK(predecessor->last_environment()->ast_id() == id);
      }
    }
  }

  // Check special property of first block to have no predecessors.
  DCHECK(blocks_.at(0)->predecessors()->is_empty());

  if (do_full_verify) {
    // Check that the graph is fully connected.
    ReachabilityAnalyzer analyzer(entry_block_, blocks_.length(), NULL);
    DCHECK(analyzer.visited_count() == blocks_.length());

    // Check that entry block dominator is NULL.
    DCHECK(entry_block_->dominator() == NULL);

    // Check dominators.
    for (int i = 0; i < blocks_.length(); ++i) {
      HBasicBlock* block = blocks_.at(i);
      if (block->dominator() == NULL) {
        // Only start block may have no dominator assigned to.
        DCHECK(i == 0);
      } else {
        // Assert that block is unreachable if dominator must not be visited.
        ReachabilityAnalyzer dominator_analyzer(entry_block_,
                                                blocks_.length(),
                                                block->dominator());
        DCHECK(!dominator_analyzer.reachable()->Contains(block->block_id()));
      }
    }
  }
}
818
819#endif
820
821
// Lazily creates and caches an integer HConstant in |pointer|, inserting it
// right after the graph entry; re-inserts it if it was removed by dead code
// elimination in the meantime.
HConstant* HGraph::GetConstant(SetOncePointer<HConstant>* pointer,
                               int32_t value) {
  if (!pointer->is_set()) {
    // Can't pass GetInvalidContext() to HConstant::New, because that will
    // recursively call GetConstant
    HConstant* constant = HConstant::New(isolate(), zone(), NULL, value);
    constant->InsertAfter(entry_block()->first());
    pointer->set(constant);
    return constant;
  }
  return ReinsertConstantIfNecessary(pointer->get());
}
834
835
// Revives a cached constant that dead-code elimination removed from the
// graph by clearing its dead flag and inserting it after the entry again.
HConstant* HGraph::ReinsertConstantIfNecessary(HConstant* constant) {
  if (!constant->IsLinked()) {
    // The constant was removed from the graph. Reinsert.
    constant->ClearFlag(HValue::kIsDead);
    constant->InsertAfter(entry_block()->first());
  }
  return constant;
}
844
845
// Cached integer constant 0.
HConstant* HGraph::GetConstant0() {
  return GetConstant(&constant_0_, 0);
}
849
850
// Cached integer constant 1.
HConstant* HGraph::GetConstant1() {
  return GetConstant(&constant_1_, 1);
}
854
855
// Cached integer constant -1.
HConstant* HGraph::GetConstantMinus1() {
  return GetConstant(&constant_minus1_, -1);
}
859
860
861HConstant* HGraph::GetConstantBool(bool value) {
862 return value ? GetConstantTrue() : GetConstantFalse();
863}
864
// Defines HGraph::GetConstant<Name>() accessors for oddball constants
// (undefined, true, false, the hole, null, optimized-out). Each constant is
// created lazily, cached in a SetOncePointer, inserted right after the graph
// entry, and re-inserted if dead-code elimination removed it.
#define DEFINE_GET_CONSTANT(Name, name, constant, type, htype, boolean_value, \
                            undetectable)                                     \
  HConstant* HGraph::GetConstant##Name() {                                    \
    if (!constant_##name##_.is_set()) {                                       \
      HConstant* constant = new (zone()) HConstant(                           \
          Unique<Object>::CreateImmovable(isolate()->factory()->constant()),  \
          Unique<Map>::CreateImmovable(isolate()->factory()->type##_map()),   \
          false, Representation::Tagged(), htype, true, boolean_value,        \
          undetectable, ODDBALL_TYPE);                                        \
      constant->InsertAfter(entry_block()->first());                          \
      constant_##name##_.set(constant);                                       \
    }                                                                         \
    return ReinsertConstantIfNecessary(constant_##name##_.get());             \
  }

DEFINE_GET_CONSTANT(Undefined, undefined, undefined_value, undefined,
                    HType::Undefined(), false, true)
DEFINE_GET_CONSTANT(True, true, true_value, boolean, HType::Boolean(), true,
                    false)
DEFINE_GET_CONSTANT(False, false, false_value, boolean, HType::Boolean(), false,
                    false)
DEFINE_GET_CONSTANT(Hole, the_hole, the_hole_value, the_hole, HType::None(),
                    false, false)
DEFINE_GET_CONSTANT(Null, null, null_value, null, HType::Null(), false, true)
DEFINE_GET_CONSTANT(OptimizedOut, optimized_out, optimized_out, optimized_out,
                    HType::None(), false, false)

#undef DEFINE_GET_CONSTANT
893
// Defines HGraph::IsConstant<Name>() predicates that test whether a given
// HConstant is the corresponding cached canonical constant.
#define DEFINE_IS_CONSTANT(Name, name)                                         \
bool HGraph::IsConstant##Name(HConstant* constant) {                           \
  return constant_##name##_.is_set() && constant == constant_##name##_.get(); \
}
DEFINE_IS_CONSTANT(Undefined, undefined)
DEFINE_IS_CONSTANT(0, 0)
DEFINE_IS_CONSTANT(1, 1)
DEFINE_IS_CONSTANT(Minus1, minus1)
DEFINE_IS_CONSTANT(True, true)
DEFINE_IS_CONSTANT(False, false)
DEFINE_IS_CONSTANT(Hole, the_hole)
DEFINE_IS_CONSTANT(Null, null)

#undef DEFINE_IS_CONSTANT
908
909
// Cached sentinel constant used where no real context is available.
// 0xFFFFC0C7 is presumably a recognizable marker value — confirm its
// significance against users of GetInvalidContext().
HConstant* HGraph::GetInvalidContext() {
  return GetConstant(&constant_invalid_context_, 0xFFFFC0C7);
}
913
914
915bool HGraph::IsStandardConstant(HConstant* constant) {
916 if (IsConstantUndefined(constant)) return true;
917 if (IsConstant0(constant)) return true;
918 if (IsConstant1(constant)) return true;
919 if (IsConstantMinus1(constant)) return true;
920 if (IsConstantTrue(constant)) return true;
921 if (IsConstantFalse(constant)) return true;
922 if (IsConstantHole(constant)) return true;
923 if (IsConstantNull(constant)) return true;
924 return false;
925}
926
927
// Default constructor: builder must be supplied later via Initialize().
HGraphBuilder::IfBuilder::IfBuilder() : builder_(NULL), needs_compare_(true) {}
929
930
// Starts a fresh if-construct: creates the true/false blocks and expects a
// compare to be added before Then()/Else().
HGraphBuilder::IfBuilder::IfBuilder(HGraphBuilder* builder)
    : needs_compare_(true) {
  Initialize(builder);
}
935
936
// Resumes a previously captured if-construct: the true/false blocks come
// from |continuation|, so no compare is needed.
HGraphBuilder::IfBuilder::IfBuilder(HGraphBuilder* builder,
                                    HIfContinuation* continuation)
    : needs_compare_(false), first_true_block_(NULL), first_false_block_(NULL) {
  InitializeDontCreateBlocks(builder);
  continuation->Continue(&first_true_block_, &first_false_block_);
}
943
944
// Resets all IfBuilder state flags and merge bookkeeping without creating
// the true/false target blocks (callers that need them create them).
void HGraphBuilder::IfBuilder::InitializeDontCreateBlocks(
    HGraphBuilder* builder) {
  builder_ = builder;
  finished_ = false;
  did_then_ = false;
  did_else_ = false;
  did_else_if_ = false;
  did_and_ = false;
  did_or_ = false;
  captured_ = false;
  pending_merge_block_ = false;
  split_edge_merge_block_ = NULL;
  merge_at_join_blocks_ = NULL;
  normal_merge_at_join_block_count_ = 0;
  deopt_merge_at_join_block_count_ = 0;
}
961
962
// Full initialization: resets state and creates the true/false target
// blocks, each seeded with a copy of the current environment.
void HGraphBuilder::IfBuilder::Initialize(HGraphBuilder* builder) {
  InitializeDontCreateBlocks(builder);
  HEnvironment* env = builder->environment();
  first_true_block_ = builder->CreateBasicBlock(env->Copy());
  first_false_block_ = builder->CreateBasicBlock(env->Copy());
}
969
970
// Wires |compare| as the terminator of the current block, routing its
// successors to the true/false targets.  Handles three shapes: a fresh
// else-if arm (resets per-arm state and makes new targets), a pending
// And()/Or() chain (routes the short-circuit edge through a split-edge
// block), and the plain single-condition case.
HControlInstruction* HGraphBuilder::IfBuilder::AddCompare(
    HControlInstruction* compare) {
  DCHECK(did_then_ == did_else_);
  if (did_else_) {
    // Handle if-then-elseif
    did_else_if_ = true;
    did_else_ = false;
    did_then_ = false;
    did_and_ = false;
    did_or_ = false;
    pending_merge_block_ = false;
    split_edge_merge_block_ = NULL;
    HEnvironment* env = builder()->environment();
    first_true_block_ = builder()->CreateBasicBlock(env->Copy());
    first_false_block_ = builder()->CreateBasicBlock(env->Copy());
  }
  if (split_edge_merge_block_ != NULL) {
    HEnvironment* env = first_false_block_->last_environment();
    HBasicBlock* split_edge = builder()->CreateBasicBlock(env->Copy());
    // For Or() the taken edge short-circuits to the merged true target;
    // for And() the not-taken edge short-circuits to the merged false target.
    if (did_or_) {
      compare->SetSuccessorAt(0, split_edge);
      compare->SetSuccessorAt(1, first_false_block_);
    } else {
      compare->SetSuccessorAt(0, first_true_block_);
      compare->SetSuccessorAt(1, split_edge);
    }
    builder()->GotoNoSimulate(split_edge, split_edge_merge_block_);
  } else {
    compare->SetSuccessorAt(0, first_true_block_);
    compare->SetSuccessorAt(1, first_false_block_);
  }
  builder()->FinishCurrentBlock(compare);
  needs_compare_ = false;
  return compare;
}
1006
1007
// Starts a short-circuit OR arm: all true edges funnel into a shared
// split-edge merge block (created lazily on the first Or()), and evaluation
// of the next condition continues in the old false block.
void HGraphBuilder::IfBuilder::Or() {
  DCHECK(!needs_compare_);
  DCHECK(!did_and_);  // Or() and And() cannot be mixed in one chain.
  did_or_ = true;
  HEnvironment* env = first_false_block_->last_environment();
  if (split_edge_merge_block_ == NULL) {
    split_edge_merge_block_ = builder()->CreateBasicBlock(env->Copy());
    builder()->GotoNoSimulate(first_true_block_, split_edge_merge_block_);
    first_true_block_ = split_edge_merge_block_;
  }
  builder()->set_current_block(first_false_block_);
  first_false_block_ = builder()->CreateBasicBlock(env->Copy());
}
1021
1022
// Starts a short-circuit AND arm: mirror image of Or() — false edges funnel
// into the shared split-edge merge block, and the next condition is
// evaluated in the old true block.
void HGraphBuilder::IfBuilder::And() {
  DCHECK(!needs_compare_);
  DCHECK(!did_or_);  // Or() and And() cannot be mixed in one chain.
  did_and_ = true;
  HEnvironment* env = first_false_block_->last_environment();
  if (split_edge_merge_block_ == NULL) {
    split_edge_merge_block_ = builder()->CreateBasicBlock(env->Copy());
    builder()->GotoNoSimulate(first_false_block_, split_edge_merge_block_);
    first_false_block_ = split_edge_merge_block_;
  }
  builder()->set_current_block(first_true_block_);
  first_true_block_ = builder()->CreateBasicBlock(env->Copy());
}
1036
1037
// Finishes the if and stores its two dangling continuation blocks in
// |continuation| for later resumption; leaves no current block behind.
void HGraphBuilder::IfBuilder::CaptureContinuation(
    HIfContinuation* continuation) {
  DCHECK(!did_else_if_);
  DCHECK(!finished_);
  DCHECK(!captured_);

  HBasicBlock* true_block = NULL;
  HBasicBlock* false_block = NULL;
  Finish(&true_block, &false_block);
  DCHECK(true_block != NULL);
  DCHECK(false_block != NULL);
  continuation->Capture(true_block, false_block);
  captured_ = true;
  builder()->set_current_block(NULL);
  End();
}
1054
1055
// Finishes the if and routes its still-open branches into the corresponding
// branches of an existing |continuation| instead of creating a new merge.
void HGraphBuilder::IfBuilder::JoinContinuation(HIfContinuation* continuation) {
  DCHECK(!did_else_if_);
  DCHECK(!finished_);
  DCHECK(!captured_);
  HBasicBlock* true_block = NULL;
  HBasicBlock* false_block = NULL;
  Finish(&true_block, &false_block);
  merge_at_join_blocks_ = NULL;
  if (true_block != NULL && !true_block->IsFinished()) {
    DCHECK(continuation->IsTrueReachable());
    builder()->GotoNoSimulate(true_block, continuation->true_branch());
  }
  if (false_block != NULL && !false_block->IsFinished()) {
    DCHECK(continuation->IsFalseReachable());
    builder()->GotoNoSimulate(false_block, continuation->false_branch());
  }
  captured_ = true;  // Suppresses the merge logic in End().
  End();
}
1075
1076
// Enters the "then" branch.  If no compare was ever added, emits a branch on
// a constant-false value so the then-block exists but is effectively dead.
void HGraphBuilder::IfBuilder::Then() {
  DCHECK(!captured_);
  DCHECK(!finished_);
  did_then_ = true;
  if (needs_compare_) {
    // Handle if's without any expressions, they jump directly to the "else"
    // branch. However, we must pretend that the "then" branch is reachable,
    // so that the graph builder visits it and sees any live range extending
    // constructs within it.
    HConstant* constant_false = builder()->graph()->GetConstantFalse();
    ToBooleanICStub::Types boolean_type = ToBooleanICStub::Types();
    boolean_type.Add(ToBooleanICStub::BOOLEAN);
    HBranch* branch = builder()->New<HBranch>(
        constant_false, boolean_type, first_true_block_, first_false_block_);
    builder()->FinishCurrentBlock(branch);
  }
  builder()->set_current_block(first_true_block_);
  pending_merge_block_ = true;
}
1096
1097
// Enters the "else" branch: records the then-branch's exit for the final
// merge, then switches code emission to the false block.
void HGraphBuilder::IfBuilder::Else() {
  DCHECK(did_then_);
  DCHECK(!captured_);
  DCHECK(!finished_);
  AddMergeAtJoinBlock(false);
  builder()->set_current_block(first_false_block_);
  pending_merge_block_ = true;
  did_else_ = true;
}
1107
1108
// Terminates the current branch with an eager deoptimization; the branch is
// recorded as a deopt exit (deopt=true) rather than a normal join.
void HGraphBuilder::IfBuilder::Deopt(Deoptimizer::DeoptReason reason) {
  DCHECK(did_then_);
  builder()->Add<HDeoptimize>(reason, Deoptimizer::EAGER);
  AddMergeAtJoinBlock(true);
}
1114
1115
1116void HGraphBuilder::IfBuilder::Return(HValue* value) {
1117 HValue* parameter_count = builder()->graph()->GetConstantMinus1();
1118 builder()->FinishExitCurrentBlock(
1119 builder()->New<HReturn>(value, parameter_count));
1120 AddMergeAtJoinBlock(false);
1121}
1122
1123
1124void HGraphBuilder::IfBuilder::AddMergeAtJoinBlock(bool deopt) {
1125 if (!pending_merge_block_) return;
1126 HBasicBlock* block = builder()->current_block();
1127 DCHECK(block == NULL || !block->IsFinished());
1128 MergeAtJoinBlock* record = new (builder()->zone())
1129 MergeAtJoinBlock(block, deopt, merge_at_join_blocks_);
1130 merge_at_join_blocks_ = record;
1131 if (block != NULL) {
1132 DCHECK(block->end() == NULL);
1133 if (deopt) {
1134 normal_merge_at_join_block_count_++;
1135 } else {
1136 deopt_merge_at_join_block_count_++;
1137 }
1138 }
1139 builder()->set_current_block(NULL);
1140 pending_merge_block_ = false;
1141}
1142
1143
// Completes the if structure, synthesizing an empty Then()/Else() for any
// branch the user never entered so both exits are recorded for merging.
void HGraphBuilder::IfBuilder::Finish() {
  DCHECK(!finished_);
  if (!did_then_) {
    Then();
  }
  AddMergeAtJoinBlock(false);
  if (!did_else_) {
    Else();
    AddMergeAtJoinBlock(false);
  }
  finished_ = true;
}
1156
1157
// Finishes the if and returns the two dangling continuation blocks.  The
// merge list is in reverse order: the head is the else record, its
// successor the then record (exactly two entries after Finish()).
void HGraphBuilder::IfBuilder::Finish(HBasicBlock** then_continuation,
                                      HBasicBlock** else_continuation) {
  Finish();

  MergeAtJoinBlock* else_record = merge_at_join_blocks_;
  if (else_continuation != NULL) {
    *else_continuation = else_record->block_;
  }
  MergeAtJoinBlock* then_record = else_record->next_;
  if (then_continuation != NULL) {
    *then_continuation = then_record->block_;
  }
  DCHECK(then_record->next_ == NULL);
}
1172
1173
1174void HGraphBuilder::IfBuilder::EndUnreachable() {
1175 if (captured_) return;
1176 Finish();
1177 builder()->set_current_block(nullptr);
1178}
1179
1180
// Completes the if: merges all recorded normal exits into a join block
// (skipping the merge entirely when only one block survives) and seals each
// deopt exit with an abnormal-exit terminator.
void HGraphBuilder::IfBuilder::End() {
  if (captured_) return;
  Finish();

  int total_merged_blocks = normal_merge_at_join_block_count_ +
                            deopt_merge_at_join_block_count_;
  DCHECK(total_merged_blocks >= 1);
  HBasicBlock* merge_block =
      total_merged_blocks == 1 ? NULL : builder()->graph()->CreateBasicBlock();

  // Merge non-deopt blocks first to ensure environment has right size for
  // padding.
  MergeAtJoinBlock* current = merge_at_join_blocks_;
  while (current != NULL) {
    if (!current->deopt_ && current->block_ != NULL) {
      // If there is only one block that makes it through to the end of the
      // if, then just set it as the current block and continue rather than
      // creating an unnecessary merge block.
      if (total_merged_blocks == 1) {
        builder()->set_current_block(current->block_);
        return;
      }
      builder()->GotoNoSimulate(current->block_, merge_block);
    }
    current = current->next_;
  }

  // Merge deopt blocks, padding when necessary.
  current = merge_at_join_blocks_;
  while (current != NULL) {
    if (current->deopt_ && current->block_ != NULL) {
      current->block_->FinishExit(
          HAbnormalExit::New(builder()->isolate(), builder()->zone(), NULL),
          SourcePosition::Unknown());
    }
    current = current->next_;
  }
  builder()->set_current_block(merge_block);
}
1220
1221
// while (true) loop: no context, no direction and no induction increment.
HGraphBuilder::LoopBuilder::LoopBuilder(HGraphBuilder* builder) {
  Initialize(builder, NULL, kWhileTrue, NULL);
}
1225
1226
// Counting loop with the default step of 1 in the given |direction|.
HGraphBuilder::LoopBuilder::LoopBuilder(HGraphBuilder* builder, HValue* context,
                                        LoopBuilder::Direction direction) {
  Initialize(builder, context, direction, builder->graph()->GetConstant1());
}
1231
1232
1233HGraphBuilder::LoopBuilder::LoopBuilder(HGraphBuilder* builder, HValue* context,
1234 LoopBuilder::Direction direction,
1235 HValue* increment_amount) {
1236 Initialize(builder, context, direction, increment_amount);
1237 increment_amount_ = increment_amount;
1238}
1239
1240
// Shared constructor body: stores the loop parameters and creates the loop
// header block; body/exit blocks are created lazily in BeginBody()/Break().
void HGraphBuilder::LoopBuilder::Initialize(HGraphBuilder* builder,
                                            HValue* context,
                                            Direction direction,
                                            HValue* increment_amount) {
  builder_ = builder;
  context_ = context;
  direction_ = direction;
  increment_amount_ = increment_amount;

  finished_ = false;
  header_block_ = builder->CreateLoopHeaderBlock();
  body_block_ = NULL;
  exit_block_ = NULL;
  exit_trampoline_block_ = NULL;
}
1256
1257
// Opens a counting loop body: creates the induction phi in the header,
// branches on `phi <token> terminating` between body and exit, and returns
// the loop variable visible to the body (the phi, or the pre-incremented /
// pre-decremented value for kPre* directions).
HValue* HGraphBuilder::LoopBuilder::BeginBody(
    HValue* initial,
    HValue* terminating,
    Token::Value token) {
  DCHECK(direction_ != kWhileTrue);
  HEnvironment* env = builder_->environment();
  phi_ = header_block_->AddNewPhi(env->values()->length());
  phi_->AddInput(initial);
  env->Push(initial);
  builder_->GotoNoSimulate(header_block_);

  HEnvironment* body_env = env->Copy();
  HEnvironment* exit_env = env->Copy();
  // Remove the phi from the expression stack
  body_env->Pop();
  exit_env->Pop();
  body_block_ = builder_->CreateBasicBlock(body_env);
  exit_block_ = builder_->CreateBasicBlock(exit_env);

  builder_->set_current_block(header_block_);
  env->Pop();
  builder_->FinishCurrentBlock(builder_->New<HCompareNumericAndBranch>(
      phi_, terminating, token, body_block_, exit_block_));

  builder_->set_current_block(body_block_);
  if (direction_ == kPreIncrement || direction_ == kPreDecrement) {
    Isolate* isolate = builder_->isolate();
    HValue* one = builder_->graph()->GetConstant1();
    if (direction_ == kPreIncrement) {
      increment_ = HAdd::New(isolate, zone(), context_, phi_, one);
    } else {
      increment_ = HSub::New(isolate, zone(), context_, phi_, one);
    }
    // The induction update of a counting loop is known not to overflow here.
    increment_->ClearFlag(HValue::kCanOverflow);
    builder_->AddInstruction(increment_);
    return increment_;
  } else {
    return phi_;
  }
}
1298
1299
// Opens the body of a while-true loop: jumps into the header and drops
// |drop_count| values from the environment's expression stack.
void HGraphBuilder::LoopBuilder::BeginBody(int drop_count) {
  DCHECK(direction_ == kWhileTrue);
  HEnvironment* env = builder_->environment();
  builder_->GotoNoSimulate(header_block_);
  builder_->set_current_block(header_block_);
  env->Drop(drop_count);
}
1307
1308
// Emits a break: routes the current block to a (lazily created) trampoline
// block that EndBody() will make the code-after-the-loop block.
void HGraphBuilder::LoopBuilder::Break() {
  if (exit_trampoline_block_ == NULL) {
    // It's the first time we see a break.
    if (direction_ == kWhileTrue) {
      HEnvironment* env = builder_->environment()->Copy();
      exit_trampoline_block_ = builder_->CreateBasicBlock(env);
    } else {
      // Counting loop: the normal exit also funnels into the trampoline.
      HEnvironment* env = exit_block_->last_environment()->Copy();
      exit_trampoline_block_ = builder_->CreateBasicBlock(env);
      builder_->GotoNoSimulate(exit_block_, exit_trampoline_block_);
    }
  }

  builder_->GotoNoSimulate(exit_trampoline_block_);
  builder_->set_current_block(NULL);
}
1325
1326
// Closes the loop body: applies a kPost* induction update if requested,
// feeds the new induction value into the header phi, registers the back
// edge, and resumes emission in the exit (or break trampoline) block.
void HGraphBuilder::LoopBuilder::EndBody() {
  DCHECK(!finished_);

  if (direction_ == kPostIncrement || direction_ == kPostDecrement) {
    Isolate* isolate = builder_->isolate();
    if (direction_ == kPostIncrement) {
      increment_ =
          HAdd::New(isolate, zone(), context_, phi_, increment_amount_);
    } else {
      increment_ =
          HSub::New(isolate, zone(), context_, phi_, increment_amount_);
    }
    increment_->ClearFlag(HValue::kCanOverflow);
    builder_->AddInstruction(increment_);
  }

  if (direction_ != kWhileTrue) {
    // Push the new increment value on the expression stack to merge into
    // the phi.
    builder_->environment()->Push(increment_);
  }
  HBasicBlock* last_block = builder_->current_block();
  builder_->GotoNoSimulate(last_block, header_block_);
  header_block_->loop_information()->RegisterBackEdge(last_block);

  if (exit_trampoline_block_ != NULL) {
    builder_->set_current_block(exit_trampoline_block_);
  } else {
    builder_->set_current_block(exit_block_);
  }
  finished_ = true;
}
1359
1360
// Allocates the HGraph, optionally starts statistics/position tracking, and
// runs BuildGraph() from the entry block.  Returns NULL on bailout.
HGraph* HGraphBuilder::CreateGraph() {
  graph_ = new (zone()) HGraph(info_, descriptor_);
  if (FLAG_hydrogen_stats) isolate()->GetHStatistics()->Initialize(info_);
  if (!info_->IsStub() && info_->is_tracking_positions()) {
    // Register the outermost function (inline id 0) for position tracking.
    TraceInlinedFunction(info_->shared_info(), SourcePosition::Unknown());
  }
  CompilationPhase phase("H_Block building", info_);
  set_current_block(graph()->entry_block());
  if (!BuildGraph()) return NULL;
  graph()->FinalizeUniqueness();
  return graph_;
}
1373
// Registers |shared| in the graph's inlined-function table and returns its
// inline id.  When --hydrogen-track-positions is on, also dumps the
// function's source (reversibly escaped) and, for nested inlinings, an
// INLINE trace line to the code tracer.
int HGraphBuilder::TraceInlinedFunction(Handle<SharedFunctionInfo> shared,
                                        SourcePosition position) {
  DCHECK(info_->is_tracking_positions());

  int inline_id = static_cast<int>(graph()->inlined_function_infos().size());
  HInlinedFunctionInfo info(shared->start_position());
  if (!shared->script()->IsUndefined(isolate())) {
    Handle<Script> script(Script::cast(shared->script()), isolate());

    if (FLAG_hydrogen_track_positions &&
        !script->source()->IsUndefined(isolate())) {
      CodeTracer::Scope tracing_scope(isolate()->GetCodeTracer());
      Object* source_name = script->name();
      OFStream os(tracing_scope.file());
      os << "--- FUNCTION SOURCE (";
      if (source_name->IsString()) {
        os << String::cast(source_name)->ToCString().get() << ":";
      }
      os << shared->DebugName()->ToCString().get() << ") id{";
      os << info_->optimization_id() << "," << inline_id << "} ---\n";
      {
        // Raw-pointer SubStringRange must not see a moving GC.
        DisallowHeapAllocation no_allocation;
        int start = shared->start_position();
        int len = shared->end_position() - start;
        String::SubStringRange source(String::cast(script->source()), start,
                                      len);
        for (const auto& c : source) {
          os << AsReversiblyEscapedUC16(c);
        }
      }

      os << "\n--- END ---\n";
    }
  }

  graph()->inlined_function_infos().push_back(info);

  // id 0 is the outermost function; only real inlinings get an INLINE line.
  if (FLAG_hydrogen_track_positions && inline_id != 0) {
    CodeTracer::Scope tracing_scope(isolate()->GetCodeTracer());
    OFStream os(tracing_scope.file());
    os << "INLINE (" << shared->DebugName()->ToCString().get() << ") id{"
       << info_->optimization_id() << "," << inline_id << "} AS " << inline_id
       << " AT " << position << std::endl;
  }

  return inline_id;
}
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00001421
// Appends |instr| to the current block at the current source position;
// inside a no-side-effects scope the instruction is flagged accordingly.
HInstruction* HGraphBuilder::AddInstruction(HInstruction* instr) {
  DCHECK(current_block() != NULL);
  DCHECK(!FLAG_hydrogen_track_positions ||
         !position_.IsUnknown() ||
         !info_->IsOptimizing());
  current_block()->AddInstruction(instr, source_position());
  if (graph()->IsInsideNoSideEffectsScope()) {
    instr->SetFlag(HValue::kHasNoObservableSideEffects);
  }
  return instr;
}
1433
1434
// Terminates the current block with control instruction |last|; clears the
// current block when the terminator ends the function entirely.
void HGraphBuilder::FinishCurrentBlock(HControlInstruction* last) {
  DCHECK(!FLAG_hydrogen_track_positions ||
         !info_->IsOptimizing() ||
         !position_.IsUnknown());
  current_block()->Finish(last, source_position());
  if (last->IsReturn() || last->IsAbnormalExit()) {
    set_current_block(NULL);
  }
}
1444
1445
// Like FinishCurrentBlock() but uses FinishExit(), which also detaches the
// block's environment (used for return/deopt-style exits).
void HGraphBuilder::FinishExitCurrentBlock(HControlInstruction* instruction) {
  DCHECK(!FLAG_hydrogen_track_positions || !info_->IsOptimizing() ||
         !position_.IsUnknown());
  current_block()->FinishExit(instruction, source_position());
  if (instruction->IsReturn() || instruction->IsAbnormalExit()) {
    set_current_block(NULL);
  }
}
1454
1455
// Emits a load/add/store sequence that bumps a native StatsCounter by one,
// guarded by --native-code-counters and the counter being enabled.
void HGraphBuilder::AddIncrementCounter(StatsCounter* counter) {
  if (FLAG_native_code_counters && counter->Enabled()) {
    HValue* reference = Add<HConstant>(ExternalReference(counter));
    HValue* old_value =
        Add<HLoadNamedField>(reference, nullptr, HObjectAccess::ForCounter());
    HValue* new_value = AddUncasted<HAdd>(old_value, graph()->GetConstant1());
    new_value->ClearFlag(HValue::kCanOverflow);  // Ignore counter overflow
    Add<HStoreNamedField>(reference, HObjectAccess::ForCounter(),
                          new_value, STORE_TO_INITIALIZED_ENTRY);
  }
}
1467
1468
// Adds an HSimulate (deopt environment snapshot) for bailout id |id| to the
// current block.  Not allowed inside a no-side-effects scope.
void HGraphBuilder::AddSimulate(BailoutId id,
                                RemovableSimulate removable) {
  DCHECK(current_block() != NULL);
  DCHECK(!graph()->IsInsideNoSideEffectsScope());
  current_block()->AddNewSimulate(id, source_position(), removable);
}
1475
1476
1477HBasicBlock* HGraphBuilder::CreateBasicBlock(HEnvironment* env) {
1478 HBasicBlock* b = graph()->CreateBasicBlock();
1479 b->SetInitialEnvironment(env);
1480 return b;
1481}
1482
1483
// Creates a loop header block: its environment is a loop-header copy of the
// current one, and loop information is attached for back-edge tracking.
HBasicBlock* HGraphBuilder::CreateLoopHeaderBlock() {
  HBasicBlock* header = graph()->CreateBasicBlock();
  HEnvironment* entry_env = environment()->CopyAsLoopHeader(header);
  header->SetInitialEnvironment(entry_env);
  header->AttachLoopInformation();
  return header;
}
1491
1492
// Loads |object|'s map and decodes the elements kind from bit field 2.
HValue* HGraphBuilder::BuildGetElementsKind(HValue* object) {
  HValue* map = Add<HLoadNamedField>(object, nullptr, HObjectAccess::ForMap());

  HValue* bit_field2 =
      Add<HLoadNamedField>(map, nullptr, HObjectAccess::ForMapBitField2());
  return BuildDecodeField<Map::ElementsKindBits>(bit_field2);
}
1500
1501
// Decodes the enum-cache length from |map|'s bit field 3; the loads carry
// no observable side effects.
HValue* HGraphBuilder::BuildEnumLength(HValue* map) {
  NoObservableSideEffectsScope scope(this);
  HValue* bit_field3 =
      Add<HLoadNamedField>(map, nullptr, HObjectAccess::ForMapBitField3());
  return BuildDecodeField<Map::EnumLengthBits>(bit_field3);
}
1508
1509
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00001510HValue* HGraphBuilder::BuildCheckHeapObject(HValue* obj) {
1511 if (obj->type().IsHeapObject()) return obj;
1512 return Add<HCheckHeapObject>(obj);
1513}
1514
1515
// Ends the current block with an unconditional eager deoptimization
// followed by an abnormal exit.
void HGraphBuilder::FinishExitWithHardDeoptimization(
    Deoptimizer::DeoptReason reason) {
  Add<HDeoptimize>(reason, Deoptimizer::EAGER);
  FinishExitCurrentBlock(New<HAbnormalExit>());
}
1521
1522
// Ensures |string| is a String: emits non-smi and instance-type checks
// unless the static type already proves it.  Returns the checked value.
HValue* HGraphBuilder::BuildCheckString(HValue* string) {
  if (!string->type().IsString()) {
    DCHECK(!string->IsConstant() ||
           !HConstant::cast(string)->HasStringValue());
    BuildCheckHeapObject(string);
    return Add<HCheckInstanceType>(string, HCheckInstanceType::IS_STRING);
  }
  return string;
}
1532
// Wraps a sloppy-mode receiver if needed.  Skips the wrap when |object| is
// statically a JSObject, or when the (constant) callee is strict or native
// — those callees receive the receiver unwrapped.
HValue* HGraphBuilder::BuildWrapReceiver(HValue* object, HValue* checked) {
  if (object->type().IsJSObject()) return object;
  HValue* function = checked->ActualValue();
  if (function->IsConstant() &&
      HConstant::cast(function)->handle(isolate())->IsJSFunction()) {
    Handle<JSFunction> f = Handle<JSFunction>::cast(
        HConstant::cast(function)->handle(isolate()));
    SharedFunctionInfo* shared = f->shared();
    if (is_strict(shared->language_mode()) || shared->native()) return object;
  }
  return Add<HWrapReceiver>(object, checked);
}
1545
1546
// Bounds-checks |key| against capacity + kMaxGap (deopting on larger holes)
// and grows the elements store to a capacity derived from |key|.
HValue* HGraphBuilder::BuildCheckAndGrowElementsCapacity(
    HValue* object, HValue* elements, ElementsKind kind, HValue* length,
    HValue* capacity, HValue* key) {
  HValue* max_gap = Add<HConstant>(static_cast<int32_t>(JSObject::kMaxGap));
  HValue* max_capacity = AddUncasted<HAdd>(capacity, max_gap);
  Add<HBoundsCheck>(key, max_capacity);

  HValue* new_capacity = BuildNewElementsCapacity(key);
  HValue* new_elements = BuildGrowElementsCapacity(object, elements, kind, kind,
                                                   length, new_capacity);
  return new_elements;
}
1559
1560
// Handles a keyed store that may grow the backing store.  For out-of-bounds
// keys (>= length, or == length for packed kinds) it grows the elements
// (inline in stubs, via HMaybeGrowElements otherwise), bumps a JSArray's
// length, and pre-initializes the slot for FAST_SMI stores; in-bounds keys
// just get a bounds check.  Returns the (possibly new) elements.
HValue* HGraphBuilder::BuildCheckForCapacityGrow(
    HValue* object,
    HValue* elements,
    ElementsKind kind,
    HValue* length,
    HValue* key,
    bool is_js_array,
    PropertyAccessType access_type) {
  IfBuilder length_checker(this);

  // Holey kinds may store at any index >= length; packed kinds only append.
  Token::Value token = IsHoleyElementsKind(kind) ? Token::GTE : Token::EQ;
  length_checker.If<HCompareNumericAndBranch>(key, length, token);

  length_checker.Then();

  HValue* current_capacity = AddLoadFixedArrayLength(elements);

  if (top_info()->IsStub()) {
    IfBuilder capacity_checker(this);
    capacity_checker.If<HCompareNumericAndBranch>(key, current_capacity,
                                                  Token::GTE);
    capacity_checker.Then();
    HValue* new_elements = BuildCheckAndGrowElementsCapacity(
        object, elements, kind, length, current_capacity, key);
    environment()->Push(new_elements);
    capacity_checker.Else();
    environment()->Push(elements);
    capacity_checker.End();
  } else {
    HValue* result = Add<HMaybeGrowElements>(
        object, elements, key, current_capacity, is_js_array, kind);
    environment()->Push(result);
  }

  if (is_js_array) {
    HValue* new_length = AddUncasted<HAdd>(key, graph_->GetConstant1());
    new_length->ClearFlag(HValue::kCanOverflow);

    Add<HStoreNamedField>(object, HObjectAccess::ForArrayLength(kind),
                          new_length);
  }

  if (access_type == STORE && kind == FAST_SMI_ELEMENTS) {
    HValue* checked_elements = environment()->Top();

    // Write zero to ensure that the new element is initialized with some smi.
    Add<HStoreKeyed>(checked_elements, key, graph()->GetConstant0(), nullptr,
                     kind);
  }

  length_checker.Else();
  Add<HBoundsCheck>(key, length);

  environment()->Push(elements);
  length_checker.End();

  return environment()->Pop();
}
1619
1620
// Copy-on-write support: if |elements| has the fixed-COW-array map, copies
// it into a fresh writable store of the same capacity; otherwise passes the
// original through.  Returns the writable elements.
HValue* HGraphBuilder::BuildCopyElementsOnWrite(HValue* object,
                                                HValue* elements,
                                                ElementsKind kind,
                                                HValue* length) {
  Factory* factory = isolate()->factory();

  IfBuilder cow_checker(this);

  cow_checker.If<HCompareMap>(elements, factory->fixed_cow_array_map());
  cow_checker.Then();

  HValue* capacity = AddLoadFixedArrayLength(elements);

  HValue* new_elements = BuildGrowElementsCapacity(object, elements, kind,
                                                   kind, length, capacity);

  environment()->Push(new_elements);

  cow_checker.Else();

  environment()->Push(elements);

  cow_checker.End();

  return environment()->Pop();
}
1647
1648
// Transitions |object| from |from_kind| to |to_kind| elements.  Traps
// allocation mementos when the transition is site-tracked; non-trivial
// transitions (e.g. smi->double) re-box the existing elements unless the
// store is the empty fixed array; finally installs the new |map|.
void HGraphBuilder::BuildTransitionElementsKind(HValue* object,
                                                HValue* map,
                                                ElementsKind from_kind,
                                                ElementsKind to_kind,
                                                bool is_jsarray) {
  DCHECK(!IsFastHoleyElementsKind(from_kind) ||
         IsFastHoleyElementsKind(to_kind));

  if (AllocationSite::GetMode(from_kind, to_kind) == TRACK_ALLOCATION_SITE) {
    Add<HTrapAllocationMemento>(object);
  }

  if (!IsSimpleMapChangeTransition(from_kind, to_kind)) {
    HInstruction* elements = AddLoadElements(object);

    HInstruction* empty_fixed_array = Add<HConstant>(
        isolate()->factory()->empty_fixed_array());

    IfBuilder if_builder(this);

    if_builder.IfNot<HCompareObjectEqAndBranch>(elements, empty_fixed_array);

    if_builder.Then();

    HInstruction* elements_length = AddLoadFixedArrayLength(elements);

    // JSArrays copy only up to their length; other objects copy the whole
    // backing store.
    HInstruction* array_length =
        is_jsarray
            ? Add<HLoadNamedField>(object, nullptr,
                                   HObjectAccess::ForArrayLength(from_kind))
            : elements_length;

    BuildGrowElementsCapacity(object, elements, from_kind, to_kind,
                              array_length, elements_length);

    if_builder.End();
  }

  Add<HStoreNamedField>(object, HObjectAccess::ForMap(), map);
}
1689
1690
// Checks that |receiver| is a JSObject (in the JS_OBJECT_TYPE..LAST range)
// with none of |bit_field_mask|'s map bits set, deopting otherwise.  The
// instance type and bit field are fetched with one combined 16-bit load.
void HGraphBuilder::BuildJSObjectCheck(HValue* receiver,
                                       int bit_field_mask) {
  // Check that the object isn't a smi.
  Add<HCheckHeapObject>(receiver);

  // Get the map of the receiver.
  HValue* map =
      Add<HLoadNamedField>(receiver, nullptr, HObjectAccess::ForMap());

  // Check the instance type and if an access check is needed, this can be
  // done with a single load, since both bytes are adjacent in the map.
  HObjectAccess access(HObjectAccess::ForMapInstanceTypeAndBitField());
  HValue* instance_type_and_bit_field =
      Add<HLoadNamedField>(map, nullptr, access);

  HValue* mask = Add<HConstant>(0x00FF | (bit_field_mask << 8));
  HValue* and_result = AddUncasted<HBitwise>(Token::BIT_AND,
                                             instance_type_and_bit_field,
                                             mask);
  // Range check via subtract + bounds check: any set bit-field bit pushes
  // the value far outside the allowed window, triggering the deopt.
  HValue* sub_result = AddUncasted<HSub>(and_result,
                                         Add<HConstant>(JS_OBJECT_TYPE));
  Add<HBoundsCheck>(sub_result,
                    Add<HConstant>(LAST_JS_OBJECT_TYPE + 1 - JS_OBJECT_TYPE));
}
1715
1716
// Classifies a property key for keyed access.  On the continuation's true
// path the result (pushed on the stack) is a smi element index; on the
// false path it is a unique name (symbol or internalized string).  Other
// key types are coerced (HeapNumber -> smi, non-internalized string ->
// internalized via runtime call).
void HGraphBuilder::BuildKeyedIndexCheck(HValue* key,
                                         HIfContinuation* join_continuation) {
  // The sometimes unintuitively backward ordering of the ifs below is
  // convoluted, but necessary. All of the paths must guarantee that the
  // if-true of the continuation returns a smi element index and the if-false of
  // the continuation returns either a symbol or a unique string key. All other
  // object types cause a deopt to fall back to the runtime.

  IfBuilder key_smi_if(this);
  key_smi_if.If<HIsSmiAndBranch>(key);
  key_smi_if.Then();
  {
    Push(key);  // Nothing to do, just continue to true of continuation.
  }
  key_smi_if.Else();
  {
    HValue* map = Add<HLoadNamedField>(key, nullptr, HObjectAccess::ForMap());
    HValue* instance_type =
        Add<HLoadNamedField>(map, nullptr, HObjectAccess::ForMapInstanceType());

    // Non-unique string, check for a string with a hash code that is actually
    // an index.
    STATIC_ASSERT(LAST_UNIQUE_NAME_TYPE == FIRST_NONSTRING_TYPE);
    IfBuilder not_string_or_name_if(this);
    not_string_or_name_if.If<HCompareNumericAndBranch>(
        instance_type,
        Add<HConstant>(LAST_UNIQUE_NAME_TYPE),
        Token::GT);

    not_string_or_name_if.Then();
    {
      // Non-smi, non-Name, non-String: Try to convert to smi in case of
      // HeapNumber.
      // TODO(danno): This could call some variant of ToString
      Push(AddUncasted<HForceRepresentation>(key, Representation::Smi()));
    }
    not_string_or_name_if.Else();
    {
      // String or Name: check explicitly for Name, they can short-circuit
      // directly to unique non-index key path.
      IfBuilder not_symbol_if(this);
      not_symbol_if.If<HCompareNumericAndBranch>(
          instance_type,
          Add<HConstant>(SYMBOL_TYPE),
          Token::NE);

      not_symbol_if.Then();
      {
        // String: check whether the String is a String of an index. If it is,
        // extract the index value from the hash.
        HValue* hash = Add<HLoadNamedField>(key, nullptr,
                                            HObjectAccess::ForNameHashField());
        HValue* not_index_mask = Add<HConstant>(static_cast<int>(
            String::kContainsCachedArrayIndexMask));

        HValue* not_index_test = AddUncasted<HBitwise>(
            Token::BIT_AND, hash, not_index_mask);

        IfBuilder string_index_if(this);
        string_index_if.If<HCompareNumericAndBranch>(not_index_test,
                                                     graph()->GetConstant0(),
                                                     Token::EQ);
        string_index_if.Then();
        {
          // String with index in hash: extract string and merge to index path.
          Push(BuildDecodeField<String::ArrayIndexValueBits>(hash));
        }
        string_index_if.Else();
        {
          // Key is a non-index String, check for uniqueness/internalization.
          // If it's not internalized yet, internalize it now.
          HValue* not_internalized_bit = AddUncasted<HBitwise>(
              Token::BIT_AND,
              instance_type,
              Add<HConstant>(static_cast<int>(kIsNotInternalizedMask)));

          IfBuilder internalized(this);
          internalized.If<HCompareNumericAndBranch>(not_internalized_bit,
                                                    graph()->GetConstant0(),
                                                    Token::EQ);
          internalized.Then();
          Push(key);

          internalized.Else();
          Add<HPushArguments>(key);
          HValue* intern_key = Add<HCallRuntime>(
              Runtime::FunctionForId(Runtime::kInternalizeString), 1);
          Push(intern_key);

          internalized.End();
          // Key guaranteed to be a unique string
        }
        string_index_if.JoinContinuation(join_continuation);
      }
      not_symbol_if.Else();
      {
        Push(key);  // Key is symbol
      }
      not_symbol_if.JoinContinuation(join_continuation);
    }
    not_string_or_name_if.JoinContinuation(join_continuation);
  }
  key_smi_if.JoinContinuation(join_continuation);
}
1821
1822
// Deopts when |receiver| is a JSGlobalObject; all other receivers pass.
void HGraphBuilder::BuildNonGlobalObjectCheck(HValue* receiver) {
  // Get the instance type of the receiver, and make sure that it is
  // not one of the global object types.
  HValue* map =
      Add<HLoadNamedField>(receiver, nullptr, HObjectAccess::ForMap());
  HValue* instance_type =
      Add<HLoadNamedField>(map, nullptr, HObjectAccess::ForMapInstanceType());
  HValue* global_type = Add<HConstant>(JS_GLOBAL_OBJECT_TYPE);

  IfBuilder if_global_object(this);
  if_global_object.If<HCompareNumericAndBranch>(instance_type, global_type,
                                                Token::EQ);
  if_global_object.ThenDeopt(Deoptimizer::kReceiverWasAGlobalObject);
  if_global_object.End();
}
1838
1839
// Captures a continuation whose true branch means |object| has dictionary
// (slow-mode) properties, detected by the properties array having the
// hash-table map.
void HGraphBuilder::BuildTestForDictionaryProperties(
    HValue* object,
    HIfContinuation* continuation) {
  HValue* properties = Add<HLoadNamedField>(
      object, nullptr, HObjectAccess::ForPropertiesPointer());
  HValue* properties_map =
      Add<HLoadNamedField>(properties, nullptr, HObjectAccess::ForMap());
  HValue* hash_map = Add<HLoadRoot>(Heap::kHashTableMapRootIndex);
  IfBuilder builder(this);
  builder.If<HCompareObjectEqAndBranch>(properties_map, hash_map);
  builder.CaptureContinuation(continuation);
}
1852
1853
1854HValue* HGraphBuilder::BuildKeyedLookupCacheHash(HValue* object,
1855 HValue* key) {
1856 // Load the map of the receiver, compute the keyed lookup cache hash
1857 // based on 32 bits of the map pointer and the string hash.
1858 HValue* object_map =
1859 Add<HLoadNamedField>(object, nullptr, HObjectAccess::ForMapAsInteger32());
1860 HValue* shifted_map = AddUncasted<HShr>(
1861 object_map, Add<HConstant>(KeyedLookupCache::kMapHashShift));
1862 HValue* string_hash =
1863 Add<HLoadNamedField>(key, nullptr, HObjectAccess::ForStringHashField());
1864 HValue* shifted_hash = AddUncasted<HShr>(
1865 string_hash, Add<HConstant>(String::kHashShift));
1866 HValue* xor_result = AddUncasted<HBitwise>(Token::BIT_XOR, shifted_map,
1867 shifted_hash);
1868 int mask = (KeyedLookupCache::kCapacityMask & KeyedLookupCache::kHashMask);
1869 return AddUncasted<HBitwise>(Token::BIT_AND, xor_result,
1870 Add<HConstant>(mask));
1871}
1872
1873
1874HValue* HGraphBuilder::BuildElementIndexHash(HValue* index) {
1875 int32_t seed_value = static_cast<uint32_t>(isolate()->heap()->HashSeed());
1876 HValue* seed = Add<HConstant>(seed_value);
1877 HValue* hash = AddUncasted<HBitwise>(Token::BIT_XOR, index, seed);
1878
1879 // hash = ~hash + (hash << 15);
1880 HValue* shifted_hash = AddUncasted<HShl>(hash, Add<HConstant>(15));
1881 HValue* not_hash = AddUncasted<HBitwise>(Token::BIT_XOR, hash,
1882 graph()->GetConstantMinus1());
1883 hash = AddUncasted<HAdd>(shifted_hash, not_hash);
1884
1885 // hash = hash ^ (hash >> 12);
1886 shifted_hash = AddUncasted<HShr>(hash, Add<HConstant>(12));
1887 hash = AddUncasted<HBitwise>(Token::BIT_XOR, hash, shifted_hash);
1888
1889 // hash = hash + (hash << 2);
1890 shifted_hash = AddUncasted<HShl>(hash, Add<HConstant>(2));
1891 hash = AddUncasted<HAdd>(hash, shifted_hash);
1892
1893 // hash = hash ^ (hash >> 4);
1894 shifted_hash = AddUncasted<HShr>(hash, Add<HConstant>(4));
1895 hash = AddUncasted<HBitwise>(Token::BIT_XOR, hash, shifted_hash);
1896
1897 // hash = hash * 2057;
1898 hash = AddUncasted<HMul>(hash, Add<HConstant>(2057));
1899 hash->ClearFlag(HValue::kCanOverflow);
1900
1901 // hash = hash ^ (hash >> 16);
1902 shifted_hash = AddUncasted<HShr>(hash, Add<HConstant>(16));
1903 return AddUncasted<HBitwise>(Token::BIT_XOR, hash, shifted_hash);
1904}
1905
// Emits a probe loop that looks up |key| (with precomputed |hash|) in the
// dictionary backing store |elements| of |receiver| and returns the loaded
// value. Slow cases (not found, or property details requiring special
// handling) fall back to Runtime::kKeyedGetProperty. Uses Push/Pop of
// {entry, count} across loop iterations to carry probe state through the
// environment without simulates.
HValue* HGraphBuilder::BuildUncheckedDictionaryElementLoad(HValue* receiver,
                                                           HValue* elements,
                                                           HValue* key,
                                                           HValue* hash) {
  HValue* capacity =
      Add<HLoadKeyed>(elements, Add<HConstant>(NameDictionary::kCapacityIndex),
                      nullptr, nullptr, FAST_ELEMENTS);

  // Capacity is a power of two, so capacity - 1 masks a hash into range.
  HValue* mask = AddUncasted<HSub>(capacity, graph()->GetConstant1());
  mask->ChangeRepresentation(Representation::Integer32());
  mask->ClearFlag(HValue::kCanOverflow);

  // Quadratic-style probing: entry starts at hash, advances by an
  // increasing step |count| each iteration.
  HValue* entry = hash;
  HValue* count = graph()->GetConstant1();
  Push(entry);
  Push(count);

  HIfContinuation return_or_loop_continuation(graph()->CreateBasicBlock(),
                                              graph()->CreateBasicBlock());
  HIfContinuation found_key_match_continuation(graph()->CreateBasicBlock(),
                                               graph()->CreateBasicBlock());
  LoopBuilder probe_loop(this);
  probe_loop.BeginBody(2);  // Drop entry, count from last environment to
                            // appease live range building without simulates.

  count = Pop();
  entry = Pop();
  entry = AddUncasted<HBitwise>(Token::BIT_AND, entry, mask);
  int entry_size = SeededNumberDictionary::kEntrySize;
  HValue* base_index = AddUncasted<HMul>(entry, Add<HConstant>(entry_size));
  base_index->ClearFlag(HValue::kCanOverflow);
  int start_offset = SeededNumberDictionary::kElementsStartIndex;
  // Slot 0 of the entry holds the key.
  HValue* key_index =
      AddUncasted<HAdd>(base_index, Add<HConstant>(start_offset));
  key_index->ClearFlag(HValue::kCanOverflow);

  HValue* candidate_key =
      Add<HLoadKeyed>(elements, key_index, nullptr, nullptr, FAST_ELEMENTS);
  IfBuilder if_undefined(this);
  if_undefined.If<HCompareObjectEqAndBranch>(candidate_key,
                                             graph()->GetConstantUndefined());
  if_undefined.Then();
  {
    // element == undefined means "not found". Call the runtime.
    // TODO(jkummerow): walk the prototype chain instead.
    Add<HPushArguments>(receiver, key);
    Push(Add<HCallRuntime>(Runtime::FunctionForId(Runtime::kKeyedGetProperty),
                           2));
  }
  if_undefined.Else();
  {
    // Fast path: the candidate key is reference-equal to |key|.
    IfBuilder if_match(this);
    if_match.If<HCompareObjectEqAndBranch>(candidate_key, key);
    if_match.Then();
    if_match.Else();

    // Update non-internalized string in the dictionary with internalized key?
    // Conditions: candidate is a non-smi, non-internalized, non-hole string
    // whose contents equal |key|.
    IfBuilder if_update_with_internalized(this);
    HValue* smi_check =
        if_update_with_internalized.IfNot<HIsSmiAndBranch>(candidate_key);
    if_update_with_internalized.And();
    HValue* map = AddLoadMap(candidate_key, smi_check);
    HValue* instance_type =
        Add<HLoadNamedField>(map, nullptr, HObjectAccess::ForMapInstanceType());
    HValue* not_internalized_bit = AddUncasted<HBitwise>(
        Token::BIT_AND, instance_type,
        Add<HConstant>(static_cast<int>(kIsNotInternalizedMask)));
    if_update_with_internalized.If<HCompareNumericAndBranch>(
        not_internalized_bit, graph()->GetConstant0(), Token::NE);
    if_update_with_internalized.And();
    if_update_with_internalized.IfNot<HCompareObjectEqAndBranch>(
        candidate_key, graph()->GetConstantHole());
    if_update_with_internalized.AndIf<HStringCompareAndBranch>(candidate_key,
                                                               key, Token::EQ);
    if_update_with_internalized.Then();
    // Replace a key that is a non-internalized string by the equivalent
    // internalized string for faster further lookups.
    Add<HStoreKeyed>(elements, key_index, key, nullptr, FAST_ELEMENTS);
    if_update_with_internalized.Else();

    if_update_with_internalized.JoinContinuation(&found_key_match_continuation);
    if_match.JoinContinuation(&found_key_match_continuation);

    IfBuilder found_key_match(this, &found_key_match_continuation);
    found_key_match.Then();
    // Key at current probe matches. Relevant bits in the |details| field must
    // be zero, otherwise the dictionary element requires special handling.
    // Slot 2 of the entry holds the property details.
    HValue* details_index =
        AddUncasted<HAdd>(base_index, Add<HConstant>(start_offset + 2));
    details_index->ClearFlag(HValue::kCanOverflow);
    HValue* details = Add<HLoadKeyed>(elements, details_index, nullptr, nullptr,
                                      FAST_ELEMENTS);
    int details_mask = PropertyDetails::TypeField::kMask;
    details = AddUncasted<HBitwise>(Token::BIT_AND, details,
                                    Add<HConstant>(details_mask));
    IfBuilder details_compare(this);
    details_compare.If<HCompareNumericAndBranch>(
        details, graph()->GetConstant0(), Token::EQ);
    details_compare.Then();
    // Slot 1 of the entry holds the value.
    HValue* result_index =
        AddUncasted<HAdd>(base_index, Add<HConstant>(start_offset + 1));
    result_index->ClearFlag(HValue::kCanOverflow);
    Push(Add<HLoadKeyed>(elements, result_index, nullptr, nullptr,
                         FAST_ELEMENTS));
    details_compare.Else();
    // Nontrivial property details: defer to the runtime.
    Add<HPushArguments>(receiver, key);
    Push(Add<HCallRuntime>(Runtime::FunctionForId(Runtime::kKeyedGetProperty),
                           2));
    details_compare.End();

    found_key_match.Else();
    found_key_match.JoinContinuation(&return_or_loop_continuation);
  }
  if_undefined.JoinContinuation(&return_or_loop_continuation);

  // Then-branch: a result was pushed, exit the loop. Else-branch: advance
  // the probe and iterate.
  IfBuilder return_or_loop(this, &return_or_loop_continuation);
  return_or_loop.Then();
  probe_loop.Break();

  return_or_loop.Else();
  entry = AddUncasted<HAdd>(entry, count);
  entry->ClearFlag(HValue::kCanOverflow);
  count = AddUncasted<HAdd>(count, graph()->GetConstant1());
  count->ClearFlag(HValue::kCanOverflow);
  Push(entry);
  Push(count);

  probe_loop.EndBody();

  return_or_loop.End();

  // The loaded value (or runtime-call result) left on the stack.
  return Pop();
}
2039
2040
2041HValue* HGraphBuilder::BuildCreateIterResultObject(HValue* value,
2042 HValue* done) {
2043 NoObservableSideEffectsScope scope(this);
2044
2045 // Allocate the JSIteratorResult object.
2046 HValue* result =
2047 Add<HAllocate>(Add<HConstant>(JSIteratorResult::kSize), HType::JSObject(),
Ben Murdochc5610432016-08-08 18:44:38 +01002048 NOT_TENURED, JS_OBJECT_TYPE, graph()->GetConstant0());
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00002049
2050 // Initialize the JSIteratorResult object.
2051 HValue* native_context = BuildGetNativeContext();
2052 HValue* map = Add<HLoadNamedField>(
2053 native_context, nullptr,
2054 HObjectAccess::ForContextSlot(Context::ITERATOR_RESULT_MAP_INDEX));
2055 Add<HStoreNamedField>(result, HObjectAccess::ForMap(), map);
2056 HValue* empty_fixed_array = Add<HLoadRoot>(Heap::kEmptyFixedArrayRootIndex);
2057 Add<HStoreNamedField>(result, HObjectAccess::ForPropertiesPointer(),
2058 empty_fixed_array);
2059 Add<HStoreNamedField>(result, HObjectAccess::ForElementsPointer(),
2060 empty_fixed_array);
2061 Add<HStoreNamedField>(result, HObjectAccess::ForObservableJSObjectOffset(
2062 JSIteratorResult::kValueOffset),
2063 value);
2064 Add<HStoreNamedField>(result, HObjectAccess::ForObservableJSObjectOffset(
2065 JSIteratorResult::kDoneOffset),
2066 done);
2067 STATIC_ASSERT(JSIteratorResult::kSize == 5 * kPointerSize);
2068 return result;
2069}
2070
2071
// Allocates and initializes a JSRegExpResult (a JSArray subtype with extra
// |index| and |input| fields) of the given |length|, with its elements
// backing store filled with undefined. Deopts via bounds check if |length|
// exceeds the initial fast-element array limit.
HValue* HGraphBuilder::BuildRegExpConstructResult(HValue* length,
                                                  HValue* index,
                                                  HValue* input) {
  NoObservableSideEffectsScope scope(this);
  HConstant* max_length = Add<HConstant>(JSArray::kInitialMaxFastElementArray);
  Add<HBoundsCheck>(length, max_length);

  // Generate size calculation code here in order to make it dominate
  // the JSRegExpResult allocation.
  ElementsKind elements_kind = FAST_ELEMENTS;
  HValue* size = BuildCalculateElementsSize(elements_kind, length);

  // Allocate the JSRegExpResult and the FixedArray in one step.
  HValue* result =
      Add<HAllocate>(Add<HConstant>(JSRegExpResult::kSize), HType::JSArray(),
                     NOT_TENURED, JS_ARRAY_TYPE, graph()->GetConstant0());

  // Initialize the JSRegExpResult header.
  HValue* native_context = Add<HLoadNamedField>(
      context(), nullptr,
      HObjectAccess::ForContextSlot(Context::NATIVE_CONTEXT_INDEX));
  Add<HStoreNamedField>(
      result, HObjectAccess::ForMap(),
      Add<HLoadNamedField>(
          native_context, nullptr,
          HObjectAccess::ForContextSlot(Context::REGEXP_RESULT_MAP_INDEX)));
  HConstant* empty_fixed_array =
      Add<HConstant>(isolate()->factory()->empty_fixed_array());
  Add<HStoreNamedField>(
      result, HObjectAccess::ForJSArrayOffset(JSArray::kPropertiesOffset),
      empty_fixed_array);
  // Elements pointer is set to the empty array first and replaced below
  // once the real backing store has been allocated.
  Add<HStoreNamedField>(
      result, HObjectAccess::ForJSArrayOffset(JSArray::kElementsOffset),
      empty_fixed_array);
  Add<HStoreNamedField>(
      result, HObjectAccess::ForJSArrayOffset(JSArray::kLengthOffset), length);

  // Initialize the additional fields.
  Add<HStoreNamedField>(
      result, HObjectAccess::ForJSArrayOffset(JSRegExpResult::kIndexOffset),
      index);
  Add<HStoreNamedField>(
      result, HObjectAccess::ForJSArrayOffset(JSRegExpResult::kInputOffset),
      input);

  // Allocate and initialize the elements header.
  HAllocate* elements = BuildAllocateElements(elements_kind, size);
  BuildInitializeElementsHeader(elements, elements_kind, length);

  Add<HStoreNamedField>(
      result, HObjectAccess::ForJSArrayOffset(JSArray::kElementsOffset),
      elements);

  // Initialize the elements contents with undefined.
  BuildFillElementsWithValue(
      elements, elements_kind, graph()->GetConstant0(), length,
      graph()->GetConstantUndefined());

  return result;
}
2132
2133
// Converts the number |object| (statically typed as |type|) to a string by
// consulting the number-string cache; on a cache miss it falls back to
// Runtime::kNumberToStringSkipCache. Constant numbers are converted at
// compile time. Branches that find a cache hit push the cache key index
// and join the |found| continuation; the hit path then loads the adjacent
// cached string.
HValue* HGraphBuilder::BuildNumberToString(HValue* object, Type* type) {
  NoObservableSideEffectsScope scope(this);

  // Convert constant numbers at compile time.
  if (object->IsConstant() && HConstant::cast(object)->HasNumberValue()) {
    Handle<Object> number = HConstant::cast(object)->handle(isolate());
    Handle<String> result = isolate()->factory()->NumberToString(number);
    return Add<HConstant>(result);
  }

  // Create a joinable continuation.
  HIfContinuation found(graph()->CreateBasicBlock(),
                        graph()->CreateBasicBlock());

  // Load the number string cache.
  HValue* number_string_cache =
      Add<HLoadRoot>(Heap::kNumberStringCacheRootIndex);

  // Make the hash mask from the length of the number string cache. It
  // contains two elements (number and string) for each cache entry.
  HValue* mask = AddLoadFixedArrayLength(number_string_cache);
  mask->set_type(HType::Smi());
  mask = AddUncasted<HSar>(mask, graph()->GetConstant1());
  mask = AddUncasted<HSub>(mask, graph()->GetConstant1());

  // Check whether object is a smi.
  IfBuilder if_objectissmi(this);
  if_objectissmi.If<HIsSmiAndBranch>(object);
  if_objectissmi.Then();
  {
    // Compute hash for smi similar to smi_get_hash().
    HValue* hash = AddUncasted<HBitwise>(Token::BIT_AND, object, mask);

    // Load the key.
    HValue* key_index = AddUncasted<HShl>(hash, graph()->GetConstant1());
    HValue* key = Add<HLoadKeyed>(number_string_cache, key_index, nullptr,
                                  nullptr, FAST_ELEMENTS, ALLOW_RETURN_HOLE);

    // Check if object == key.
    IfBuilder if_objectiskey(this);
    if_objectiskey.If<HCompareObjectEqAndBranch>(object, key);
    if_objectiskey.Then();
    {
      // Make the key_index available.
      Push(key_index);
    }
    if_objectiskey.JoinContinuation(&found);
  }
  if_objectissmi.Else();
  {
    if (type->Is(Type::SignedSmall())) {
      // Static type promised a smi; a non-smi here means stale feedback.
      if_objectissmi.Deopt(Deoptimizer::kExpectedSmi);
    } else {
      // Check if the object is a heap number.
      IfBuilder if_objectisnumber(this);
      HValue* objectisnumber = if_objectisnumber.If<HCompareMap>(
          object, isolate()->factory()->heap_number_map());
      if_objectisnumber.Then();
      {
        // Compute hash for heap number similar to double_get_hash().
        HValue* low = Add<HLoadNamedField>(
            object, objectisnumber,
            HObjectAccess::ForHeapNumberValueLowestBits());
        HValue* high = Add<HLoadNamedField>(
            object, objectisnumber,
            HObjectAccess::ForHeapNumberValueHighestBits());
        HValue* hash = AddUncasted<HBitwise>(Token::BIT_XOR, low, high);
        hash = AddUncasted<HBitwise>(Token::BIT_AND, hash, mask);

        // Load the key.
        HValue* key_index = AddUncasted<HShl>(hash, graph()->GetConstant1());
        HValue* key =
            Add<HLoadKeyed>(number_string_cache, key_index, nullptr, nullptr,
                            FAST_ELEMENTS, ALLOW_RETURN_HOLE);

        // Check if the key is a heap number and compare it with the object.
        IfBuilder if_keyisnotsmi(this);
        HValue* keyisnotsmi = if_keyisnotsmi.IfNot<HIsSmiAndBranch>(key);
        if_keyisnotsmi.Then();
        {
          IfBuilder if_keyisheapnumber(this);
          if_keyisheapnumber.If<HCompareMap>(
              key, isolate()->factory()->heap_number_map());
          if_keyisheapnumber.Then();
          {
            // Check if values of key and object match.
            IfBuilder if_keyeqobject(this);
            if_keyeqobject.If<HCompareNumericAndBranch>(
                Add<HLoadNamedField>(key, keyisnotsmi,
                                     HObjectAccess::ForHeapNumberValue()),
                Add<HLoadNamedField>(object, objectisnumber,
                                     HObjectAccess::ForHeapNumberValue()),
                Token::EQ);
            if_keyeqobject.Then();
            {
              // Make the key_index available.
              Push(key_index);
            }
            if_keyeqobject.JoinContinuation(&found);
          }
          if_keyisheapnumber.JoinContinuation(&found);
        }
        if_keyisnotsmi.JoinContinuation(&found);
      }
      if_objectisnumber.Else();
      {
        if (type->Is(Type::Number())) {
          // Static type promised a number; anything else is stale feedback.
          if_objectisnumber.Deopt(Deoptimizer::kExpectedHeapNumber);
        }
      }
      if_objectisnumber.JoinContinuation(&found);
    }
  }
  if_objectissmi.JoinContinuation(&found);

  // Check for cache hit.
  IfBuilder if_found(this, &found);
  if_found.Then();
  {
    // Count number to string operation in native code.
    AddIncrementCounter(isolate()->counters()->number_to_string_native());

    // Load the value in case of cache hit.
    HValue* key_index = Pop();
    HValue* value_index = AddUncasted<HAdd>(key_index, graph()->GetConstant1());
    Push(Add<HLoadKeyed>(number_string_cache, value_index, nullptr, nullptr,
                         FAST_ELEMENTS, ALLOW_RETURN_HOLE));
  }
  if_found.Else();
  {
    // Cache miss, fallback to runtime.
    Add<HPushArguments>(object);
    Push(Add<HCallRuntime>(
            Runtime::FunctionForId(Runtime::kNumberToStringSkipCache),
            1));
  }
  if_found.End();

  return Pop();
}
2274
Ben Murdoch097c5b22016-05-18 11:27:45 +01002275HValue* HGraphBuilder::BuildToNumber(HValue* input) {
2276 if (input->type().IsTaggedNumber()) {
2277 return input;
2278 }
2279 Callable callable = CodeFactory::ToNumber(isolate());
2280 HValue* stub = Add<HConstant>(callable.code());
2281 HValue* values[] = {context(), input};
Ben Murdochc5610432016-08-08 18:44:38 +01002282 HCallWithDescriptor* instr = Add<HCallWithDescriptor>(
2283 stub, 0, callable.descriptor(), ArrayVector(values));
Ben Murdoch097c5b22016-05-18 11:27:45 +01002284 instr->set_type(HType::TaggedNumber());
2285 return instr;
2286}
2287
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00002288
// Implements ToObject(receiver): JSReceivers are returned unchanged;
// primitives are wrapped in a freshly allocated JSValue whose map comes
// from the appropriate native-context constructor's initial map; undefined
// and null (which have no constructor function index) deopt to the runtime
// to throw. Branches push the constructor function index and join the
// |wrap| continuation, whose then-branch performs the wrapping.
HValue* HGraphBuilder::BuildToObject(HValue* receiver) {
  NoObservableSideEffectsScope scope(this);

  // Create a joinable continuation.
  HIfContinuation wrap(graph()->CreateBasicBlock(),
                       graph()->CreateBasicBlock());

  // Determine the proper global constructor function required to wrap
  // {receiver} into a JSValue, unless {receiver} is already a {JSReceiver}, in
  // which case we just return it. Deopts to Runtime::kToObject if {receiver}
  // is undefined or null.
  IfBuilder receiver_is_smi(this);
  receiver_is_smi.If<HIsSmiAndBranch>(receiver);
  receiver_is_smi.Then();
  {
    // Use global Number function.
    Push(Add<HConstant>(Context::NUMBER_FUNCTION_INDEX));
  }
  receiver_is_smi.Else();
  {
    // Determine {receiver} map and instance type.
    HValue* receiver_map =
        Add<HLoadNamedField>(receiver, nullptr, HObjectAccess::ForMap());
    HValue* receiver_instance_type = Add<HLoadNamedField>(
        receiver_map, nullptr, HObjectAccess::ForMapInstanceType());

    // First check whether {receiver} is already a spec object (fast case).
    IfBuilder receiver_is_not_spec_object(this);
    receiver_is_not_spec_object.If<HCompareNumericAndBranch>(
        receiver_instance_type, Add<HConstant>(FIRST_JS_RECEIVER_TYPE),
        Token::LT);
    receiver_is_not_spec_object.Then();
    {
      // Load the constructor function index from the {receiver} map.
      HValue* constructor_function_index = Add<HLoadNamedField>(
          receiver_map, nullptr,
          HObjectAccess::ForMapInObjectPropertiesOrConstructorFunctionIndex());

      // Check if {receiver} has a constructor (null and undefined have no
      // constructors, so we deoptimize to the runtime to throw an exception).
      IfBuilder constructor_function_index_is_invalid(this);
      constructor_function_index_is_invalid.If<HCompareNumericAndBranch>(
          constructor_function_index,
          Add<HConstant>(Map::kNoConstructorFunctionIndex), Token::EQ);
      constructor_function_index_is_invalid.ThenDeopt(
          Deoptimizer::kUndefinedOrNullInToObject);
      constructor_function_index_is_invalid.End();

      // Use the global constructor function.
      Push(constructor_function_index);
    }
    receiver_is_not_spec_object.JoinContinuation(&wrap);
  }
  receiver_is_smi.JoinContinuation(&wrap);

  // Wrap the receiver if necessary.
  IfBuilder if_wrap(this, &wrap);
  if_wrap.Then();
  {
    // Grab the constructor function index.
    HValue* constructor_index = Pop();

    // Load native context.
    HValue* native_context = BuildGetNativeContext();

    // Determine the initial map for the global constructor.
    HValue* constructor = Add<HLoadKeyed>(native_context, constructor_index,
                                          nullptr, nullptr, FAST_ELEMENTS);
    HValue* constructor_initial_map = Add<HLoadNamedField>(
        constructor, nullptr, HObjectAccess::ForPrototypeOrInitialMap());
    // Allocate and initialize a JSValue wrapper.
    HValue* value =
        BuildAllocate(Add<HConstant>(JSValue::kSize), HType::JSObject(),
                      JS_VALUE_TYPE, HAllocationMode());
    Add<HStoreNamedField>(value, HObjectAccess::ForMap(),
                          constructor_initial_map);
    HValue* empty_fixed_array = Add<HLoadRoot>(Heap::kEmptyFixedArrayRootIndex);
    Add<HStoreNamedField>(value, HObjectAccess::ForPropertiesPointer(),
                          empty_fixed_array);
    Add<HStoreNamedField>(value, HObjectAccess::ForElementsPointer(),
                          empty_fixed_array);
    Add<HStoreNamedField>(value, HObjectAccess::ForObservableJSObjectOffset(
                                     JSValue::kValueOffset),
                          receiver);
    Push(value);
  }
  if_wrap.Else();
  { Push(receiver); }  // Already a JSReceiver: return it unchanged.
  if_wrap.End();
  return Pop();
}
2380
2381
2382HAllocate* HGraphBuilder::BuildAllocate(
2383 HValue* object_size,
2384 HType type,
2385 InstanceType instance_type,
2386 HAllocationMode allocation_mode) {
2387 // Compute the effective allocation size.
2388 HValue* size = object_size;
2389 if (allocation_mode.CreateAllocationMementos()) {
2390 size = AddUncasted<HAdd>(size, Add<HConstant>(AllocationMemento::kSize));
2391 size->ClearFlag(HValue::kCanOverflow);
2392 }
2393
2394 // Perform the actual allocation.
2395 HAllocate* object = Add<HAllocate>(
Ben Murdochc5610432016-08-08 18:44:38 +01002396 size, type, allocation_mode.GetPretenureMode(), instance_type,
2397 graph()->GetConstant0(), allocation_mode.feedback_site());
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00002398
2399 // Setup the allocation memento.
2400 if (allocation_mode.CreateAllocationMementos()) {
2401 BuildCreateAllocationMemento(
2402 object, object_size, allocation_mode.current_site());
2403 }
2404
2405 return object;
2406}
2407
2408
2409HValue* HGraphBuilder::BuildAddStringLengths(HValue* left_length,
2410 HValue* right_length) {
2411 // Compute the combined string length and check against max string length.
2412 HValue* length = AddUncasted<HAdd>(left_length, right_length);
2413 // Check that length <= kMaxLength <=> length < MaxLength + 1.
2414 HValue* max_length = Add<HConstant>(String::kMaxLength + 1);
2415 Add<HBoundsCheck>(length, max_length);
2416 return length;
2417}
2418
2419
// Allocates and initializes a ConsString of the given |length| joining
// |left| and |right|. Chooses between the one-byte and two-byte cons
// string map based on the operands' instance-type bits, and bumps the
// native string-add counter.
HValue* HGraphBuilder::BuildCreateConsString(
    HValue* length,
    HValue* left,
    HValue* right,
    HAllocationMode allocation_mode) {
  // Determine the string instance types.
  HInstruction* left_instance_type = AddLoadStringInstanceType(left);
  HInstruction* right_instance_type = AddLoadStringInstanceType(right);

  // Allocate the cons string object. HAllocate does not care whether we
  // pass CONS_STRING_TYPE or CONS_ONE_BYTE_STRING_TYPE here, so we just use
  // CONS_STRING_TYPE here. Below we decide whether the cons string is
  // one-byte or two-byte and set the appropriate map.
  DCHECK(HAllocate::CompatibleInstanceTypes(CONS_STRING_TYPE,
                                            CONS_ONE_BYTE_STRING_TYPE));
  HAllocate* result = BuildAllocate(Add<HConstant>(ConsString::kSize),
                                    HType::String(), CONS_STRING_TYPE,
                                    allocation_mode);

  // Compute intersection and difference of instance types.
  HValue* anded_instance_types = AddUncasted<HBitwise>(
      Token::BIT_AND, left_instance_type, right_instance_type);
  HValue* xored_instance_types = AddUncasted<HBitwise>(
      Token::BIT_XOR, left_instance_type, right_instance_type);

  // We create a one-byte cons string if
  // 1. both strings are one-byte, or
  // 2. at least one of the strings is two-byte, but happens to contain only
  //    one-byte characters.
  // To do this, we check
  // 1. if both strings are one-byte, or if the one-byte data hint is set in
  //    both strings, or
  // 2. if one of the strings has the one-byte data hint set and the other
  //    string is one-byte.
  IfBuilder if_onebyte(this);
  STATIC_ASSERT(kOneByteStringTag != 0);
  STATIC_ASSERT(kOneByteDataHintMask != 0);
  if_onebyte.If<HCompareNumericAndBranch>(
      AddUncasted<HBitwise>(
          Token::BIT_AND, anded_instance_types,
          Add<HConstant>(static_cast<int32_t>(
                  kStringEncodingMask | kOneByteDataHintMask))),
      graph()->GetConstant0(), Token::NE);
  if_onebyte.Or();
  STATIC_ASSERT(kOneByteStringTag != 0 &&
                kOneByteDataHintTag != 0 &&
                kOneByteDataHintTag != kOneByteStringTag);
  if_onebyte.If<HCompareNumericAndBranch>(
      AddUncasted<HBitwise>(
          Token::BIT_AND, xored_instance_types,
          Add<HConstant>(static_cast<int32_t>(
                  kOneByteStringTag | kOneByteDataHintTag))),
      Add<HConstant>(static_cast<int32_t>(
              kOneByteStringTag | kOneByteDataHintTag)), Token::EQ);
  if_onebyte.Then();
  {
    // We can safely skip the write barrier for storing the map here.
    Add<HStoreNamedField>(
        result, HObjectAccess::ForMap(),
        Add<HConstant>(isolate()->factory()->cons_one_byte_string_map()));
  }
  if_onebyte.Else();
  {
    // We can safely skip the write barrier for storing the map here.
    Add<HStoreNamedField>(
        result, HObjectAccess::ForMap(),
        Add<HConstant>(isolate()->factory()->cons_string_map()));
  }
  if_onebyte.End();

  // Initialize the cons string fields.
  Add<HStoreNamedField>(result, HObjectAccess::ForStringHashField(),
                        Add<HConstant>(String::kEmptyHashField));
  Add<HStoreNamedField>(result, HObjectAccess::ForStringLength(), length);
  Add<HStoreNamedField>(result, HObjectAccess::ForConsStringFirst(), left);
  Add<HStoreNamedField>(result, HObjectAccess::ForConsStringSecond(), right);

  // Count the native string addition.
  AddIncrementCounter(isolate()->counters()->string_add_native());

  return result;
}
2502
2503
2504void HGraphBuilder::BuildCopySeqStringChars(HValue* src,
2505 HValue* src_offset,
2506 String::Encoding src_encoding,
2507 HValue* dst,
2508 HValue* dst_offset,
2509 String::Encoding dst_encoding,
2510 HValue* length) {
2511 DCHECK(dst_encoding != String::ONE_BYTE_ENCODING ||
2512 src_encoding == String::ONE_BYTE_ENCODING);
2513 LoopBuilder loop(this, context(), LoopBuilder::kPostIncrement);
2514 HValue* index = loop.BeginBody(graph()->GetConstant0(), length, Token::LT);
2515 {
2516 HValue* src_index = AddUncasted<HAdd>(src_offset, index);
2517 HValue* value =
2518 AddUncasted<HSeqStringGetChar>(src_encoding, src, src_index);
2519 HValue* dst_index = AddUncasted<HAdd>(dst_offset, index);
2520 Add<HSeqStringSetChar>(dst_encoding, dst, dst_index, value);
2521 }
2522 loop.EndBody();
2523}
2524
2525
2526HValue* HGraphBuilder::BuildObjectSizeAlignment(
2527 HValue* unaligned_size, int header_size) {
2528 DCHECK((header_size & kObjectAlignmentMask) == 0);
2529 HValue* size = AddUncasted<HAdd>(
2530 unaligned_size, Add<HConstant>(static_cast<int32_t>(
2531 header_size + kObjectAlignmentMask)));
2532 size->ClearFlag(HValue::kCanOverflow);
2533 return AddUncasted<HBitwise>(
2534 Token::BIT_AND, size, Add<HConstant>(static_cast<int32_t>(
2535 ~kObjectAlignmentMask)));
2536}
2537
2538
2539HValue* HGraphBuilder::BuildUncheckedStringAdd(
2540 HValue* left,
2541 HValue* right,
2542 HAllocationMode allocation_mode) {
2543 // Determine the string lengths.
2544 HValue* left_length = AddLoadStringLength(left);
2545 HValue* right_length = AddLoadStringLength(right);
2546
2547 // Compute the combined string length.
2548 HValue* length = BuildAddStringLengths(left_length, right_length);
2549
2550 // Do some manual constant folding here.
2551 if (left_length->IsConstant()) {
2552 HConstant* c_left_length = HConstant::cast(left_length);
2553 DCHECK_NE(0, c_left_length->Integer32Value());
2554 if (c_left_length->Integer32Value() + 1 >= ConsString::kMinLength) {
2555 // The right string contains at least one character.
2556 return BuildCreateConsString(length, left, right, allocation_mode);
2557 }
2558 } else if (right_length->IsConstant()) {
2559 HConstant* c_right_length = HConstant::cast(right_length);
2560 DCHECK_NE(0, c_right_length->Integer32Value());
2561 if (c_right_length->Integer32Value() + 1 >= ConsString::kMinLength) {
2562 // The left string contains at least one character.
2563 return BuildCreateConsString(length, left, right, allocation_mode);
2564 }
2565 }
2566
2567 // Check if we should create a cons string.
2568 IfBuilder if_createcons(this);
2569 if_createcons.If<HCompareNumericAndBranch>(
2570 length, Add<HConstant>(ConsString::kMinLength), Token::GTE);
2571 if_createcons.Then();
2572 {
2573 // Create a cons string.
2574 Push(BuildCreateConsString(length, left, right, allocation_mode));
2575 }
2576 if_createcons.Else();
2577 {
2578 // Determine the string instance types.
2579 HValue* left_instance_type = AddLoadStringInstanceType(left);
2580 HValue* right_instance_type = AddLoadStringInstanceType(right);
2581
2582 // Compute union and difference of instance types.
2583 HValue* ored_instance_types = AddUncasted<HBitwise>(
2584 Token::BIT_OR, left_instance_type, right_instance_type);
2585 HValue* xored_instance_types = AddUncasted<HBitwise>(
2586 Token::BIT_XOR, left_instance_type, right_instance_type);
2587
2588 // Check if both strings have the same encoding and both are
2589 // sequential.
2590 IfBuilder if_sameencodingandsequential(this);
2591 if_sameencodingandsequential.If<HCompareNumericAndBranch>(
2592 AddUncasted<HBitwise>(
2593 Token::BIT_AND, xored_instance_types,
2594 Add<HConstant>(static_cast<int32_t>(kStringEncodingMask))),
2595 graph()->GetConstant0(), Token::EQ);
2596 if_sameencodingandsequential.And();
2597 STATIC_ASSERT(kSeqStringTag == 0);
2598 if_sameencodingandsequential.If<HCompareNumericAndBranch>(
2599 AddUncasted<HBitwise>(
2600 Token::BIT_AND, ored_instance_types,
2601 Add<HConstant>(static_cast<int32_t>(kStringRepresentationMask))),
2602 graph()->GetConstant0(), Token::EQ);
2603 if_sameencodingandsequential.Then();
2604 {
2605 HConstant* string_map =
2606 Add<HConstant>(isolate()->factory()->string_map());
2607 HConstant* one_byte_string_map =
2608 Add<HConstant>(isolate()->factory()->one_byte_string_map());
2609
2610 // Determine map and size depending on whether result is one-byte string.
2611 IfBuilder if_onebyte(this);
2612 STATIC_ASSERT(kOneByteStringTag != 0);
2613 if_onebyte.If<HCompareNumericAndBranch>(
2614 AddUncasted<HBitwise>(
2615 Token::BIT_AND, ored_instance_types,
2616 Add<HConstant>(static_cast<int32_t>(kStringEncodingMask))),
2617 graph()->GetConstant0(), Token::NE);
2618 if_onebyte.Then();
2619 {
2620 // Allocate sequential one-byte string object.
2621 Push(length);
2622 Push(one_byte_string_map);
2623 }
2624 if_onebyte.Else();
2625 {
2626 // Allocate sequential two-byte string object.
2627 HValue* size = AddUncasted<HShl>(length, graph()->GetConstant1());
2628 size->ClearFlag(HValue::kCanOverflow);
2629 size->SetFlag(HValue::kUint32);
2630 Push(size);
2631 Push(string_map);
2632 }
2633 if_onebyte.End();
2634 HValue* map = Pop();
2635
2636 // Calculate the number of bytes needed for the characters in the
2637 // string while observing object alignment.
2638 STATIC_ASSERT((SeqString::kHeaderSize & kObjectAlignmentMask) == 0);
2639 HValue* size = BuildObjectSizeAlignment(Pop(), SeqString::kHeaderSize);
2640
2641 IfBuilder if_size(this);
2642 if_size.If<HCompareNumericAndBranch>(
2643 size, Add<HConstant>(Page::kMaxRegularHeapObjectSize), Token::LT);
2644 if_size.Then();
2645 {
2646 // Allocate the string object. HAllocate does not care whether we pass
2647 // STRING_TYPE or ONE_BYTE_STRING_TYPE here, so we just use STRING_TYPE.
2648 HAllocate* result =
2649 BuildAllocate(size, HType::String(), STRING_TYPE, allocation_mode);
2650 Add<HStoreNamedField>(result, HObjectAccess::ForMap(), map);
2651
2652 // Initialize the string fields.
2653 Add<HStoreNamedField>(result, HObjectAccess::ForStringHashField(),
2654 Add<HConstant>(String::kEmptyHashField));
2655 Add<HStoreNamedField>(result, HObjectAccess::ForStringLength(), length);
2656
2657 // Copy characters to the result string.
2658 IfBuilder if_twobyte(this);
2659 if_twobyte.If<HCompareObjectEqAndBranch>(map, string_map);
2660 if_twobyte.Then();
2661 {
2662 // Copy characters from the left string.
2663 BuildCopySeqStringChars(
2664 left, graph()->GetConstant0(), String::TWO_BYTE_ENCODING, result,
2665 graph()->GetConstant0(), String::TWO_BYTE_ENCODING, left_length);
2666
2667 // Copy characters from the right string.
2668 BuildCopySeqStringChars(
2669 right, graph()->GetConstant0(), String::TWO_BYTE_ENCODING, result,
2670 left_length, String::TWO_BYTE_ENCODING, right_length);
2671 }
2672 if_twobyte.Else();
2673 {
2674 // Copy characters from the left string.
2675 BuildCopySeqStringChars(
2676 left, graph()->GetConstant0(), String::ONE_BYTE_ENCODING, result,
2677 graph()->GetConstant0(), String::ONE_BYTE_ENCODING, left_length);
2678
2679 // Copy characters from the right string.
2680 BuildCopySeqStringChars(
2681 right, graph()->GetConstant0(), String::ONE_BYTE_ENCODING, result,
2682 left_length, String::ONE_BYTE_ENCODING, right_length);
2683 }
2684 if_twobyte.End();
2685
2686 // Count the native string addition.
2687 AddIncrementCounter(isolate()->counters()->string_add_native());
2688
2689 // Return the sequential string.
2690 Push(result);
2691 }
2692 if_size.Else();
2693 {
2694 // Fallback to the runtime to add the two strings. The string has to be
2695 // allocated in LO space.
2696 Add<HPushArguments>(left, right);
2697 Push(Add<HCallRuntime>(Runtime::FunctionForId(Runtime::kStringAdd), 2));
2698 }
2699 if_size.End();
2700 }
2701 if_sameencodingandsequential.Else();
2702 {
2703 // Fallback to the runtime to add the two strings.
2704 Add<HPushArguments>(left, right);
2705 Push(Add<HCallRuntime>(Runtime::FunctionForId(Runtime::kStringAdd), 2));
2706 }
2707 if_sameencodingandsequential.End();
2708 }
2709 if_createcons.End();
2710
2711 return Pop();
2712}
2713
2714
// Concatenates |left| and |right| strings. Either operand being empty
// short-circuits to returning the other operand unchanged; only two
// non-empty strings reach the real concatenation in
// BuildUncheckedStringAdd. |allocation_mode| controls how a fresh result
// string would be allocated.
HValue* HGraphBuilder::BuildStringAdd(
    HValue* left,
    HValue* right,
    HAllocationMode allocation_mode) {
  // Nothing in here has observable side effects; suppress simulates.
  NoObservableSideEffectsScope no_effects(this);

  // Determine string lengths.
  HValue* left_length = AddLoadStringLength(left);
  HValue* right_length = AddLoadStringLength(right);

  // Check if left string is empty.
  IfBuilder if_leftempty(this);
  if_leftempty.If<HCompareNumericAndBranch>(
      left_length, graph()->GetConstant0(), Token::EQ);
  if_leftempty.Then();
  {
    // Count the native string addition.
    AddIncrementCounter(isolate()->counters()->string_add_native());

    // Just return the right string.
    Push(right);
  }
  if_leftempty.Else();
  {
    // Check if right string is empty.
    IfBuilder if_rightempty(this);
    if_rightempty.If<HCompareNumericAndBranch>(
        right_length, graph()->GetConstant0(), Token::EQ);
    if_rightempty.Then();
    {
      // Count the native string addition.
      AddIncrementCounter(isolate()->counters()->string_add_native());

      // Just return the left string.
      Push(left);
    }
    if_rightempty.Else();
    {
      // Add the two non-empty strings.
      Push(BuildUncheckedStringAdd(left, right, allocation_mode));
    }
    if_rightempty.End();
  }
  if_leftempty.End();

  // All three paths pushed exactly one value (the result).
  return Pop();
}
2762
2763
// Emits a monomorphic keyed element load or store on |checked_object|, whose
// map is assumed to have been checked already (or is implied inside a stub).
// Handles fast smi/object/double elements as well as fixed typed arrays,
// including grow-on-store, copy-on-write, and ignore-out-of-bounds store
// modes. Returns the emitted load/store instruction.
HInstruction* HGraphBuilder::BuildUncheckedMonomorphicElementAccess(
    HValue* checked_object,
    HValue* key,
    HValue* val,
    bool is_js_array,
    ElementsKind elements_kind,
    PropertyAccessType access_type,
    LoadKeyedHoleMode load_mode,
    KeyedAccessStoreMode store_mode) {
  DCHECK(top_info()->IsStub() || checked_object->IsCompareMap() ||
         checked_object->IsCheckMaps());
  // Typed-array backing stores never belong to a JSArray.
  DCHECK(!IsFixedTypedArrayElementsKind(elements_kind) || !is_js_array);
  // No GVNFlag is necessary for ElementsKind if there is an explicit dependency
  // on a HElementsTransition instruction. The flag can also be removed if the
  // map to check has FAST_HOLEY_ELEMENTS, since there can be no further
  // ElementsKind transitions. Finally, the dependency can be removed for stores
  // for FAST_ELEMENTS, since a transition to HOLEY elements won't change the
  // generated store code.
  if ((elements_kind == FAST_HOLEY_ELEMENTS) ||
      (elements_kind == FAST_ELEMENTS && access_type == STORE)) {
    checked_object->ClearDependsOnFlag(kElementsKind);
  }

  bool fast_smi_only_elements = IsFastSmiElementsKind(elements_kind);
  bool fast_elements = IsFastObjectElementsKind(elements_kind);
  HValue* elements = AddLoadElements(checked_object);
  // Fast stores must not write into a copy-on-write backing store; verify the
  // elements map unless the COW case is handled explicitly below.
  if (access_type == STORE && (fast_elements || fast_smi_only_elements) &&
      store_mode != STORE_NO_TRANSITION_HANDLE_COW) {
    HCheckMaps* check_cow_map = Add<HCheckMaps>(
        elements, isolate()->factory()->fixed_array_map());
    check_cow_map->ClearDependsOnFlag(kElementsKind);
  }
  // JSArrays carry their own length; plain objects use the backing store's.
  HInstruction* length = NULL;
  if (is_js_array) {
    length = Add<HLoadNamedField>(
        checked_object->ActualValue(), checked_object,
        HObjectAccess::ForArrayLength(elements_kind));
  } else {
    length = AddLoadFixedArrayLength(elements);
  }
  length->set_type(HType::Smi());
  HValue* checked_key = NULL;
  if (IsFixedTypedArrayElementsKind(elements_kind)) {
    // Typed-array path: the access must not touch a neutered (detached)
    // ArrayBuffer, and goes through external_pointer + base_pointer.
    checked_object = Add<HCheckArrayBufferNotNeutered>(checked_object);

    HValue* external_pointer = Add<HLoadNamedField>(
        elements, nullptr,
        HObjectAccess::ForFixedTypedArrayBaseExternalPointer());
    HValue* base_pointer = Add<HLoadNamedField>(
        elements, nullptr, HObjectAccess::ForFixedTypedArrayBaseBasePointer());
    HValue* backing_store = AddUncasted<HAdd>(external_pointer, base_pointer,
                                              AddOfExternalAndTagged);

    if (store_mode == STORE_NO_TRANSITION_IGNORE_OUT_OF_BOUNDS) {
      // Stores with key >= length are silently dropped; negative keys deopt.
      NoObservableSideEffectsScope no_effects(this);
      IfBuilder length_checker(this);
      length_checker.If<HCompareNumericAndBranch>(key, length, Token::LT);
      length_checker.Then();
      IfBuilder negative_checker(this);
      HValue* bounds_check = negative_checker.If<HCompareNumericAndBranch>(
          key, graph()->GetConstant0(), Token::GTE);
      negative_checker.Then();
      HInstruction* result = AddElementAccess(
          backing_store, key, val, bounds_check, checked_object->ActualValue(),
          elements_kind, access_type);
      negative_checker.ElseDeopt(Deoptimizer::kNegativeKeyEncountered);
      negative_checker.End();
      length_checker.End();
      return result;
    } else {
      DCHECK(store_mode == STANDARD_STORE);
      checked_key = Add<HBoundsCheck>(key, length);
      return AddElementAccess(backing_store, checked_key, val, checked_object,
                              checked_object->ActualValue(), elements_kind,
                              access_type);
    }
  }
  DCHECK(fast_smi_only_elements ||
         fast_elements ||
         IsFastDoubleElementsKind(elements_kind));

  // In case val is stored into a fast smi array, assure that the value is a smi
  // before manipulating the backing store. Otherwise the actual store may
  // deopt, leaving the backing store in an invalid state.
  if (access_type == STORE && IsFastSmiElementsKind(elements_kind) &&
      !val->type().IsSmi()) {
    val = AddUncasted<HForceRepresentation>(val, Representation::Smi());
  }

  if (IsGrowStoreMode(store_mode)) {
    // Out-of-bounds stores may grow the backing store; no bounds check here.
    NoObservableSideEffectsScope no_effects(this);
    Representation representation = HStoreKeyed::RequiredValueRepresentation(
        elements_kind, STORE_TO_INITIALIZED_ENTRY);
    val = AddUncasted<HForceRepresentation>(val, representation);
    elements = BuildCheckForCapacityGrow(checked_object, elements,
                                         elements_kind, length, key,
                                         is_js_array, access_type);
    checked_key = key;
  } else {
    checked_key = Add<HBoundsCheck>(key, length);

    if (access_type == STORE && (fast_elements || fast_smi_only_elements)) {
      if (store_mode == STORE_NO_TRANSITION_HANDLE_COW) {
        // Copy a COW backing store before writing into it.
        NoObservableSideEffectsScope no_effects(this);
        elements = BuildCopyElementsOnWrite(checked_object, elements,
                                            elements_kind, length);
      } else {
        HCheckMaps* check_cow_map = Add<HCheckMaps>(
            elements, isolate()->factory()->fixed_array_map());
        check_cow_map->ClearDependsOnFlag(kElementsKind);
      }
    }
  }
  return AddElementAccess(elements, checked_key, val, checked_object, nullptr,
                          elements_kind, access_type, load_mode);
}
2880
2881
// Allocates a JSArray for |length_argument| via |array_builder|. Constant
// smi lengths are handled statically (empty array, or capacity == length);
// dynamic lengths are bounds-checked against the maximal fast-elements
// length and branch between a small preallocated array (length 0) and an
// array of exactly the requested length.
HValue* HGraphBuilder::BuildAllocateArrayFromLength(
    JSArrayBuilder* array_builder,
    HValue* length_argument) {
  if (length_argument->IsConstant() &&
      HConstant::cast(length_argument)->HasSmiValue()) {
    int array_length = HConstant::cast(length_argument)->Integer32Value();
    if (array_length == 0) {
      return array_builder->AllocateEmptyArray();
    } else {
      // Known non-zero length: capacity equals length.
      return array_builder->AllocateArray(length_argument,
                                          length_argument);
    }
  }

  HValue* constant_zero = graph()->GetConstant0();
  HConstant* max_alloc_length =
      Add<HConstant>(JSArray::kInitialMaxFastElementArray);
  HInstruction* checked_length = Add<HBoundsCheck>(length_argument,
                                                   max_alloc_length);
  IfBuilder if_builder(this);
  if_builder.If<HCompareNumericAndBranch>(checked_length, constant_zero,
                                          Token::EQ);
  if_builder.Then();
  const int initial_capacity = JSArray::kPreallocatedArrayElements;
  HConstant* initial_capacity_node = Add<HConstant>(initial_capacity);
  Push(initial_capacity_node);  // capacity
  Push(constant_zero);          // length
  if_builder.Else();
  if (!(top_info()->IsStub()) &&
      IsFastPackedElementsKind(array_builder->kind())) {
    // We'll come back later with better (holey) feedback.
    if_builder.Deopt(
        Deoptimizer::kHoleyArrayDespitePackedElements_kindFeedback);
  } else {
    Push(checked_length);  // capacity
    Push(checked_length);  // length
  }
  if_builder.End();

  // Figure out total size
  HValue* length = Pop();
  HValue* capacity = Pop();
  return array_builder->AllocateArray(capacity, length);
}
2926
2927
2928HValue* HGraphBuilder::BuildCalculateElementsSize(ElementsKind kind,
2929 HValue* capacity) {
2930 int elements_size = IsFastDoubleElementsKind(kind)
2931 ? kDoubleSize
2932 : kPointerSize;
2933
2934 HConstant* elements_size_value = Add<HConstant>(elements_size);
2935 HInstruction* mul =
2936 HMul::NewImul(isolate(), zone(), context(), capacity->ActualValue(),
2937 elements_size_value);
2938 AddInstruction(mul);
2939 mul->ClearFlag(HValue::kCanOverflow);
2940
2941 STATIC_ASSERT(FixedDoubleArray::kHeaderSize == FixedArray::kHeaderSize);
2942
2943 HConstant* header_size = Add<HConstant>(FixedArray::kHeaderSize);
2944 HValue* total_size = AddUncasted<HAdd>(mul, header_size);
2945 total_size->ClearFlag(HValue::kCanOverflow);
2946 return total_size;
2947}
2948
2949
2950HAllocate* HGraphBuilder::AllocateJSArrayObject(AllocationSiteMode mode) {
2951 int base_size = JSArray::kSize;
2952 if (mode == TRACK_ALLOCATION_SITE) {
2953 base_size += AllocationMemento::kSize;
2954 }
2955 HConstant* size_in_bytes = Add<HConstant>(base_size);
Ben Murdochc5610432016-08-08 18:44:38 +01002956 return Add<HAllocate>(size_in_bytes, HType::JSArray(), NOT_TENURED,
2957 JS_OBJECT_TYPE, graph()->GetConstant0());
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00002958}
2959
2960
2961HConstant* HGraphBuilder::EstablishElementsAllocationSize(
2962 ElementsKind kind,
2963 int capacity) {
2964 int base_size = IsFastDoubleElementsKind(kind)
2965 ? FixedDoubleArray::SizeFor(capacity)
2966 : FixedArray::SizeFor(capacity);
2967
2968 return Add<HConstant>(base_size);
2969}
2970
2971
2972HAllocate* HGraphBuilder::BuildAllocateElements(ElementsKind kind,
2973 HValue* size_in_bytes) {
2974 InstanceType instance_type = IsFastDoubleElementsKind(kind)
2975 ? FIXED_DOUBLE_ARRAY_TYPE
2976 : FIXED_ARRAY_TYPE;
2977
2978 return Add<HAllocate>(size_in_bytes, HType::HeapObject(), NOT_TENURED,
Ben Murdochc5610432016-08-08 18:44:38 +01002979 instance_type, graph()->GetConstant0());
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00002980}
2981
2982
2983void HGraphBuilder::BuildInitializeElementsHeader(HValue* elements,
2984 ElementsKind kind,
2985 HValue* capacity) {
2986 Factory* factory = isolate()->factory();
2987 Handle<Map> map = IsFastDoubleElementsKind(kind)
2988 ? factory->fixed_double_array_map()
2989 : factory->fixed_array_map();
2990
2991 Add<HStoreNamedField>(elements, HObjectAccess::ForMap(), Add<HConstant>(map));
2992 Add<HStoreNamedField>(elements, HObjectAccess::ForFixedArrayLength(),
2993 capacity);
2994}
2995
2996
2997HValue* HGraphBuilder::BuildAllocateAndInitializeArray(ElementsKind kind,
2998 HValue* capacity) {
2999 // The HForceRepresentation is to prevent possible deopt on int-smi
3000 // conversion after allocation but before the new object fields are set.
3001 capacity = AddUncasted<HForceRepresentation>(capacity, Representation::Smi());
3002 HValue* size_in_bytes = BuildCalculateElementsSize(kind, capacity);
3003 HValue* new_array = BuildAllocateElements(kind, size_in_bytes);
3004 BuildInitializeElementsHeader(new_array, kind, capacity);
3005 return new_array;
3006}
3007
3008
3009void HGraphBuilder::BuildJSArrayHeader(HValue* array,
3010 HValue* array_map,
3011 HValue* elements,
3012 AllocationSiteMode mode,
3013 ElementsKind elements_kind,
3014 HValue* allocation_site_payload,
3015 HValue* length_field) {
3016 Add<HStoreNamedField>(array, HObjectAccess::ForMap(), array_map);
3017
3018 HConstant* empty_fixed_array =
3019 Add<HConstant>(isolate()->factory()->empty_fixed_array());
3020
3021 Add<HStoreNamedField>(
3022 array, HObjectAccess::ForPropertiesPointer(), empty_fixed_array);
3023
3024 Add<HStoreNamedField>(
3025 array, HObjectAccess::ForElementsPointer(),
3026 elements != NULL ? elements : empty_fixed_array);
3027
3028 Add<HStoreNamedField>(
3029 array, HObjectAccess::ForArrayLength(elements_kind), length_field);
3030
3031 if (mode == TRACK_ALLOCATION_SITE) {
3032 BuildCreateAllocationMemento(
3033 array, Add<HConstant>(JSArray::kSize), allocation_site_payload);
3034 }
3035}
3036
3037
3038HInstruction* HGraphBuilder::AddElementAccess(
3039 HValue* elements, HValue* checked_key, HValue* val, HValue* dependency,
3040 HValue* backing_store_owner, ElementsKind elements_kind,
3041 PropertyAccessType access_type, LoadKeyedHoleMode load_mode) {
3042 if (access_type == STORE) {
3043 DCHECK(val != NULL);
3044 if (elements_kind == UINT8_CLAMPED_ELEMENTS) {
3045 val = Add<HClampToUint8>(val);
3046 }
3047 return Add<HStoreKeyed>(elements, checked_key, val, backing_store_owner,
3048 elements_kind, STORE_TO_INITIALIZED_ENTRY);
3049 }
3050
3051 DCHECK(access_type == LOAD);
3052 DCHECK(val == NULL);
3053 HLoadKeyed* load =
3054 Add<HLoadKeyed>(elements, checked_key, dependency, backing_store_owner,
3055 elements_kind, load_mode);
3056 if (elements_kind == UINT32_ELEMENTS) {
3057 graph()->RecordUint32Instruction(load);
3058 }
3059 return load;
3060}
3061
3062
3063HLoadNamedField* HGraphBuilder::AddLoadMap(HValue* object,
3064 HValue* dependency) {
3065 return Add<HLoadNamedField>(object, dependency, HObjectAccess::ForMap());
3066}
3067
3068
3069HLoadNamedField* HGraphBuilder::AddLoadElements(HValue* object,
3070 HValue* dependency) {
3071 return Add<HLoadNamedField>(
3072 object, dependency, HObjectAccess::ForElementsPointer());
3073}
3074
3075
3076HLoadNamedField* HGraphBuilder::AddLoadFixedArrayLength(
3077 HValue* array,
3078 HValue* dependency) {
3079 return Add<HLoadNamedField>(
3080 array, dependency, HObjectAccess::ForFixedArrayLength());
3081}
3082
3083
3084HLoadNamedField* HGraphBuilder::AddLoadArrayLength(HValue* array,
3085 ElementsKind kind,
3086 HValue* dependency) {
3087 return Add<HLoadNamedField>(
3088 array, dependency, HObjectAccess::ForArrayLength(kind));
3089}
3090
3091
3092HValue* HGraphBuilder::BuildNewElementsCapacity(HValue* old_capacity) {
3093 HValue* half_old_capacity = AddUncasted<HShr>(old_capacity,
3094 graph_->GetConstant1());
3095
3096 HValue* new_capacity = AddUncasted<HAdd>(half_old_capacity, old_capacity);
3097 new_capacity->ClearFlag(HValue::kCanOverflow);
3098
3099 HValue* min_growth = Add<HConstant>(16);
3100
3101 new_capacity = AddUncasted<HAdd>(new_capacity, min_growth);
3102 new_capacity->ClearFlag(HValue::kCanOverflow);
3103
3104 return new_capacity;
3105}
3106
3107
3108HValue* HGraphBuilder::BuildGrowElementsCapacity(HValue* object,
3109 HValue* elements,
3110 ElementsKind kind,
3111 ElementsKind new_kind,
3112 HValue* length,
3113 HValue* new_capacity) {
3114 Add<HBoundsCheck>(new_capacity, Add<HConstant>(
3115 (Page::kMaxRegularHeapObjectSize - FixedArray::kHeaderSize) >>
3116 ElementsKindToShiftSize(new_kind)));
3117
3118 HValue* new_elements =
3119 BuildAllocateAndInitializeArray(new_kind, new_capacity);
3120
3121 BuildCopyElements(elements, kind, new_elements,
3122 new_kind, length, new_capacity);
3123
3124 Add<HStoreNamedField>(object, HObjectAccess::ForElementsPointer(),
3125 new_elements);
3126
3127 return new_elements;
3128}
3129
3130
// Stores |value| into every slot of |elements| in the index range
// [from, to); a NULL |to| means "up to the backing store's length". Small
// constant ranges starting at zero are fully unrolled into individual
// stores; everything else uses a backwards-counting loop.
void HGraphBuilder::BuildFillElementsWithValue(HValue* elements,
                                               ElementsKind elements_kind,
                                               HValue* from,
                                               HValue* to,
                                               HValue* value) {
  if (to == NULL) {
    to = AddLoadFixedArrayLength(elements);
  }

  // Special loop unfolding case
  STATIC_ASSERT(JSArray::kPreallocatedArrayElements <=
                kElementLoopUnrollThreshold);
  int initial_capacity = -1;
  if (from->IsInteger32Constant() && to->IsInteger32Constant()) {
    int constant_from = from->GetInteger32Constant();
    int constant_to = to->GetInteger32Constant();

    // Only unroll when the range starts at 0 and is small enough.
    if (constant_from == 0 && constant_to <= kElementLoopUnrollThreshold) {
      initial_capacity = constant_to;
    }
  }

  if (initial_capacity >= 0) {
    // Unrolled path: one store per index, no loop.
    for (int i = 0; i < initial_capacity; i++) {
      HInstruction* key = Add<HConstant>(i);
      Add<HStoreKeyed>(elements, key, value, nullptr, elements_kind);
    }
  } else {
    // Carefully loop backwards so that the "from" remains live through the loop
    // rather than the to. This often corresponds to keeping length live rather
    // then capacity, which helps register allocation, since length is used more
    // other than capacity after filling with holes.
    LoopBuilder builder(this, context(), LoopBuilder::kPostDecrement);

    HValue* key = builder.BeginBody(to, from, Token::GT);

    // key runs (to, from]; store at key - 1 to cover [from, to).
    HValue* adjusted_key = AddUncasted<HSub>(key, graph()->GetConstant1());
    adjusted_key->ClearFlag(HValue::kCanOverflow);

    Add<HStoreKeyed>(elements, adjusted_key, value, nullptr, elements_kind);

    builder.EndBody();
  }
}
3175
3176
3177void HGraphBuilder::BuildFillElementsWithHole(HValue* elements,
3178 ElementsKind elements_kind,
3179 HValue* from,
3180 HValue* to) {
3181 // Fast elements kinds need to be initialized in case statements below cause a
3182 // garbage collection.
3183
3184 HValue* hole = IsFastSmiOrObjectElementsKind(elements_kind)
3185 ? graph()->GetConstantHole()
3186 : Add<HConstant>(HConstant::kHoleNaN);
3187
3188 // Since we're about to store a hole value, the store instruction below must
3189 // assume an elements kind that supports heap object values.
3190 if (IsFastSmiOrObjectElementsKind(elements_kind)) {
3191 elements_kind = FAST_HOLEY_ELEMENTS;
3192 }
3193
3194 BuildFillElementsWithValue(elements, elements_kind, from, to, hole);
3195}
3196
3197
3198void HGraphBuilder::BuildCopyProperties(HValue* from_properties,
3199 HValue* to_properties, HValue* length,
3200 HValue* capacity) {
3201 ElementsKind kind = FAST_ELEMENTS;
3202
3203 BuildFillElementsWithValue(to_properties, kind, length, capacity,
3204 graph()->GetConstantUndefined());
3205
3206 LoopBuilder builder(this, context(), LoopBuilder::kPostDecrement);
3207
3208 HValue* key = builder.BeginBody(length, graph()->GetConstant0(), Token::GT);
3209
3210 key = AddUncasted<HSub>(key, graph()->GetConstant1());
3211 key->ClearFlag(HValue::kCanOverflow);
3212
3213 HValue* element =
3214 Add<HLoadKeyed>(from_properties, key, nullptr, nullptr, kind);
3215
3216 Add<HStoreKeyed>(to_properties, key, element, nullptr, kind);
3217
3218 builder.EndBody();
3219}
3220
3221
// Copies |length| elements from |from_elements| (of |from_elements_kind|)
// into |to_elements| (of |to_elements_kind|). Small constant capacities are
// fully unrolled; otherwise a backwards loop is emitted. When copying
// between kinds that represent holes differently, each hole is translated
// explicitly so the destination never holds an invalid value, and
// double-to-object copies pre-fill the destination with holes in case the
// copy itself triggers a GC.
void HGraphBuilder::BuildCopyElements(HValue* from_elements,
                                      ElementsKind from_elements_kind,
                                      HValue* to_elements,
                                      ElementsKind to_elements_kind,
                                      HValue* length,
                                      HValue* capacity) {
  int constant_capacity = -1;
  if (capacity != NULL &&
      capacity->IsConstant() &&
      HConstant::cast(capacity)->HasInteger32Value()) {
    int constant_candidate = HConstant::cast(capacity)->Integer32Value();
    if (constant_candidate <= kElementLoopUnrollThreshold) {
      constant_capacity = constant_candidate;
    }
  }

  bool pre_fill_with_holes =
      IsFastDoubleElementsKind(from_elements_kind) &&
      IsFastObjectElementsKind(to_elements_kind);
  if (pre_fill_with_holes) {
    // If the copy might trigger a GC, make sure that the FixedArray is
    // pre-initialized with holes to make sure that it's always in a
    // consistent state.
    BuildFillElementsWithHole(to_elements, to_elements_kind,
                              graph()->GetConstant0(), NULL);
  }

  if (constant_capacity != -1) {
    // Unroll the loop for small elements kinds.
    for (int i = 0; i < constant_capacity; i++) {
      HValue* key_constant = Add<HConstant>(i);
      HInstruction* value = Add<HLoadKeyed>(
          from_elements, key_constant, nullptr, nullptr, from_elements_kind);
      Add<HStoreKeyed>(to_elements, key_constant, value, nullptr,
                       to_elements_kind);
    }
  } else {
    // Slots past |length| were never written by the loop below; fill them
    // with holes unless the whole store was pre-filled already.
    if (!pre_fill_with_holes &&
        (capacity == NULL || !length->Equals(capacity))) {
      BuildFillElementsWithHole(to_elements, to_elements_kind,
                                length, NULL);
    }

    LoopBuilder builder(this, context(), LoopBuilder::kPostDecrement);

    HValue* key = builder.BeginBody(length, graph()->GetConstant0(),
                                    Token::GT);

    // key runs (0, length]; copy slot key - 1.
    key = AddUncasted<HSub>(key, graph()->GetConstant1());
    key->ClearFlag(HValue::kCanOverflow);

    HValue* element = Add<HLoadKeyed>(from_elements, key, nullptr, nullptr,
                                      from_elements_kind, ALLOW_RETURN_HOLE);

    // Storing holes requires a kind that allows heap objects.
    ElementsKind kind = (IsHoleyElementsKind(from_elements_kind) &&
                         IsFastSmiElementsKind(to_elements_kind))
        ? FAST_HOLEY_ELEMENTS : to_elements_kind;

    if (IsHoleyElementsKind(from_elements_kind) &&
        from_elements_kind != to_elements_kind) {
      // Translate holes between representations (hole object vs. hole NaN).
      IfBuilder if_hole(this);
      if_hole.If<HCompareHoleAndBranch>(element);
      if_hole.Then();
      HConstant* hole_constant = IsFastDoubleElementsKind(to_elements_kind)
          ? Add<HConstant>(HConstant::kHoleNaN)
          : graph()->GetConstantHole();
      Add<HStoreKeyed>(to_elements, key, hole_constant, nullptr, kind);
      if_hole.Else();
      HStoreKeyed* store =
          Add<HStoreKeyed>(to_elements, key, element, nullptr, kind);
      store->SetFlag(HValue::kAllowUndefinedAsNaN);
      if_hole.End();
    } else {
      HStoreKeyed* store =
          Add<HStoreKeyed>(to_elements, key, element, nullptr, kind);
      store->SetFlag(HValue::kAllowUndefinedAsNaN);
    }

    builder.EndBody();
  }

  Counters* counters = isolate()->counters();
  AddIncrementCounter(counters->inlined_copied_elements());
}
3306
3307
3308HValue* HGraphBuilder::BuildCloneShallowArrayCow(HValue* boilerplate,
3309 HValue* allocation_site,
3310 AllocationSiteMode mode,
3311 ElementsKind kind) {
3312 HAllocate* array = AllocateJSArrayObject(mode);
3313
3314 HValue* map = AddLoadMap(boilerplate);
3315 HValue* elements = AddLoadElements(boilerplate);
3316 HValue* length = AddLoadArrayLength(boilerplate, kind);
3317
3318 BuildJSArrayHeader(array,
3319 map,
3320 elements,
3321 mode,
3322 FAST_ELEMENTS,
3323 allocation_site,
3324 length);
3325 return array;
3326}
3327
3328
3329HValue* HGraphBuilder::BuildCloneShallowArrayEmpty(HValue* boilerplate,
3330 HValue* allocation_site,
3331 AllocationSiteMode mode) {
3332 HAllocate* array = AllocateJSArrayObject(mode);
3333
3334 HValue* map = AddLoadMap(boilerplate);
3335
3336 BuildJSArrayHeader(array,
3337 map,
3338 NULL, // set elements to empty fixed array
3339 mode,
3340 FAST_ELEMENTS,
3341 allocation_site,
3342 graph()->GetConstant0());
3343 return array;
3344}
3345
3346
// Clones |boilerplate| as a JSArray with a fresh elements backing store
// copied from the boilerplate's. The statement order below is deliberate:
// size calculation dominates the array allocation, and the boilerplate
// elements are re-loaded after the allocations to ease register pressure.
HValue* HGraphBuilder::BuildCloneShallowArrayNonEmpty(HValue* boilerplate,
                                                      HValue* allocation_site,
                                                      AllocationSiteMode mode,
                                                      ElementsKind kind) {
  HValue* boilerplate_elements = AddLoadElements(boilerplate);
  HValue* capacity = AddLoadFixedArrayLength(boilerplate_elements);

  // Generate size calculation code here in order to make it dominate
  // the JSArray allocation.
  HValue* elements_size = BuildCalculateElementsSize(kind, capacity);

  // Create empty JSArray object for now, store elimination should remove
  // redundant initialization of elements and length fields and at the same
  // time the object will be fully prepared for GC if it happens during
  // elements allocation.
  HValue* result = BuildCloneShallowArrayEmpty(
      boilerplate, allocation_site, mode);

  HAllocate* elements = BuildAllocateElements(kind, elements_size);

  // Hook the fresh backing store into the clone before it is initialized.
  Add<HStoreNamedField>(result, HObjectAccess::ForElementsPointer(), elements);

  // The allocation for the cloned array above causes register pressure on
  // machines with low register counts. Force a reload of the boilerplate
  // elements here to free up a register for the allocation to avoid unnecessary
  // spillage.
  boilerplate_elements = AddLoadElements(boilerplate);
  boilerplate_elements->SetFlag(HValue::kCantBeReplaced);

  // Copy the elements array header.
  for (int i = 0; i < FixedArrayBase::kHeaderSize; i += kPointerSize) {
    HObjectAccess access = HObjectAccess::ForFixedArrayHeader(i);
    Add<HStoreNamedField>(
        elements, access,
        Add<HLoadNamedField>(boilerplate_elements, nullptr, access));
  }

  // And the result of the length
  HValue* length = AddLoadArrayLength(boilerplate, kind);
  Add<HStoreNamedField>(result, HObjectAccess::ForArrayLength(kind), length);

  BuildCopyElements(boilerplate_elements, kind, elements,
                    kind, length, NULL);
  return result;
}
3392
3393
// Emits an AllocationMemento immediately behind |previous_object| (at offset
// |previous_object_size|) pointing back at |allocation_site|, and — when
// allocation-site pretenuring is enabled — increments the site's creation
// count.
void HGraphBuilder::BuildCreateAllocationMemento(
    HValue* previous_object,
    HValue* previous_object_size,
    HValue* allocation_site) {
  DCHECK(allocation_site != NULL);
  // The memento lives inside the same allocation, right after the object.
  HInnerAllocatedObject* allocation_memento = Add<HInnerAllocatedObject>(
      previous_object, previous_object_size, HType::HeapObject());
  AddStoreMapConstant(
      allocation_memento, isolate()->factory()->allocation_memento_map());
  Add<HStoreNamedField>(
      allocation_memento,
      HObjectAccess::ForAllocationMementoSite(),
      allocation_site);
  if (FLAG_allocation_site_pretenuring) {
    HValue* memento_create_count =
        Add<HLoadNamedField>(allocation_site, nullptr,
                             HObjectAccess::ForAllocationSiteOffset(
                                 AllocationSite::kPretenureCreateCountOffset));
    memento_create_count = AddUncasted<HAdd>(
        memento_create_count, graph()->GetConstant1());
    // This smi value is reset to zero after every gc, overflow isn't a problem
    // since the counter is bounded by the new space size.
    memento_create_count->ClearFlag(HValue::kCanOverflow);
    Add<HStoreNamedField>(
        allocation_site, HObjectAccess::ForAllocationSiteOffset(
            AllocationSite::kPretenureCreateCountOffset), memento_create_count);
  }
}
3422
3423
3424HInstruction* HGraphBuilder::BuildGetNativeContext() {
3425 return Add<HLoadNamedField>(
3426 context(), nullptr,
3427 HObjectAccess::ForContextSlot(Context::NATIVE_CONTEXT_INDEX));
3428}
3429
3430
3431HInstruction* HGraphBuilder::BuildGetNativeContext(HValue* closure) {
3432 // Get the global object, then the native context
3433 HInstruction* context = Add<HLoadNamedField>(
3434 closure, nullptr, HObjectAccess::ForFunctionContextPointer());
3435 return Add<HLoadNamedField>(
3436 context, nullptr,
3437 HObjectAccess::ForContextSlot(Context::NATIVE_CONTEXT_INDEX));
3438}
3439
3440
3441HInstruction* HGraphBuilder::BuildGetScriptContext(int context_index) {
3442 HValue* native_context = BuildGetNativeContext();
3443 HValue* script_context_table = Add<HLoadNamedField>(
3444 native_context, nullptr,
3445 HObjectAccess::ForContextSlot(Context::SCRIPT_CONTEXT_TABLE_INDEX));
3446 return Add<HLoadNamedField>(script_context_table, nullptr,
3447 HObjectAccess::ForScriptContext(context_index));
3448}
3449
3450
// Walks the context chain upwards. When |depth| is non-NULL a loop is
// emitted that follows PREVIOUS_INDEX links until the (dynamic) depth
// reaches zero; otherwise the walk is unrolled |depth_value| times
// (a depth of 0 returns the current context unchanged).
HValue* HGraphBuilder::BuildGetParentContext(HValue* depth, int depth_value) {
  HValue* script_context = context();
  if (depth != NULL) {
    HValue* zero = graph()->GetConstant0();

    // Loop-carried state lives on the environment stack.
    Push(script_context);
    Push(depth);

    LoopBuilder loop(this);
    loop.BeginBody(2);  // Drop script_context and depth from last environment
                        // to appease live range building without simulates.
    depth = Pop();
    script_context = Pop();

    // Step one level up the chain and decrement the remaining depth.
    script_context = Add<HLoadNamedField>(
        script_context, nullptr,
        HObjectAccess::ForContextSlot(Context::PREVIOUS_INDEX));
    depth = AddUncasted<HSub>(depth, graph()->GetConstant1());
    depth->ClearFlag(HValue::kCanOverflow);

    IfBuilder if_break(this);
    if_break.If<HCompareNumericAndBranch, HValue*>(depth, zero, Token::EQ);
    if_break.Then();
    {
      Push(script_context);  // The result.
      loop.Break();
    }
    if_break.Else();
    {
      // Not done yet: push the state for the next iteration.
      Push(script_context);
      Push(depth);
    }
    loop.EndBody();
    if_break.End();

    script_context = Pop();
  } else if (depth_value > 0) {
    // Unroll the above loop.
    for (int i = 0; i < depth_value; i++) {
      script_context = Add<HLoadNamedField>(
          script_context, nullptr,
          HObjectAccess::ForContextSlot(Context::PREVIOUS_INDEX));
    }
  }
  return script_context;
}
3497
3498
3499HInstruction* HGraphBuilder::BuildGetArrayFunction() {
3500 HInstruction* native_context = BuildGetNativeContext();
3501 HInstruction* index =
3502 Add<HConstant>(static_cast<int32_t>(Context::ARRAY_FUNCTION_INDEX));
3503 return Add<HLoadKeyed>(native_context, index, nullptr, nullptr,
3504 FAST_ELEMENTS);
3505}
3506
3507
// Loads the JSArrayBufferView field at |index| from |object|, yielding 0
// instead when the view's underlying ArrayBuffer has been neutered
// (detached), as indicated by the buffer's WasNeutered bit field.
HValue* HGraphBuilder::BuildArrayBufferViewFieldAccessor(HValue* object,
                                                         HValue* checked_object,
                                                         FieldIndex index) {
  NoObservableSideEffectsScope scope(this);
  HObjectAccess access = HObjectAccess::ForObservableJSObjectOffset(
      index.offset(), Representation::Tagged());
  HInstruction* buffer = Add<HLoadNamedField>(
      object, checked_object, HObjectAccess::ForJSArrayBufferViewBuffer());
  HInstruction* field = Add<HLoadNamedField>(object, checked_object, access);

  // Extract the WasNeutered bit from the buffer's bit field.
  HInstruction* flags = Add<HLoadNamedField>(
      buffer, nullptr, HObjectAccess::ForJSArrayBufferBitField());
  HValue* was_neutered_mask =
      Add<HConstant>(1 << JSArrayBuffer::WasNeutered::kShift);
  HValue* was_neutered_test =
      AddUncasted<HBitwise>(Token::BIT_AND, flags, was_neutered_mask);

  IfBuilder if_was_neutered(this);
  if_was_neutered.If<HCompareNumericAndBranch>(
      was_neutered_test, graph()->GetConstant0(), Token::NE);
  if_was_neutered.Then();
  Push(graph()->GetConstant0());  // Neutered: the field value is 0.
  if_was_neutered.Else();
  Push(field);
  if_was_neutered.End();

  return Pop();
}
3536
3537
// Builder for a JSArray allocation driven by allocation-site feedback.
// |allocation_site_payload| must be an AllocationSite (checked below when it
// is a constant); |override_mode| can force allocation-site tracking off.
HGraphBuilder::JSArrayBuilder::JSArrayBuilder(HGraphBuilder* builder,
                    ElementsKind kind,
                    HValue* allocation_site_payload,
                    HValue* constructor_function,
                    AllocationSiteOverrideMode override_mode) :
        builder_(builder),
        kind_(kind),
        allocation_site_payload_(allocation_site_payload),
        constructor_function_(constructor_function) {
  DCHECK(!allocation_site_payload->IsConstant() ||
         HConstant::cast(allocation_site_payload)->handle(
             builder_->isolate())->IsAllocationSite());
  // Tracking is used only when the caller did not explicitly disable it.
  mode_ = override_mode == DISABLE_ALLOCATION_SITES
      ? DONT_TRACK_ALLOCATION_SITE
      : AllocationSite::GetMode(kind);
}
3554
3555
// Builder for a JSArray allocation without allocation-site feedback, used
// when only the constructor function is known.
HGraphBuilder::JSArrayBuilder::JSArrayBuilder(HGraphBuilder* builder,
                                              ElementsKind kind,
                                              HValue* constructor_function) :
    builder_(builder),
    kind_(kind),
    mode_(DONT_TRACK_ALLOCATION_SITE),
    allocation_site_payload_(NULL),
    constructor_function_(constructor_function) {
}
3565
3566
// Produces the map for the JSArray being built.  Prefers, in order: a
// constant map (non-stub compilations), the constructor's initial map, and
// finally a native-context lookup keyed by the elements kind.
HValue* HGraphBuilder::JSArrayBuilder::EmitMapCode() {
  if (!builder()->top_info()->IsStub()) {
    // A constant map is fine.
    Handle<Map> map(builder()->isolate()->get_initial_js_array_map(kind_),
                    builder()->isolate());
    return builder()->Add<HConstant>(map);
  }

  if (constructor_function_ != NULL && kind_ == GetInitialFastElementsKind()) {
    // No need for a context lookup if the kind_ matches the initial
    // map, because we can just load the map in that case.
    HObjectAccess access = HObjectAccess::ForPrototypeOrInitialMap();
    return builder()->Add<HLoadNamedField>(constructor_function_, nullptr,
                                           access);
  }

  // TODO(mvstanton): we should always have a constructor function if we
  // are creating a stub.
  HInstruction* native_context = constructor_function_ != NULL
      ? builder()->BuildGetNativeContext(constructor_function_)
      : builder()->BuildGetNativeContext();

  HObjectAccess access =
      HObjectAccess::ForContextSlot(Context::ArrayMapIndex(kind_));
  return builder()->Add<HLoadNamedField>(native_context, nullptr, access);
}
3593
3594
3595HValue* HGraphBuilder::JSArrayBuilder::EmitInternalMapCode() {
3596 // Find the map near the constructor function
3597 HObjectAccess access = HObjectAccess::ForPrototypeOrInitialMap();
3598 return builder()->Add<HLoadNamedField>(constructor_function_, nullptr,
3599 access);
3600}
3601
3602
3603HAllocate* HGraphBuilder::JSArrayBuilder::AllocateEmptyArray() {
3604 HConstant* capacity = builder()->Add<HConstant>(initial_capacity());
3605 return AllocateArray(capacity,
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00003606 builder()->graph()->GetConstant0());
3607}
3608
3609
// Allocates a JSArray with a backing store of |capacity| elements and the
// given |length_field|, optionally filling the elements with the hole.
HAllocate* HGraphBuilder::JSArrayBuilder::AllocateArray(
    HValue* capacity,
    HValue* length_field,
    FillMode fill_mode) {
  // These HForceRepresentations are because we store these as fields in the
  // objects we construct, and an int32-to-smi HChange could deopt. Accept
  // the deopt possibility now, before allocation occurs.
  capacity =
      builder()->AddUncasted<HForceRepresentation>(capacity,
                                                   Representation::Smi());
  length_field =
      builder()->AddUncasted<HForceRepresentation>(length_field,
                                                   Representation::Smi());

  // Generate size calculation code here in order to make it dominate
  // the JSArray allocation.
  HValue* elements_size =
      builder()->BuildCalculateElementsSize(kind_, capacity);

  // Bail out for large objects.
  HValue* max_regular_heap_object_size =
      builder()->Add<HConstant>(Page::kMaxRegularHeapObjectSize);
  builder()->Add<HBoundsCheck>(elements_size, max_regular_heap_object_size);

  // Allocate (dealing with failure appropriately)
  HAllocate* array_object = builder()->AllocateJSArrayObject(mode_);

  // Fill in the fields: map, properties, length
  HValue* map;
  if (allocation_site_payload_ == NULL) {
    // No allocation site: take the initial map from the constructor.
    map = EmitInternalMapCode();
  } else {
    map = EmitMapCode();
  }

  builder()->BuildJSArrayHeader(array_object,
                                map,
                                NULL,  // set elements to empty fixed array
                                mode_,
                                kind_,
                                allocation_site_payload_,
                                length_field);

  // Allocate and initialize the elements
  elements_location_ = builder()->BuildAllocateElements(kind_, elements_size);

  builder()->BuildInitializeElementsHeader(elements_location_, kind_, capacity);

  // Set the elements
  builder()->Add<HStoreNamedField>(
      array_object, HObjectAccess::ForElementsPointer(), elements_location_);

  if (fill_mode == FILL_WITH_HOLE) {
    builder()->BuildFillElementsWithHole(elements_location_, kind_,
                                         graph()->GetConstant0(), capacity);
  }

  return array_object;
}
3669
3670
3671HValue* HGraphBuilder::AddLoadJSBuiltin(int context_index) {
3672 HValue* native_context = BuildGetNativeContext();
3673 HObjectAccess function_access = HObjectAccess::ForContextSlot(context_index);
3674 return Add<HLoadNamedField>(native_context, nullptr, function_access);
3675}
3676
// Graph builder for full JS functions (as opposed to code stubs).
HOptimizedGraphBuilder::HOptimizedGraphBuilder(CompilationInfo* info)
    : HGraphBuilder(info, CallInterfaceDescriptor()),
      function_state_(NULL),
      initial_function_state_(this, info, NORMAL_RETURN, 0,
                              TailCallMode::kAllow),
      ast_context_(NULL),
      break_scope_(NULL),
      inlined_count_(0),
      globals_(10, info->zone()),
      osr_(new (info->zone()) HOsrBuilder(this)),
      bounds_(info->zone()) {
  // This is not initialized in the initializer list because the
  // constructor for the initial state relies on function_state_ == NULL
  // to know it's the initial state.
  function_state_ = &initial_function_state_;
  InitializeAstVisitor(info->isolate());
  if (top_info()->is_tracking_positions()) {
    SetSourcePosition(info->shared_info()->start_position());
  }
}
3697
3698
3699HBasicBlock* HOptimizedGraphBuilder::CreateJoin(HBasicBlock* first,
3700 HBasicBlock* second,
3701 BailoutId join_id) {
3702 if (first == NULL) {
3703 return second;
3704 } else if (second == NULL) {
3705 return first;
3706 } else {
3707 HBasicBlock* join_block = graph()->CreateBasicBlock();
3708 Goto(first, join_block);
3709 Goto(second, join_block);
3710 join_block->SetJoinId(join_id);
3711 return join_block;
3712 }
3713}
3714
3715
3716HBasicBlock* HOptimizedGraphBuilder::JoinContinue(IterationStatement* statement,
3717 HBasicBlock* exit_block,
3718 HBasicBlock* continue_block) {
3719 if (continue_block != NULL) {
3720 if (exit_block != NULL) Goto(exit_block, continue_block);
3721 continue_block->SetJoinId(statement->ContinueId());
3722 return continue_block;
3723 }
3724 return exit_block;
3725}
3726
3727
3728HBasicBlock* HOptimizedGraphBuilder::CreateLoop(IterationStatement* statement,
3729 HBasicBlock* loop_entry,
3730 HBasicBlock* body_exit,
3731 HBasicBlock* loop_successor,
3732 HBasicBlock* break_block) {
3733 if (body_exit != NULL) Goto(body_exit, loop_entry);
3734 loop_entry->PostProcessLoopHeader(statement);
3735 if (break_block != NULL) {
3736 if (loop_successor != NULL) Goto(loop_successor, break_block);
3737 break_block->SetJoinId(statement->ExitId());
3738 return break_block;
3739 }
3740 return loop_successor;
3741}
3742
3743
3744// Build a new loop header block and set it as the current block.
3745HBasicBlock* HOptimizedGraphBuilder::BuildLoopEntry() {
3746 HBasicBlock* loop_entry = CreateLoopHeaderBlock();
3747 Goto(loop_entry);
3748 set_current_block(loop_entry);
3749 return loop_entry;
3750}
3751
3752
// Builds a loop entry block, using an on-stack-replacement entry when this
// loop is the OSR target.  OSR inside a do-expression cannot be modeled and
// forces a bailout.
HBasicBlock* HOptimizedGraphBuilder::BuildLoopEntry(
    IterationStatement* statement) {
  HBasicBlock* loop_entry;

  if (osr()->HasOsrEntryAt(statement)) {
    loop_entry = osr()->BuildOsrLoopEntry(statement);
    if (function_state()->IsInsideDoExpressionScope()) {
      Bailout(kDoExpressionUnmodelable);
    }
  } else {
    loop_entry = BuildLoopEntry();
  }
  return loop_entry;
}
3767
3768
// Terminates the block with |instruction| and drops its environment, since
// control never falls through an exit.
void HBasicBlock::FinishExit(HControlInstruction* instruction,
                             SourcePosition position) {
  Finish(instruction, position);
  ClearEnvironment();
}
3774
3775
3776std::ostream& operator<<(std::ostream& os, const HBasicBlock& b) {
3777 return os << "B" << b.block_id();
3778}
3779
// Constructs an empty graph: the start environment, the entry block, and
// default pass state.  Stubs get a register-parameter environment derived
// from |descriptor|; functions get one from the scope/closure.
HGraph::HGraph(CompilationInfo* info, CallInterfaceDescriptor descriptor)
    : isolate_(info->isolate()),
      next_block_id_(0),
      entry_block_(NULL),
      blocks_(8, info->zone()),
      values_(16, info->zone()),
      phi_list_(NULL),
      uint32_instructions_(NULL),
      osr_(NULL),
      info_(info),
      descriptor_(descriptor),
      zone_(info->zone()),
      allow_code_motion_(false),
      use_optimistic_licm_(false),
      depends_on_empty_array_proto_elements_(false),
      type_change_checksum_(0),
      maximum_environment_size_(0),
      no_side_effects_scope_count_(0),
      disallow_adding_new_values_(false),
      inlined_function_infos_(info->zone()) {
  if (info->IsStub()) {
    // For stubs, explicitly add the context to the environment.
    start_environment_ = new (zone_)
        HEnvironment(zone_, descriptor.GetRegisterParameterCount() + 1);
  } else {
    start_environment_ =
        new(zone_) HEnvironment(NULL, info->scope(), info->closure(), zone_);
  }
  start_environment_->set_ast_id(BailoutId::FunctionContext());
  entry_block_ = CreateBasicBlock();
  entry_block_->SetInitialEnvironment(start_environment_);
}
3812
3813
3814HBasicBlock* HGraph::CreateBasicBlock() {
3815 HBasicBlock* result = new(zone()) HBasicBlock(this);
3816 blocks_.Add(result, zone());
3817 return result;
3818}
3819
3820
3821void HGraph::FinalizeUniqueness() {
3822 DisallowHeapAllocation no_gc;
3823 for (int i = 0; i < blocks()->length(); ++i) {
3824 for (HInstructionIterator it(blocks()->at(i)); !it.Done(); it.Advance()) {
3825 it.Current()->FinalizeUniqueness();
3826 }
3827 }
3828}
3829
3830
3831int HGraph::SourcePositionToScriptPosition(SourcePosition pos) {
3832 return (FLAG_hydrogen_track_positions && !pos.IsUnknown())
Ben Murdochc5610432016-08-08 18:44:38 +01003833 ? inlined_function_infos_.at(pos.inlining_id()).start_position +
3834 pos.position()
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00003835 : pos.raw();
3836}
3837
3838
3839// Block ordering was implemented with two mutually recursive methods,
3840// HGraph::Postorder and HGraph::PostorderLoopBlocks.
3841// The recursion could lead to stack overflow so the algorithm has been
3842// implemented iteratively.
3843// At a high level the algorithm looks like this:
3844//
3845// Postorder(block, loop_header) : {
3846// if (block has already been visited or is of another loop) return;
3847// mark block as visited;
3848// if (block is a loop header) {
3849// VisitLoopMembers(block, loop_header);
3850// VisitSuccessorsOfLoopHeader(block);
3851// } else {
3852// VisitSuccessors(block)
3853// }
3854// put block in result list;
3855// }
3856//
3857// VisitLoopMembers(block, outer_loop_header) {
3858// foreach (block b in block loop members) {
3859// VisitSuccessorsOfLoopMember(b, outer_loop_header);
3860// if (b is loop header) VisitLoopMembers(b);
3861// }
3862// }
3863//
3864// VisitSuccessorsOfLoopMember(block, outer_loop_header) {
3865// foreach (block b in block successors) Postorder(b, outer_loop_header)
3866// }
3867//
3868// VisitSuccessorsOfLoopHeader(block) {
3869// foreach (block b in block successors) Postorder(b, block)
3870// }
3871//
3872// VisitSuccessors(block, loop_header) {
3873// foreach (block b in block successors) Postorder(b, loop_header)
3874// }
3875//
3876// The ordering is started calling Postorder(entry, NULL).
3877//
3878// Each instance of PostorderProcessor represents the "stack frame" of the
3879// recursion, and particularly keeps the state of the loop (iteration) of the
3880// "Visit..." function it represents.
3881// To recycle memory we keep all the frames in a double linked list but
3882// this means that we cannot use constructors to initialize the frames.
3883//
// Iterative implementation of the postorder block ordering: each
// PostorderProcessor instance is one "stack frame" of the recursive
// formulation described in the comment above.
class PostorderProcessor : public ZoneObject {
 public:
  // Back link (towards the stack bottom).
  PostorderProcessor* parent() {return father_; }
  // Forward link (towards the stack top).
  PostorderProcessor* child() {return child_; }
  HBasicBlock* block() { return block_; }
  HLoopInformation* loop() { return loop_; }
  HBasicBlock* loop_header() { return loop_header_; }

  // Creates the bottom stack frame and starts the traversal at |block|.
  static PostorderProcessor* CreateEntryProcessor(Zone* zone,
                                                  HBasicBlock* block) {
    PostorderProcessor* result = new(zone) PostorderProcessor(NULL);
    return result->SetupSuccessors(zone, block, NULL);
  }

  // Performs one traversal step: either descends into a new frame or, when
  // the current frame's cycle is exhausted, backtracks towards the bottom.
  // Returns NULL when the whole traversal is finished.
  PostorderProcessor* PerformStep(Zone* zone,
                                  ZoneList<HBasicBlock*>* order) {
    PostorderProcessor* next =
        PerformNonBacktrackingStep(zone, order);
    if (next != NULL) {
      return next;
    } else {
      return Backtrack(zone, order);
    }
  }

 private:
  explicit PostorderProcessor(PostorderProcessor* father)
      : father_(father), child_(NULL), successor_iterator(NULL) { }

  // Each enum value states the cycle whose state is kept by this instance.
  enum LoopKind {
    NONE,
    SUCCESSORS,
    SUCCESSORS_OF_LOOP_HEADER,
    LOOP_MEMBERS,
    SUCCESSORS_OF_LOOP_MEMBER
  };

  // Each "Setup..." method is like a constructor for a cycle state.
  PostorderProcessor* SetupSuccessors(Zone* zone,
                                      HBasicBlock* block,
                                      HBasicBlock* loop_header) {
    if (block == NULL || block->IsOrdered() ||
        block->parent_loop_header() != loop_header) {
      // Already visited, or the block belongs to another loop: this frame
      // becomes inert (kind NONE) and will be popped immediately.
      kind_ = NONE;
      block_ = NULL;
      loop_ = NULL;
      loop_header_ = NULL;
      return this;
    } else {
      block_ = block;
      loop_ = NULL;
      block->MarkAsOrdered();

      if (block->IsLoopHeader()) {
        kind_ = SUCCESSORS_OF_LOOP_HEADER;
        loop_header_ = block;
        InitializeSuccessors();
        PostorderProcessor* result = Push(zone);
        // Visit the loop's members before this header's own successors.
        return result->SetupLoopMembers(zone, block, block->loop_information(),
                                        loop_header);
      } else {
        DCHECK(block->IsFinished());
        kind_ = SUCCESSORS;
        loop_header_ = loop_header;
        InitializeSuccessors();
        return this;
      }
    }
  }

  PostorderProcessor* SetupLoopMembers(Zone* zone,
                                       HBasicBlock* block,
                                       HLoopInformation* loop,
                                       HBasicBlock* loop_header) {
    kind_ = LOOP_MEMBERS;
    block_ = block;
    loop_ = loop;
    loop_header_ = loop_header;
    InitializeLoopMembers();
    return this;
  }

  PostorderProcessor* SetupSuccessorsOfLoopMember(
      HBasicBlock* block,
      HLoopInformation* loop,
      HBasicBlock* loop_header) {
    kind_ = SUCCESSORS_OF_LOOP_MEMBER;
    block_ = block;
    loop_ = loop;
    loop_header_ = loop_header;
    InitializeSuccessors();
    return this;
  }

  // This method "allocates" a new stack frame.
  PostorderProcessor* Push(Zone* zone) {
    if (child_ == NULL) {
      // Frames are recycled: each child is allocated at most once.
      child_ = new(zone) PostorderProcessor(this);
    }
    return child_;
  }

  // Appends this frame's block to the final ordering; by now all of its
  // non-loop-header successors must already be in the order.
  void ClosePostorder(ZoneList<HBasicBlock*>* order, Zone* zone) {
    DCHECK(block_->end()->FirstSuccessor() == NULL ||
           order->Contains(block_->end()->FirstSuccessor()) ||
           block_->end()->FirstSuccessor()->IsLoopHeader());
    DCHECK(block_->end()->SecondSuccessor() == NULL ||
           order->Contains(block_->end()->SecondSuccessor()) ||
           block_->end()->SecondSuccessor()->IsLoopHeader());
    order->Add(block_, zone);
  }

  // This method is the basic block to walk up the stack.
  PostorderProcessor* Pop(Zone* zone,
                          ZoneList<HBasicBlock*>* order) {
    switch (kind_) {
      case SUCCESSORS:
      case SUCCESSORS_OF_LOOP_HEADER:
        ClosePostorder(order, zone);
        return father_;
      case LOOP_MEMBERS:
        return father_;
      case SUCCESSORS_OF_LOOP_MEMBER:
        if (block()->IsLoopHeader() && block() != loop_->loop_header()) {
          // In this case we need to perform a LOOP_MEMBERS cycle so we
          // initialize it and return this instead of father.
          return SetupLoopMembers(zone, block(),
                                  block()->loop_information(), loop_header_);
        } else {
          return father_;
        }
      case NONE:
        return father_;
    }
    UNREACHABLE();
    return NULL;
  }

  // Walks up the stack.
  PostorderProcessor* Backtrack(Zone* zone,
                                ZoneList<HBasicBlock*>* order) {
    PostorderProcessor* parent = Pop(zone, order);
    while (parent != NULL) {
      PostorderProcessor* next =
          parent->PerformNonBacktrackingStep(zone, order);
      if (next != NULL) {
        return next;
      } else {
        parent = parent->Pop(zone, order);
      }
    }
    return NULL;
  }

  // Advances this frame's cycle by one element, pushing a child frame for
  // it; returns NULL when the cycle is exhausted.
  PostorderProcessor* PerformNonBacktrackingStep(
      Zone* zone,
      ZoneList<HBasicBlock*>* order) {
    HBasicBlock* next_block;
    switch (kind_) {
      case SUCCESSORS:
        next_block = AdvanceSuccessors();
        if (next_block != NULL) {
          PostorderProcessor* result = Push(zone);
          return result->SetupSuccessors(zone, next_block, loop_header_);
        }
        break;
      case SUCCESSORS_OF_LOOP_HEADER:
        next_block = AdvanceSuccessors();
        if (next_block != NULL) {
          PostorderProcessor* result = Push(zone);
          return result->SetupSuccessors(zone, next_block, block());
        }
        break;
      case LOOP_MEMBERS:
        next_block = AdvanceLoopMembers();
        if (next_block != NULL) {
          PostorderProcessor* result = Push(zone);
          return result->SetupSuccessorsOfLoopMember(next_block,
                                                     loop_, loop_header_);
        }
        break;
      case SUCCESSORS_OF_LOOP_MEMBER:
        next_block = AdvanceSuccessors();
        if (next_block != NULL) {
          PostorderProcessor* result = Push(zone);
          return result->SetupSuccessors(zone, next_block, loop_header_);
        }
        break;
      case NONE:
        return NULL;
    }
    return NULL;
  }

  // The following two methods implement a "foreach b in successors" cycle.
  void InitializeSuccessors() {
    loop_index = 0;
    loop_length = 0;
    successor_iterator = HSuccessorIterator(block_->end());
  }

  HBasicBlock* AdvanceSuccessors() {
    if (!successor_iterator.Done()) {
      HBasicBlock* result = successor_iterator.Current();
      successor_iterator.Advance();
      return result;
    }
    return NULL;
  }

  // The following two methods implement a "foreach b in loop members" cycle.
  void InitializeLoopMembers() {
    loop_index = 0;
    loop_length = loop_->blocks()->length();
  }

  HBasicBlock* AdvanceLoopMembers() {
    if (loop_index < loop_length) {
      HBasicBlock* result = loop_->blocks()->at(loop_index);
      loop_index++;
      return result;
    } else {
      return NULL;
    }
  }

  LoopKind kind_;
  PostorderProcessor* father_;
  PostorderProcessor* child_;
  HLoopInformation* loop_;
  HBasicBlock* block_;
  HBasicBlock* loop_header_;
  int loop_index;
  int loop_length;
  HSuccessorIterator successor_iterator;
};
4123
4124
4125void HGraph::OrderBlocks() {
4126 CompilationPhase phase("H_Block ordering", info());
4127
4128#ifdef DEBUG
4129 // Initially the blocks must not be ordered.
4130 for (int i = 0; i < blocks_.length(); ++i) {
4131 DCHECK(!blocks_[i]->IsOrdered());
4132 }
4133#endif
4134
4135 PostorderProcessor* postorder =
4136 PostorderProcessor::CreateEntryProcessor(zone(), blocks_[0]);
4137 blocks_.Rewind(0);
4138 while (postorder) {
4139 postorder = postorder->PerformStep(zone(), &blocks_);
4140 }
4141
4142#ifdef DEBUG
4143 // Now all blocks must be marked as ordered.
4144 for (int i = 0; i < blocks_.length(); ++i) {
4145 DCHECK(blocks_[i]->IsOrdered());
4146 }
4147#endif
4148
4149 // Reverse block list and assign block IDs.
4150 for (int i = 0, j = blocks_.length(); --j >= i; ++i) {
4151 HBasicBlock* bi = blocks_[i];
4152 HBasicBlock* bj = blocks_[j];
4153 bi->set_block_id(j);
4154 bj->set_block_id(i);
4155 blocks_[i] = bj;
4156 blocks_[j] = bi;
4157 }
4158}
4159
4160
4161void HGraph::AssignDominators() {
4162 HPhase phase("H_Assign dominators", this);
4163 for (int i = 0; i < blocks_.length(); ++i) {
4164 HBasicBlock* block = blocks_[i];
4165 if (block->IsLoopHeader()) {
4166 // Only the first predecessor of a loop header is from outside the loop.
4167 // All others are back edges, and thus cannot dominate the loop header.
4168 block->AssignCommonDominator(block->predecessors()->first());
4169 block->AssignLoopSuccessorDominators();
4170 } else {
4171 for (int j = blocks_[i]->predecessors()->length() - 1; j >= 0; --j) {
4172 blocks_[i]->AssignCommonDominator(blocks_[i]->predecessors()->at(j));
4173 }
4174 }
4175 }
4176}
4177
4178
4179bool HGraph::CheckArgumentsPhiUses() {
4180 int block_count = blocks_.length();
4181 for (int i = 0; i < block_count; ++i) {
4182 for (int j = 0; j < blocks_[i]->phis()->length(); ++j) {
4183 HPhi* phi = blocks_[i]->phis()->at(j);
4184 // We don't support phi uses of arguments for now.
4185 if (phi->CheckFlag(HValue::kIsArguments)) return false;
4186 }
4187 }
4188 return true;
4189}
4190
4191
4192bool HGraph::CheckConstPhiUses() {
4193 int block_count = blocks_.length();
4194 for (int i = 0; i < block_count; ++i) {
4195 for (int j = 0; j < blocks_[i]->phis()->length(); ++j) {
4196 HPhi* phi = blocks_[i]->phis()->at(j);
4197 // Check for the hole value (from an uninitialized const).
4198 for (int k = 0; k < phi->OperandCount(); k++) {
4199 if (phi->OperandAt(k) == GetConstantHole()) return false;
4200 }
4201 }
4202 }
4203 return true;
4204}
4205
4206
4207void HGraph::CollectPhis() {
4208 int block_count = blocks_.length();
4209 phi_list_ = new(zone()) ZoneList<HPhi*>(block_count, zone());
4210 for (int i = 0; i < block_count; ++i) {
4211 for (int j = 0; j < blocks_[i]->phis()->length(); ++j) {
4212 HPhi* phi = blocks_[i]->phis()->at(j);
4213 phi_list_->Add(phi, zone());
4214 }
4215 }
4216}
4217
4218
// Implementation of utility class to encapsulate the translation state for
// a (possibly inlined) function.  Constructing one pushes it on the owner's
// function-state stack; the destructor pops it.
FunctionState::FunctionState(HOptimizedGraphBuilder* owner,
                             CompilationInfo* info, InliningKind inlining_kind,
                             int inlining_id, TailCallMode tail_call_mode)
    : owner_(owner),
      compilation_info_(info),
      call_context_(NULL),
      inlining_kind_(inlining_kind),
      tail_call_mode_(tail_call_mode),
      function_return_(NULL),
      test_context_(NULL),
      entry_(NULL),
      arguments_object_(NULL),
      arguments_elements_(NULL),
      inlining_id_(inlining_id),
      outer_source_position_(SourcePosition::Unknown()),
      do_expression_scope_count_(0),
      outer_(owner->function_state()) {
  if (outer_ != NULL) {
    // State for an inline function.
    if (owner->ast_context()->IsTest()) {
      // Inlined into a test context: returns branch directly to the caller's
      // true/false targets instead of flowing to a return block.
      HBasicBlock* if_true = owner->graph()->CreateBasicBlock();
      HBasicBlock* if_false = owner->graph()->CreateBasicBlock();
      if_true->MarkAsInlineReturnTarget(owner->current_block());
      if_false->MarkAsInlineReturnTarget(owner->current_block());
      TestContext* outer_test_context = TestContext::cast(owner->ast_context());
      Expression* cond = outer_test_context->condition();
      // The AstContext constructor pushed on the context stack.  This newed
      // instance is the reason that AstContext can't be BASE_EMBEDDED.
      test_context_ = new TestContext(owner, cond, if_true, if_false);
    } else {
      function_return_ = owner->graph()->CreateBasicBlock();
      function_return()->MarkAsInlineReturnTarget(owner->current_block());
    }
    // Set this after possibly allocating a new TestContext above.
    call_context_ = owner->ast_context();
  }

  // Push on the state stack.
  owner->set_function_state(this);

  if (compilation_info_->is_tracking_positions()) {
    // Remember the caller's position so the destructor can restore it.
    outer_source_position_ = owner->source_position();
    owner->EnterInlinedSource(
        info->shared_info()->start_position(),
        inlining_id);
    owner->SetSourcePosition(info->shared_info()->start_position());
  }
}
4269
4270
// Pops this state off the owner's function-state stack and restores the
// outer function's source position when position tracking is active.
FunctionState::~FunctionState() {
  delete test_context_;
  owner_->set_function_state(outer_);

  if (compilation_info_->is_tracking_positions()) {
    owner_->set_source_position(outer_source_position_);
    owner_->EnterInlinedSource(
        outer_->compilation_info()->shared_info()->start_position(),
        outer_->inlining_id());
  }
}
4282
4283
// Implementation of utility classes to represent an expression's context in
// the AST.  Constructing one pushes it on the owner's context stack.
AstContext::AstContext(HOptimizedGraphBuilder* owner, Expression::Context kind)
    : owner_(owner),
      kind_(kind),
      outer_(owner->ast_context()),
      typeof_mode_(NOT_INSIDE_TYPEOF) {
  owner->set_ast_context(this);  // Push.
#ifdef DEBUG
  DCHECK_EQ(JS_FUNCTION, owner->environment()->frame_type());
  // Remember the stack height so the subclass destructors can verify the
  // expression's net effect on the environment.
  original_length_ = owner->environment()->length();
#endif
}
4297
4298
// Pops this context off the owner's context stack.
AstContext::~AstContext() {
  owner_->set_ast_context(outer_);  // Pop.
}
4302
4303
// An effect context must leave the environment depth unchanged.
EffectContext::~EffectContext() {
  DCHECK(owner()->HasStackOverflow() || owner()->current_block() == NULL ||
         (owner()->environment()->length() == original_length_ &&
          (owner()->environment()->frame_type() == JS_FUNCTION ||
           owner()->environment()->frame_type() == TAIL_CALLER_FUNCTION)));
}
4310
4311
// A value context must push exactly one value onto the environment.
ValueContext::~ValueContext() {
  DCHECK(owner()->HasStackOverflow() || owner()->current_block() == NULL ||
         (owner()->environment()->length() == original_length_ + 1 &&
          (owner()->environment()->frame_type() == JS_FUNCTION ||
           owner()->environment()->frame_type() == TAIL_CALLER_FUNCTION)));
}
4318
4319
// In an effect context the expression's value is not needed.
void EffectContext::ReturnValue(HValue* value) {
  // The value is simply ignored.
}
4323
4324
4325void ValueContext::ReturnValue(HValue* value) {
4326 // The value is tracked in the bailout environment, and communicated
4327 // through the environment as the result of the expression.
4328 if (value->CheckFlag(HValue::kIsArguments)) {
4329 if (flag_ == ARGUMENTS_FAKED) {
4330 value = owner()->graph()->GetConstantUndefined();
4331 } else if (!arguments_allowed()) {
4332 owner()->Bailout(kBadValueContextForArgumentsValue);
4333 }
4334 }
4335 owner()->Push(value);
4336}
4337
4338
// In a test context the value is consumed by branching on its truthiness.
void TestContext::ReturnValue(HValue* value) {
  BuildBranch(value);
}
4342
4343
// Adds |instr| purely for its side effects; its result is not pushed.
void EffectContext::ReturnInstruction(HInstruction* instr, BailoutId ast_id) {
  DCHECK(!instr->IsControlInstruction());
  owner()->AddInstruction(instr);
  if (instr->HasObservableSideEffects()) {
    // Observable side effects need a simulate so a deopt can resume here.
    owner()->Add<HSimulate>(ast_id, REMOVABLE_SIMULATE);
  }
}
4351
4352
4353void EffectContext::ReturnControl(HControlInstruction* instr,
4354 BailoutId ast_id) {
4355 DCHECK(!instr->HasObservableSideEffects());
4356 HBasicBlock* empty_true = owner()->graph()->CreateBasicBlock();
4357 HBasicBlock* empty_false = owner()->graph()->CreateBasicBlock();
4358 instr->SetSuccessorAt(0, empty_true);
4359 instr->SetSuccessorAt(1, empty_false);
4360 owner()->FinishCurrentBlock(instr);
4361 HBasicBlock* join = owner()->CreateJoin(empty_true, empty_false, ast_id);
4362 owner()->set_current_block(join);
4363}
4364
4365
4366void EffectContext::ReturnContinuation(HIfContinuation* continuation,
4367 BailoutId ast_id) {
4368 HBasicBlock* true_branch = NULL;
4369 HBasicBlock* false_branch = NULL;
4370 continuation->Continue(&true_branch, &false_branch);
4371 if (!continuation->IsTrueReachable()) {
4372 owner()->set_current_block(false_branch);
4373 } else if (!continuation->IsFalseReachable()) {
4374 owner()->set_current_block(true_branch);
4375 } else {
4376 HBasicBlock* join = owner()->CreateJoin(true_branch, false_branch, ast_id);
4377 owner()->set_current_block(join);
4378 }
4379}
4380
4381
4382void ValueContext::ReturnInstruction(HInstruction* instr, BailoutId ast_id) {
4383 DCHECK(!instr->IsControlInstruction());
4384 if (!arguments_allowed() && instr->CheckFlag(HValue::kIsArguments)) {
4385 return owner()->Bailout(kBadValueContextForArgumentsObjectValue);
4386 }
4387 owner()->AddInstruction(instr);
4388 owner()->Push(instr);
4389 if (instr->HasObservableSideEffects()) {
4390 owner()->Add<HSimulate>(ast_id, REMOVABLE_SIMULATE);
4391 }
4392}
4393
4394
// Materializes the boolean outcome of |instr|: each successor pushes the
// corresponding constant and the two arms are rejoined.
void ValueContext::ReturnControl(HControlInstruction* instr, BailoutId ast_id) {
  DCHECK(!instr->HasObservableSideEffects());
  if (!arguments_allowed() && instr->CheckFlag(HValue::kIsArguments)) {
    return owner()->Bailout(kBadValueContextForArgumentsObjectValue);
  }
  HBasicBlock* materialize_false = owner()->graph()->CreateBasicBlock();
  HBasicBlock* materialize_true = owner()->graph()->CreateBasicBlock();
  instr->SetSuccessorAt(0, materialize_true);
  instr->SetSuccessorAt(1, materialize_false);
  owner()->FinishCurrentBlock(instr);
  // Push true/false in the respective arm's environment.
  owner()->set_current_block(materialize_true);
  owner()->Push(owner()->graph()->GetConstantTrue());
  owner()->set_current_block(materialize_false);
  owner()->Push(owner()->graph()->GetConstantFalse());
  HBasicBlock* join =
      owner()->CreateJoin(materialize_true, materialize_false, ast_id);
  owner()->set_current_block(join);
}
4413
4414
4415void ValueContext::ReturnContinuation(HIfContinuation* continuation,
4416 BailoutId ast_id) {
4417 HBasicBlock* materialize_true = NULL;
4418 HBasicBlock* materialize_false = NULL;
4419 continuation->Continue(&materialize_true, &materialize_false);
4420 if (continuation->IsTrueReachable()) {
4421 owner()->set_current_block(materialize_true);
4422 owner()->Push(owner()->graph()->GetConstantTrue());
4423 owner()->set_current_block(materialize_true);
4424 }
4425 if (continuation->IsFalseReachable()) {
4426 owner()->set_current_block(materialize_false);
4427 owner()->Push(owner()->graph()->GetConstantFalse());
4428 owner()->set_current_block(materialize_false);
4429 }
4430 if (continuation->TrueAndFalseReachable()) {
4431 HBasicBlock* join =
4432 owner()->CreateJoin(materialize_true, materialize_false, ast_id);
4433 owner()->set_current_block(join);
4434 }
4435}
4436
4437
// Emits a (non-control) instruction whose value decides this test context's
// branch, then builds the branch itself via BuildBranch().
void TestContext::ReturnInstruction(HInstruction* instr, BailoutId ast_id) {
  DCHECK(!instr->IsControlInstruction());
  HOptimizedGraphBuilder* builder = owner();
  builder->AddInstruction(instr);
  // We expect a simulate after every expression with side effects, though
  // this one isn't actually needed (and wouldn't work if it were targeted).
  if (instr->HasObservableSideEffects()) {
    // Temporarily push the value so the simulate's environment captures it,
    // then pop it again; only the branch below consumes the value.
    builder->Push(instr);
    builder->Add<HSimulate>(ast_id, REMOVABLE_SIMULATE);
    builder->Pop();
  }
  BuildBranch(instr);
}
4451
4452
// Wires a control instruction to this test context's if_true()/if_false()
// targets. Empty blocks are inserted on both outgoing edges so the graph
// stays in edge-split form (no direct branch-to-join edges).
void TestContext::ReturnControl(HControlInstruction* instr, BailoutId ast_id) {
  DCHECK(!instr->HasObservableSideEffects());
  HBasicBlock* empty_true = owner()->graph()->CreateBasicBlock();
  HBasicBlock* empty_false = owner()->graph()->CreateBasicBlock();
  instr->SetSuccessorAt(0, empty_true);
  instr->SetSuccessorAt(1, empty_false);
  owner()->FinishCurrentBlock(instr);
  owner()->Goto(empty_true, if_true(), owner()->function_state());
  owner()->Goto(empty_false, if_false(), owner()->function_state());
  // Control has been fully transferred; there is no fall-through block.
  owner()->set_current_block(NULL);
}
4464
4465
4466void TestContext::ReturnContinuation(HIfContinuation* continuation,
4467 BailoutId ast_id) {
4468 HBasicBlock* true_branch = NULL;
4469 HBasicBlock* false_branch = NULL;
4470 continuation->Continue(&true_branch, &false_branch);
4471 if (continuation->IsTrueReachable()) {
4472 owner()->Goto(true_branch, if_true(), owner()->function_state());
4473 }
4474 if (continuation->IsFalseReachable()) {
4475 owner()->Goto(false_branch, if_false(), owner()->function_state());
4476 }
4477 owner()->set_current_block(NULL);
4478}
4479
4480
// Builds the HBranch that dispatches on |value| to this test context's
// true/false targets, seeded with the observed ToBoolean type feedback.
void TestContext::BuildBranch(HValue* value) {
  // We expect the graph to be in edge-split form: there is no edge that
  // connects a branch node to a join node. We conservatively ensure that
  // property by always adding an empty block on the outgoing edges of this
  // branch.
  HOptimizedGraphBuilder* builder = owner();
  if (value != NULL && value->CheckFlag(HValue::kIsArguments)) {
    // Bailout() only flags the failure (it sets the stack-overflow bit);
    // execution deliberately falls through and callers unwind via the
    // CHECK_* macros afterwards.
    builder->Bailout(kArgumentsObjectValueInATestContext);
  }
  // Types the condition has been observed to take in ToBoolean conversions,
  // recorded by the ToBoolean IC.
  ToBooleanICStub::Types expected(condition()->to_boolean_types());
  ReturnControl(owner()->New<HBranch>(value, expected), BailoutId::None());
}
4493
4494
// HOptimizedGraphBuilder infrastructure for bailing out and checking bailouts.

// Performs |call| and returns from the enclosing void function if the call
// flagged a bailout (signalled through SetStackOverflow()).
#define CHECK_BAILOUT(call)                     \
  do {                                          \
    call;                                       \
    if (HasStackOverflow()) return;             \
  } while (false)


// Like CHECK_BAILOUT, but additionally returns when the call left no
// current block, i.e. control flow terminated and nothing further can be
// appended.
#define CHECK_ALIVE(call)                                       \
  do {                                                          \
    call;                                                       \
    if (HasStackOverflow() || current_block() == NULL) return;  \
  } while (false)


// Variant of CHECK_ALIVE for non-void functions: returns |value| instead.
#define CHECK_ALIVE_OR_RETURN(call, value)                            \
  do {                                                                \
    call;                                                             \
    if (HasStackOverflow() || current_block() == NULL) return value;  \
  } while (false)
4515
4516
// Aborts optimized compilation with |reason| and sets the stack-overflow
// flag, which the CHECK_BAILOUT/CHECK_ALIVE macros test to unwind the
// AST walk.
void HOptimizedGraphBuilder::Bailout(BailoutReason reason) {
  current_info()->AbortOptimization(reason);
  SetStackOverflow();
}
4521
4522
// Visits |expr| purely for its side effects; any produced value is dropped.
void HOptimizedGraphBuilder::VisitForEffect(Expression* expr) {
  EffectContext for_effect(this);
  Visit(expr);
}
4527
4528
// Visits |expr| and leaves its value on the environment stack. |flag|
// controls whether an arguments object is acceptable as the value.
void HOptimizedGraphBuilder::VisitForValue(Expression* expr,
                                           ArgumentsAllowedFlag flag) {
  ValueContext for_value(this, flag);
  Visit(expr);
}
4534
4535
// Visits |expr| as the operand of typeof: a value context marked
// INSIDE_TYPEOF so variable loads are emitted with typeof semantics.
void HOptimizedGraphBuilder::VisitForTypeOf(Expression* expr) {
  ValueContext for_value(this, ARGUMENTS_NOT_ALLOWED);
  for_value.set_typeof_mode(INSIDE_TYPEOF);
  Visit(expr);
}
4541
4542
// Visits |expr| in a test context that branches to |true_block| or
// |false_block| depending on the expression's boolean value.
void HOptimizedGraphBuilder::VisitForControl(Expression* expr,
                                             HBasicBlock* true_block,
                                             HBasicBlock* false_block) {
  TestContext for_control(this, expr, true_block, false_block);
  Visit(expr);
}
4549
4550
4551void HOptimizedGraphBuilder::VisitExpressions(
4552 ZoneList<Expression*>* exprs) {
4553 for (int i = 0; i < exprs->length(); ++i) {
4554 CHECK_ALIVE(VisitForValue(exprs->at(i)));
4555 }
4556}
4557
4558
4559void HOptimizedGraphBuilder::VisitExpressions(ZoneList<Expression*>* exprs,
4560 ArgumentsAllowedFlag flag) {
4561 for (int i = 0; i < exprs->length(); ++i) {
4562 CHECK_ALIVE(VisitForValue(exprs->at(i), flag));
4563 }
4564}
4565
4566
// Translates the whole function's AST into the initial Hydrogen graph.
// Returns false (after flagging a bailout / stack overflow) if the function
// cannot be handled by Crankshaft.
bool HOptimizedGraphBuilder::BuildGraph() {
  // Subclass constructors (which need super support) are not modelled here.
  if (IsSubclassConstructor(current_info()->literal()->kind())) {
    Bailout(kSuperReference);
    return false;
  }

  Scope* scope = current_info()->scope();
  SetUpScope(scope);

  // Add an edge to the body entry. This is warty: the graph's start
  // environment will be used by the Lithium translation as the initial
  // environment on graph entry, but it has now been mutated by the
  // Hydrogen translation of the instructions in the start block. This
  // environment uses values which have not been defined yet. These
  // Hydrogen instructions will then be replayed by the Lithium
  // translation, so they cannot have an environment effect. The edge to
  // the body's entry block (along with some special logic for the start
  // block in HInstruction::InsertAfter) seals the start block from
  // getting unwanted instructions inserted.
  //
  // TODO(kmillikin): Fix this. Stop mutating the initial environment.
  // Make the Hydrogen instructions in the initial block into Hydrogen
  // values (but not instructions), present in the initial environment and
  // not replayed by the Lithium translation.
  HEnvironment* initial_env = environment()->CopyWithoutHistory();
  HBasicBlock* body_entry = CreateBasicBlock(initial_env);
  Goto(body_entry);
  body_entry->SetJoinId(BailoutId::FunctionEntry());
  set_current_block(body_entry);

  VisitDeclarations(scope->declarations());
  Add<HSimulate>(BailoutId::Declarations());

  Add<HStackCheck>(HStackCheck::kFunctionEntry);

  VisitStatements(current_info()->literal()->body());
  if (HasStackOverflow()) return false;

  // If the body fell off the end, return undefined implicitly.
  if (current_block() != NULL) {
    Add<HReturn>(graph()->GetConstantUndefined());
    set_current_block(NULL);
  }

  // If the checksum of the number of type info changes is the same as the
  // last time this function was compiled, then this recompile is likely not
  // due to missing/inadequate type feedback, but rather too aggressive
  // optimization. Disable optimistic LICM in that case.
  Handle<Code> unoptimized_code(current_info()->shared_info()->code());
  DCHECK(unoptimized_code->kind() == Code::FUNCTION);
  Handle<TypeFeedbackInfo> type_info(
      TypeFeedbackInfo::cast(unoptimized_code->type_feedback_info()));
  int checksum = type_info->own_type_change_checksum();
  int composite_checksum = graph()->update_type_change_checksum(checksum);
  graph()->set_use_optimistic_licm(
      !type_info->matches_inlined_type_change_checksum(composite_checksum));
  type_info->set_inlined_type_change_checksum(composite_checksum);

  // Set this predicate early to avoid handle deref during graph optimization.
  graph()->set_allow_code_motion(
      current_info()->IsStub() ||
      current_info()->shared_info()->opt_count() + 1 < FLAG_max_opt_count);

  // Perform any necessary OSR-specific cleanups or changes to the graph.
  osr()->FinishGraph();

  return true;
}
4634
4635
// Runs the full Crankshaft optimization pipeline over the built graph.
// Phase order matters: each pass documents its own ordering constraint
// below. Returns false with |bailout_reason| set if the graph contains
// phi uses that cannot be supported.
bool HGraph::Optimize(BailoutReason* bailout_reason) {
  OrderBlocks();
  AssignDominators();

  // We need to create a HConstant "zero" now so that GVN will fold every
  // zero-valued constant in the graph together.
  // The constant is needed to make idef-based bounds check work: the pass
  // evaluates relations with "zero" and that zero cannot be created after GVN.
  GetConstant0();

#ifdef DEBUG
  // Do a full verify after building the graph and computing dominators.
  Verify(true);
#endif

  if (FLAG_analyze_environment_liveness && maximum_environment_size() != 0) {
    Run<HEnvironmentLivenessAnalysisPhase>();
  }

  if (!CheckConstPhiUses()) {
    *bailout_reason = kUnsupportedPhiUseOfConstVariable;
    return false;
  }
  Run<HRedundantPhiEliminationPhase>();
  if (!CheckArgumentsPhiUses()) {
    *bailout_reason = kUnsupportedPhiUseOfArguments;
    return false;
  }

  // Find and mark unreachable code to simplify optimizations, especially gvn,
  // where unreachable code could unnecessarily defeat LICM.
  Run<HMarkUnreachableBlocksPhase>();

  if (FLAG_dead_code_elimination) Run<HDeadCodeEliminationPhase>();
  if (FLAG_use_escape_analysis) Run<HEscapeAnalysisPhase>();

  if (FLAG_load_elimination) Run<HLoadEliminationPhase>();

  CollectPhis();

  if (has_osr()) osr()->FinishOsrValues();

  Run<HInferRepresentationPhase>();

  // Remove HSimulate instructions that have turned out not to be needed
  // after all by folding them into the following HSimulate.
  // This must happen after inferring representations.
  Run<HMergeRemovableSimulatesPhase>();

  Run<HMarkDeoptimizeOnUndefinedPhase>();
  Run<HRepresentationChangesPhase>();

  Run<HInferTypesPhase>();

  // Must be performed before canonicalization to ensure that Canonicalize
  // will not remove semantically meaningful ToInt32 operations e.g. BIT_OR with
  // zero.
  Run<HUint32AnalysisPhase>();

  if (FLAG_use_canonicalizing) Run<HCanonicalizePhase>();

  if (FLAG_use_gvn) Run<HGlobalValueNumberingPhase>();

  if (FLAG_check_elimination) Run<HCheckEliminationPhase>();

  if (FLAG_store_elimination) Run<HStoreEliminationPhase>();

  Run<HRangeAnalysisPhase>();

  Run<HComputeChangeUndefinedToNaN>();

  // Eliminate redundant stack checks on backwards branches.
  Run<HStackCheckEliminationPhase>();

  if (FLAG_array_bounds_checks_elimination) Run<HBoundsCheckEliminationPhase>();
  if (FLAG_array_index_dehoisting) Run<HDehoistIndexComputationsPhase>();
  if (FLAG_dead_code_elimination) Run<HDeadCodeEliminationPhase>();

  RestoreActualValues();

  // Find unreachable code a second time, GVN and other optimizations may have
  // made blocks unreachable that were previously reachable.
  Run<HMarkUnreachableBlocksPhase>();

  return true;
}
4722
4723
// Final cleanup pass: replaces every instruction that has been redefined
// (ActualValue() != self) with its actual value, deleting purely
// informative definitions so Lithium only sees real values.
void HGraph::RestoreActualValues() {
  HPhase phase("H_Restore actual values", this);

  for (int block_index = 0; block_index < blocks()->length(); block_index++) {
    HBasicBlock* block = blocks()->at(block_index);

#ifdef DEBUG
    // Phis are never redefined; verify that invariant in debug builds.
    for (int i = 0; i < block->phis()->length(); i++) {
      HPhi* phi = block->phis()->at(i);
      DCHECK(phi->ActualValue() == phi);
    }
#endif

    for (HInstructionIterator it(block); !it.Done(); it.Advance()) {
      HInstruction* instruction = it.Current();
      if (instruction->ActualValue() == instruction) continue;
      if (instruction->CheckFlag(HValue::kIsDead)) {
        // The instruction was marked as deleted but left in the graph
        // as a control flow dependency point for subsequent
        // instructions.
        instruction->DeleteAndReplaceWith(instruction->ActualValue());
      } else {
        DCHECK(instruction->IsInformativeDefinition());
        if (instruction->IsPurelyInformativeDefinition()) {
          instruction->DeleteAndReplaceWith(instruction->RedefinedOperand());
        } else {
          instruction->ReplaceAllUsesWith(instruction->ActualValue());
        }
      }
    }
  }
}
4756
4757
4758void HOptimizedGraphBuilder::PushArgumentsFromEnvironment(int count) {
4759 ZoneList<HValue*> arguments(count, zone());
4760 for (int i = 0; i < count; ++i) {
4761 arguments.Add(Pop(), zone());
4762 }
4763
4764 HPushArguments* push_args = New<HPushArguments>();
4765 while (!arguments.is_empty()) {
4766 push_args->AddInput(arguments.RemoveLast());
4767 }
4768 AddInstruction(push_args);
4769}
4770
4771
// Moves the call's arguments from the environment into explicit push
// instructions, then returns the (not yet inserted) call instruction.
template <class Instruction>
HInstruction* HOptimizedGraphBuilder::PreProcessCall(Instruction* call) {
  PushArgumentsFromEnvironment(call->argument_count());
  return call;
}
4777
4778
// Builds the function prologue: binds parameters, initializes locals to
// undefined, materializes the context and the (lazy) arguments object, and
// bails out on features Crankshaft does not model (rest parameters,
// this-function / new.target variables).
void HOptimizedGraphBuilder::SetUpScope(Scope* scope) {
  HEnvironment* prolog_env = environment();
  int parameter_count = environment()->parameter_count();
  ZoneList<HValue*> parameters(parameter_count, zone());
  for (int i = 0; i < parameter_count; ++i) {
    HInstruction* parameter = Add<HParameter>(static_cast<unsigned>(i));
    parameters.Add(parameter, zone());
    environment()->Bind(i, parameter);
  }

  HConstant* undefined_constant = graph()->GetConstantUndefined();
  // Initialize specials and locals to undefined.
  for (int i = parameter_count + 1; i < environment()->length(); ++i) {
    environment()->Bind(i, undefined_constant);
  }
  Add<HPrologue>();

  HEnvironment* initial_env = environment()->CopyWithoutHistory();
  HBasicBlock* body_entry = CreateBasicBlock(initial_env);
  GotoNoSimulate(body_entry);
  set_current_block(body_entry);

  // Initialize context of prolog environment to undefined.
  prolog_env->BindContext(undefined_constant);

  // First special is HContext.
  HInstruction* context = Add<HContext>();
  environment()->BindContext(context);

  // Create an arguments object containing the initial parameters. Set the
  // initial values of parameters including "this" having parameter index 0.
  DCHECK_EQ(scope->num_parameters() + 1, parameter_count);
  HArgumentsObject* arguments_object = New<HArgumentsObject>(parameter_count);
  for (int i = 0; i < parameter_count; ++i) {
    HValue* parameter = parameters.at(i);
    arguments_object->AddArgument(parameter, zone());
  }

  AddInstruction(arguments_object);

  // Handle the arguments and arguments shadow variables specially (they do
  // not have declarations).
  if (scope->arguments() != NULL) {
    environment()->Bind(scope->arguments(), arguments_object);
  }

  int rest_index;
  Variable* rest = scope->rest_parameter(&rest_index);
  if (rest) {
    return Bailout(kRestParameter);
  }

  if (scope->this_function_var() != nullptr ||
      scope->new_target_var() != nullptr) {
    return Bailout(kSuperReference);
  }

  // Trace the call.
  if (FLAG_trace && top_info()->IsOptimizing()) {
    Add<HCallRuntime>(Runtime::FunctionForId(Runtime::kTraceEnter), 0);
  }
}
4841
4842
4843void HOptimizedGraphBuilder::VisitStatements(ZoneList<Statement*>* statements) {
4844 for (int i = 0; i < statements->length(); i++) {
4845 Statement* stmt = statements->at(i);
4846 CHECK_ALIVE(Visit(stmt));
4847 if (stmt->IsJump()) break;
4848 }
4849}
4850
4851
// Translates a block statement: optionally pushes a fresh block context
// (when the block's scope needs one), visits declarations and statements,
// then pops back to the outer scope/context and wires up any break target.
void HOptimizedGraphBuilder::VisitBlock(Block* stmt) {
  DCHECK(!HasStackOverflow());
  DCHECK(current_block() != NULL);
  DCHECK(current_block()->HasPredecessor());

  Scope* outer_scope = scope();
  Scope* scope = stmt->scope();
  BreakAndContinueInfo break_info(stmt, outer_scope);

  { BreakAndContinueScope push(&break_info, this);
    if (scope != NULL) {
      if (scope->NeedsContext()) {
        // Load the function object.
        Scope* declaration_scope = scope->DeclarationScope();
        HInstruction* function;
        HValue* outer_context = environment()->context();
        if (declaration_scope->is_script_scope() ||
            declaration_scope->is_eval_scope()) {
          function = new (zone())
              HLoadContextSlot(outer_context, Context::CLOSURE_INDEX,
                               HLoadContextSlot::kNoCheck);
        } else {
          function = New<HThisFunction>();
        }
        AddInstruction(function);
        // Allocate a block context and store it to the stack frame.
        HValue* scope_info = Add<HConstant>(scope->GetScopeInfo(isolate()));
        Add<HPushArguments>(scope_info, function);
        HInstruction* inner_context = Add<HCallRuntime>(
            Runtime::FunctionForId(Runtime::kPushBlockContext), 2);
        inner_context->SetFlag(HValue::kHasNoObservableSideEffects);
        set_scope(scope);
        environment()->BindContext(inner_context);
      }
      VisitDeclarations(scope->declarations());
      AddSimulate(stmt->DeclsId(), REMOVABLE_SIMULATE);
    }
    CHECK_BAILOUT(VisitStatements(stmt->statements()));
  }
  set_scope(outer_scope);
  // If we pushed a context above, pop back to the previous one by loading
  // the context's PREVIOUS slot.
  if (scope != NULL && current_block() != NULL &&
      scope->ContextLocalCount() > 0) {
    HValue* inner_context = environment()->context();
    HValue* outer_context = Add<HLoadNamedField>(
        inner_context, nullptr,
        HObjectAccess::ForContextSlot(Context::PREVIOUS_INDEX));

    environment()->BindContext(outer_context);
  }
  // If anything inside the block broke out of it, join the break edges here.
  HBasicBlock* break_block = break_info.break_block();
  if (break_block != NULL) {
    if (current_block() != NULL) Goto(break_block);
    break_block->SetJoinId(stmt->ExitId());
    set_current_block(break_block);
  }
}
4908
4909
// An expression statement's value is discarded; visit for effect only.
void HOptimizedGraphBuilder::VisitExpressionStatement(
    ExpressionStatement* stmt) {
  DCHECK(!HasStackOverflow());
  DCHECK(current_block() != NULL);
  DCHECK(current_block()->HasPredecessor());
  VisitForEffect(stmt->expression());
}
4917
4918
// Empty statements generate no code; only the builder invariants are
// checked.
void HOptimizedGraphBuilder::VisitEmptyStatement(EmptyStatement* stmt) {
  DCHECK(!HasStackOverflow());
  DCHECK(current_block() != NULL);
  DCHECK(current_block()->HasPredecessor());
}
4924
4925
// A sloppy-mode block-scoped function declaration simply wraps another
// statement; delegate to it.
void HOptimizedGraphBuilder::VisitSloppyBlockFunctionStatement(
    SloppyBlockFunctionStatement* stmt) {
  Visit(stmt->statement());
}
4930
4931
// Translates an if-statement. Constant conditions emit only the taken arm
// (plus a simulate at that arm's bailout id); otherwise both arms are built
// and joined.
void HOptimizedGraphBuilder::VisitIfStatement(IfStatement* stmt) {
  DCHECK(!HasStackOverflow());
  DCHECK(current_block() != NULL);
  DCHECK(current_block()->HasPredecessor());
  if (stmt->condition()->ToBooleanIsTrue()) {
    Add<HSimulate>(stmt->ThenId());
    Visit(stmt->then_statement());
  } else if (stmt->condition()->ToBooleanIsFalse()) {
    Add<HSimulate>(stmt->ElseId());
    Visit(stmt->else_statement());
  } else {
    HBasicBlock* cond_true = graph()->CreateBasicBlock();
    HBasicBlock* cond_false = graph()->CreateBasicBlock();
    CHECK_BAILOUT(VisitForControl(stmt->condition(), cond_true, cond_false));

    // Technically, we should be able to handle the case when one side of
    // the test is not connected, but this can trip up liveness analysis
    // if we did not fully connect the test context based on some optimistic
    // assumption. If such an assumption was violated, we would end up with
    // an environment with optimized-out values. So we should always
    // conservatively connect the test context.
    CHECK(cond_true->HasPredecessor());
    CHECK(cond_false->HasPredecessor());

    cond_true->SetJoinId(stmt->ThenId());
    set_current_block(cond_true);
    CHECK_BAILOUT(Visit(stmt->then_statement()));
    cond_true = current_block();

    cond_false->SetJoinId(stmt->ElseId());
    set_current_block(cond_false);
    CHECK_BAILOUT(Visit(stmt->else_statement()));
    cond_false = current_block();

    HBasicBlock* join = CreateJoin(cond_true, cond_false, stmt->IfId());
    set_current_block(join);
  }
}
4970
4971
// Finds (lazily creating) the target block for a break or continue aimed at
// |stmt|, by walking the stack of enclosing BreakAndContinueScopes. On
// return, |*scope| is the scope of the matched construct and |*drop_extra|
// the number of extra environment values to drop before jumping.
HBasicBlock* HOptimizedGraphBuilder::BreakAndContinueScope::Get(
    BreakableStatement* stmt,
    BreakType type,
    Scope** scope,
    int* drop_extra) {
  *drop_extra = 0;
  BreakAndContinueScope* current = this;
  while (current != NULL && current->info()->target() != stmt) {
    *drop_extra += current->info()->drop_extra();
    current = current->next();
  }
  DCHECK(current != NULL);  // Always found (unless stack is malformed).
  *scope = current->info()->scope();

  // A break leaves the matched construct entirely, so its own extra values
  // are dropped too; a continue stays inside it.
  if (type == BREAK) {
    *drop_extra += current->info()->drop_extra();
  }

  HBasicBlock* block = NULL;
  switch (type) {
    case BREAK:
      block = current->info()->break_block();
      if (block == NULL) {
        block = current->owner()->graph()->CreateBasicBlock();
        current->info()->set_break_block(block);
      }
      break;

    case CONTINUE:
      block = current->info()->continue_block();
      if (block == NULL) {
        block = current->owner()->graph()->CreateBasicBlock();
        current->info()->set_continue_block(block);
      }
      break;
  }

  return block;
}
5011
5012
// Translates `continue`: drops any extra environment values accumulated by
// intervening constructs, pops the context chain back to the target loop's
// scope, and jumps to the loop's continue block.
void HOptimizedGraphBuilder::VisitContinueStatement(
    ContinueStatement* stmt) {
  DCHECK(!HasStackOverflow());
  DCHECK(current_block() != NULL);
  DCHECK(current_block()->HasPredecessor());

  // A nonlocal jump out of a do-expression cannot be modelled; bail out.
  if (function_state()->IsInsideDoExpressionScope()) {
    return Bailout(kDoExpressionUnmodelable);
  }

  Scope* outer_scope = NULL;
  Scope* inner_scope = scope();
  int drop_extra = 0;
  HBasicBlock* continue_block = break_scope()->Get(
      stmt->target(), BreakAndContinueScope::CONTINUE,
      &outer_scope, &drop_extra);
  HValue* context = environment()->context();
  Drop(drop_extra);
  int context_pop_count = inner_scope->ContextChainLength(outer_scope);
  if (context_pop_count > 0) {
    // Walk the context chain up via the PREVIOUS slots until we reach the
    // context of the target scope.
    while (context_pop_count-- > 0) {
      HInstruction* context_instruction = Add<HLoadNamedField>(
          context, nullptr,
          HObjectAccess::ForContextSlot(Context::PREVIOUS_INDEX));
      context = context_instruction;
    }
    environment()->BindContext(context);
  }

  Goto(continue_block);
  set_current_block(NULL);
}
5045
5046
// Translates `break`: mirrors VisitContinueStatement, but targets the
// construct's break block (and drops the construct's own extra values too,
// see BreakAndContinueScope::Get).
void HOptimizedGraphBuilder::VisitBreakStatement(BreakStatement* stmt) {
  DCHECK(!HasStackOverflow());
  DCHECK(current_block() != NULL);
  DCHECK(current_block()->HasPredecessor());

  // A nonlocal jump out of a do-expression cannot be modelled; bail out.
  if (function_state()->IsInsideDoExpressionScope()) {
    return Bailout(kDoExpressionUnmodelable);
  }

  Scope* outer_scope = NULL;
  Scope* inner_scope = scope();
  int drop_extra = 0;
  HBasicBlock* break_block = break_scope()->Get(
      stmt->target(), BreakAndContinueScope::BREAK,
      &outer_scope, &drop_extra);
  HValue* context = environment()->context();
  Drop(drop_extra);
  int context_pop_count = inner_scope->ContextChainLength(outer_scope);
  if (context_pop_count > 0) {
    // Walk the context chain up via the PREVIOUS slots until we reach the
    // context of the target scope.
    while (context_pop_count-- > 0) {
      HInstruction* context_instruction = Add<HLoadNamedField>(
          context, nullptr,
          HObjectAccess::ForContextSlot(Context::PREVIOUS_INDEX));
      context = context_instruction;
    }
    environment()->BindContext(context);
  }
  Goto(break_block);
  set_current_block(NULL);
}
5077
5078
// Translates `return`. A NULL call context means a genuine return from the
// outermost function; otherwise this is a return inside an inlined frame,
// and the behavior depends on the inlining kind (construct call, setter,
// or normal) and on the surrounding expression context.
void HOptimizedGraphBuilder::VisitReturnStatement(ReturnStatement* stmt) {
  DCHECK(!HasStackOverflow());
  DCHECK(current_block() != NULL);
  DCHECK(current_block()->HasPredecessor());
  FunctionState* state = function_state();
  AstContext* context = call_context();
  if (context == NULL) {
    // Not an inlined return, so an actual one.
    CHECK_ALIVE(VisitForValue(stmt->expression()));
    HValue* result = environment()->Pop();
    Add<HReturn>(result);
  } else if (state->inlining_kind() == CONSTRUCT_CALL_RETURN) {
    // Return from an inlined construct call. In a test context the return value
    // will always evaluate to true, in a value context the return value needs
    // to be a JSObject.
    if (context->IsTest()) {
      CHECK_ALIVE(VisitForEffect(stmt->expression()));
      context->ReturnValue(graph()->GetConstantTrue());
    } else if (context->IsEffect()) {
      CHECK_ALIVE(VisitForEffect(stmt->expression()));
      Goto(function_return(), state);
    } else {
      DCHECK(context->IsValue());
      CHECK_ALIVE(VisitForValue(stmt->expression()));
      // Per construct semantics: if the returned value is a JS receiver it
      // is the result, otherwise the freshly-allocated receiver is.
      HValue* return_value = Pop();
      HValue* receiver = environment()->arguments_environment()->Lookup(0);
      HHasInstanceTypeAndBranch* typecheck =
          New<HHasInstanceTypeAndBranch>(return_value,
                                         FIRST_JS_RECEIVER_TYPE,
                                         LAST_JS_RECEIVER_TYPE);
      HBasicBlock* if_spec_object = graph()->CreateBasicBlock();
      HBasicBlock* not_spec_object = graph()->CreateBasicBlock();
      typecheck->SetSuccessorAt(0, if_spec_object);
      typecheck->SetSuccessorAt(1, not_spec_object);
      FinishCurrentBlock(typecheck);
      AddLeaveInlined(if_spec_object, return_value, state);
      AddLeaveInlined(not_spec_object, receiver, state);
    }
  } else if (state->inlining_kind() == SETTER_CALL_RETURN) {
    // Return from an inlined setter call. The returned value is never used, the
    // value of an assignment is always the value of the RHS of the assignment.
    CHECK_ALIVE(VisitForEffect(stmt->expression()));
    if (context->IsTest()) {
      HValue* rhs = environment()->arguments_environment()->Lookup(1);
      context->ReturnValue(rhs);
    } else if (context->IsEffect()) {
      Goto(function_return(), state);
    } else {
      DCHECK(context->IsValue());
      HValue* rhs = environment()->arguments_environment()->Lookup(1);
      AddLeaveInlined(rhs, state);
    }
  } else {
    // Return from a normal inlined function. Visit the subexpression in the
    // expression context of the call.
    if (context->IsTest()) {
      TestContext* test = TestContext::cast(context);
      VisitForControl(stmt->expression(), test->if_true(), test->if_false());
    } else if (context->IsEffect()) {
      // Visit in value context and ignore the result. This is needed to keep
      // environment in sync with full-codegen since some visitors (e.g.
      // VisitCountOperation) use the operand stack differently depending on
      // context.
      CHECK_ALIVE(VisitForValue(stmt->expression()));
      Pop();
      Goto(function_return(), state);
    } else {
      DCHECK(context->IsValue());
      CHECK_ALIVE(VisitForValue(stmt->expression()));
      AddLeaveInlined(Pop(), state);
    }
  }
  // Nothing is reachable after a return.
  set_current_block(NULL);
}
5153
5154
// `with` statements are not supported by Crankshaft; always bail out.
void HOptimizedGraphBuilder::VisitWithStatement(WithStatement* stmt) {
  DCHECK(!HasStackOverflow());
  DCHECK(current_block() != NULL);
  DCHECK(current_block()->HasPredecessor());
  return Bailout(kWithStatement);
}
5161
5162
// Translates a switch statement as a linear chain of strict-equality
// compares against the tag (phase 1), then visits the clause bodies in
// source order so fall-through works (phase 2), finally joining
// fall-through, the failed-all-tests path, and any breaks.
void HOptimizedGraphBuilder::VisitSwitchStatement(SwitchStatement* stmt) {
  DCHECK(!HasStackOverflow());
  DCHECK(current_block() != NULL);
  DCHECK(current_block()->HasPredecessor());

  ZoneList<CaseClause*>* clauses = stmt->cases();
  int clause_count = clauses->length();
  ZoneList<HBasicBlock*> body_blocks(clause_count, zone());

  // The tag stays on the environment stack while the tests run and is
  // dropped on each path once no further compares need it.
  CHECK_ALIVE(VisitForValue(stmt->tag()));
  Add<HSimulate>(stmt->EntryId());
  HValue* tag_value = Top();
  Type* tag_type = bounds_.get(stmt->tag()).lower;

  // 1. Build all the tests, with dangling true branches
  BailoutId default_id = BailoutId::None();
  for (int i = 0; i < clause_count; ++i) {
    CaseClause* clause = clauses->at(i);
    if (clause->is_default()) {
      body_blocks.Add(NULL, zone());
      if (default_id.IsNone()) default_id = clause->EntryId();
      continue;
    }

    // Generate a compare and branch.
    CHECK_BAILOUT(VisitForValue(clause->label()));
    if (current_block() == NULL) return Bailout(kUnsupportedSwitchStatement);
    HValue* label_value = Pop();

    Type* label_type = bounds_.get(clause->label()).lower;
    Type* combined_type = clause->compare_type();
    HControlInstruction* compare = BuildCompareInstruction(
        Token::EQ_STRICT, tag_value, label_value, tag_type, label_type,
        combined_type,
        ScriptPositionToSourcePosition(stmt->tag()->position()),
        ScriptPositionToSourcePosition(clause->label()->position()),
        PUSH_BEFORE_SIMULATE, clause->id());

    HBasicBlock* next_test_block = graph()->CreateBasicBlock();
    HBasicBlock* body_block = graph()->CreateBasicBlock();
    body_blocks.Add(body_block, zone());
    compare->SetSuccessorAt(0, body_block);
    compare->SetSuccessorAt(1, next_test_block);
    FinishCurrentBlock(compare);

    set_current_block(body_block);
    Drop(1);  // tag_value

    set_current_block(next_test_block);
  }

  // Save the current block to use for the default or to join with the
  // exit.
  HBasicBlock* last_block = current_block();
  Drop(1);  // tag_value

  // 2. Loop over the clauses and the linked list of tests in lockstep,
  // translating the clause bodies.
  HBasicBlock* fall_through_block = NULL;

  BreakAndContinueInfo break_info(stmt, scope());
  { BreakAndContinueScope push(&break_info, this);
    for (int i = 0; i < clause_count; ++i) {
      CaseClause* clause = clauses->at(i);

      // Identify the block where normal (non-fall-through) control flow
      // goes to.
      HBasicBlock* normal_block = NULL;
      if (clause->is_default()) {
        if (last_block == NULL) continue;
        normal_block = last_block;
        last_block = NULL;  // Cleared to indicate we've handled it.
      } else {
        normal_block = body_blocks[i];
      }

      if (fall_through_block == NULL) {
        set_current_block(normal_block);
      } else {
        HBasicBlock* join = CreateJoin(fall_through_block,
                                       normal_block,
                                       clause->EntryId());
        set_current_block(join);
      }

      CHECK_BAILOUT(VisitStatements(clause->statements()));
      fall_through_block = current_block();
    }
  }

  // Create an up-to-3-way join. Use the break block if it exists since
  // it's already a join block.
  HBasicBlock* break_block = break_info.break_block();
  if (break_block == NULL) {
    set_current_block(CreateJoin(fall_through_block,
                                 last_block,
                                 stmt->ExitId()));
  } else {
    if (fall_through_block != NULL) Goto(fall_through_block, break_block);
    if (last_block != NULL) Goto(last_block, break_block);
    break_block->SetJoinId(stmt->ExitId());
    set_current_block(break_block);
  }
}
5267
5268
// Emits the per-iteration (backwards-branch) stack check, records it on the
// loop header so later code can eliminate it (see e.g.
// VisitDoWhileStatement for a constant-false condition), then visits the
// loop body.
void HOptimizedGraphBuilder::VisitLoopBody(IterationStatement* stmt,
                                           HBasicBlock* loop_entry) {
  Add<HSimulate>(stmt->StackCheckId());
  HStackCheck* stack_check =
      HStackCheck::cast(Add<HStackCheck>(HStackCheck::kBackwardsBranch));
  DCHECK(loop_entry->IsLoopHeader());
  loop_entry->loop_information()->set_stack_check(stack_check);
  CHECK_BAILOUT(Visit(stmt->body()));
}
5278
5279
// Translates a do-while loop: body first, then (if the body can fall
// through) the condition controls the back edge vs. the loop exit. A
// constant-false condition turns the loop into straight-line code and the
// per-iteration stack check is eliminated.
void HOptimizedGraphBuilder::VisitDoWhileStatement(DoWhileStatement* stmt) {
  DCHECK(!HasStackOverflow());
  DCHECK(current_block() != NULL);
  DCHECK(current_block()->HasPredecessor());
  DCHECK(current_block() != NULL);
  HBasicBlock* loop_entry = BuildLoopEntry(stmt);

  BreakAndContinueInfo break_info(stmt, scope());
  {
    BreakAndContinueScope push(&break_info, this);
    CHECK_BAILOUT(VisitLoopBody(stmt, loop_entry));
  }
  // Merge the normal body exit with any `continue` edges.
  HBasicBlock* body_exit =
      JoinContinue(stmt, current_block(), break_info.continue_block());
  HBasicBlock* loop_successor = NULL;
  if (body_exit != NULL) {
    set_current_block(body_exit);
    loop_successor = graph()->CreateBasicBlock();
    if (stmt->cond()->ToBooleanIsFalse()) {
      // Loop runs exactly once; no back edge, no stack check needed.
      loop_entry->loop_information()->stack_check()->Eliminate();
      Goto(loop_successor);
      body_exit = NULL;
    } else {
      // The block for a true condition, the actual predecessor block of the
      // back edge.
      body_exit = graph()->CreateBasicBlock();
      CHECK_BAILOUT(VisitForControl(stmt->cond(), body_exit, loop_successor));
    }
    if (body_exit != NULL && body_exit->HasPredecessor()) {
      body_exit->SetJoinId(stmt->BackEdgeId());
    } else {
      body_exit = NULL;
    }
    if (loop_successor->HasPredecessor()) {
      loop_successor->SetJoinId(stmt->ExitId());
    } else {
      loop_successor = NULL;
    }
  }
  HBasicBlock* loop_exit = CreateLoop(stmt,
                                      loop_entry,
                                      body_exit,
                                      loop_successor,
                                      break_info.break_block());
  set_current_block(loop_exit);
}
5326
5327
5328void HOptimizedGraphBuilder::VisitWhileStatement(WhileStatement* stmt) {
5329 DCHECK(!HasStackOverflow());
5330 DCHECK(current_block() != NULL);
5331 DCHECK(current_block()->HasPredecessor());
5332 DCHECK(current_block() != NULL);
5333 HBasicBlock* loop_entry = BuildLoopEntry(stmt);
5334
5335 // If the condition is constant true, do not generate a branch.
5336 HBasicBlock* loop_successor = NULL;
Ben Murdochda12d292016-06-02 14:46:10 +01005337 HBasicBlock* body_entry = graph()->CreateBasicBlock();
5338 loop_successor = graph()->CreateBasicBlock();
5339 CHECK_BAILOUT(VisitForControl(stmt->cond(), body_entry, loop_successor));
5340 if (body_entry->HasPredecessor()) {
5341 body_entry->SetJoinId(stmt->BodyId());
5342 set_current_block(body_entry);
5343 }
5344 if (loop_successor->HasPredecessor()) {
5345 loop_successor->SetJoinId(stmt->ExitId());
5346 } else {
5347 loop_successor = NULL;
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00005348 }
5349
5350 BreakAndContinueInfo break_info(stmt, scope());
5351 if (current_block() != NULL) {
5352 BreakAndContinueScope push(&break_info, this);
5353 CHECK_BAILOUT(VisitLoopBody(stmt, loop_entry));
5354 }
5355 HBasicBlock* body_exit =
5356 JoinContinue(stmt, current_block(), break_info.continue_block());
5357 HBasicBlock* loop_exit = CreateLoop(stmt,
5358 loop_entry,
5359 body_exit,
5360 loop_successor,
5361 break_info.break_block());
5362 set_current_block(loop_exit);
5363}
5364
5365
// Builds the graph for a C-style for loop: optional init statement, optional
// condition checked at the loop header, optional next-expression after the
// body.
void HOptimizedGraphBuilder::VisitForStatement(ForStatement* stmt) {
  DCHECK(!HasStackOverflow());
  DCHECK(current_block() != NULL);
  DCHECK(current_block()->HasPredecessor());
  if (stmt->init() != NULL) {
    CHECK_ALIVE(Visit(stmt->init()));
  }
  DCHECK(current_block() != NULL);
  HBasicBlock* loop_entry = BuildLoopEntry(stmt);

  HBasicBlock* loop_successor = graph()->CreateBasicBlock();
  HBasicBlock* body_entry = graph()->CreateBasicBlock();
  if (stmt->cond() != NULL) {
    CHECK_BAILOUT(VisitForControl(stmt->cond(), body_entry, loop_successor));
    if (body_entry->HasPredecessor()) {
      body_entry->SetJoinId(stmt->BodyId());
      set_current_block(body_entry);
    }
    if (loop_successor->HasPredecessor()) {
      loop_successor->SetJoinId(stmt->ExitId());
    } else {
      loop_successor = NULL;
    }
  } else {
    // Create dummy control flow so that variable liveness analysis
    // produces the correct result.
    HControlInstruction* branch = New<HBranch>(graph()->GetConstantTrue());
    branch->SetSuccessorAt(0, body_entry);
    branch->SetSuccessorAt(1, loop_successor);
    FinishCurrentBlock(branch);
    set_current_block(body_entry);
  }

  BreakAndContinueInfo break_info(stmt, scope());
  if (current_block() != NULL) {
    BreakAndContinueScope push(&break_info, this);
    CHECK_BAILOUT(VisitLoopBody(stmt, loop_entry));
  }
  HBasicBlock* body_exit =
      JoinContinue(stmt, current_block(), break_info.continue_block());

  // The next-expression only runs when the body completes normally.
  if (stmt->next() != NULL && body_exit != NULL) {
    set_current_block(body_exit);
    CHECK_BAILOUT(Visit(stmt->next()));
    body_exit = current_block();
  }

  HBasicBlock* loop_exit = CreateLoop(stmt,
                                      loop_entry,
                                      body_exit,
                                      loop_successor,
                                      break_info.break_block());
  set_current_block(loop_exit);
}
5420
5421
5422void HOptimizedGraphBuilder::VisitForInStatement(ForInStatement* stmt) {
5423 DCHECK(!HasStackOverflow());
5424 DCHECK(current_block() != NULL);
5425 DCHECK(current_block()->HasPredecessor());
5426
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00005427 if (!stmt->each()->IsVariableProxy() ||
5428 !stmt->each()->AsVariableProxy()->var()->IsStackLocal()) {
5429 return Bailout(kForInStatementWithNonLocalEachVariable);
5430 }
5431
5432 Variable* each_var = stmt->each()->AsVariableProxy()->var();
5433
5434 CHECK_ALIVE(VisitForValue(stmt->enumerable()));
5435 HValue* enumerable = Top(); // Leave enumerable at the top.
5436
5437 IfBuilder if_undefined_or_null(this);
5438 if_undefined_or_null.If<HCompareObjectEqAndBranch>(
5439 enumerable, graph()->GetConstantUndefined());
5440 if_undefined_or_null.Or();
5441 if_undefined_or_null.If<HCompareObjectEqAndBranch>(
5442 enumerable, graph()->GetConstantNull());
5443 if_undefined_or_null.ThenDeopt(Deoptimizer::kUndefinedOrNullInForIn);
5444 if_undefined_or_null.End();
5445 BuildForInBody(stmt, each_var, enumerable);
5446}
5447
5448
// Builds the Hydrogen graph for the body of a for-in loop. On entry the
// enumerable is on top of the expression stack. The loop keeps five values
// on the stack, from the bottom up: enumerable, cache type (map), cache
// array, limit (enum length), and the current index.
void HOptimizedGraphBuilder::BuildForInBody(ForInStatement* stmt,
                                            Variable* each_var,
                                            HValue* enumerable) {
  Handle<Map> meta_map = isolate()->factory()->meta_map();
  bool fast = stmt->for_in_type() == ForInStatement::FAST_FOR_IN;
  // The enumerable must be a JS receiver.
  BuildCheckHeapObject(enumerable);
  Add<HCheckInstanceType>(enumerable, HCheckInstanceType::IS_JS_RECEIVER);
  Add<HSimulate>(stmt->ToObjectId());
  if (fast) {
    // Fast path: use the enum cache hanging off the enumerable's map.
    HForInPrepareMap* map = Add<HForInPrepareMap>(enumerable);
    Push(map);
    Add<HSimulate>(stmt->EnumId());
    Drop(1);
    Add<HCheckMaps>(map, meta_map);

    HForInCacheArray* array = Add<HForInCacheArray>(
        enumerable, map, DescriptorArray::kEnumCacheBridgeCacheIndex);
    HValue* enum_length = BuildEnumLength(map);

    HForInCacheArray* index_cache = Add<HForInCacheArray>(
        enumerable, map, DescriptorArray::kEnumCacheBridgeIndicesCacheIndex);
    array->set_index_cache(index_cache);

    Push(map);
    Push(array);
    Push(enum_length);
    Add<HSimulate>(stmt->PrepareId());
  } else {
    // Slow path: ask the runtime for the enumeration; the result may still
    // be a map (enum cache usable) or a plain fixed array of keys.
    Runtime::FunctionId function_id = Runtime::kForInEnumerate;
    Add<HPushArguments>(enumerable);
    HCallRuntime* array =
        Add<HCallRuntime>(Runtime::FunctionForId(function_id), 1);
    Push(array);
    Add<HSimulate>(stmt->EnumId());
    Drop(1);

    IfBuilder if_fast(this);
    if_fast.If<HCompareMap>(array, meta_map);
    if_fast.Then();
    {
      // Runtime returned a map: read keys and length from its enum cache.
      HValue* cache_map = array;
      HForInCacheArray* cache = Add<HForInCacheArray>(
          enumerable, cache_map, DescriptorArray::kEnumCacheBridgeCacheIndex);
      HValue* enum_length = BuildEnumLength(cache_map);
      Push(cache_map);
      Push(cache);
      Push(enum_length);
      Add<HSimulate>(stmt->PrepareId(), FIXED_SIMULATE);
    }
    if_fast.Else();
    {
      // Runtime returned a fixed array of keys; use its length as limit.
      Push(graph()->GetConstant1());
      Push(array);
      Push(AddLoadFixedArrayLength(array));
      Add<HSimulate>(stmt->PrepareId(), FIXED_SIMULATE);
    }
  }

  // Initial index.
  Push(graph()->GetConstant0());

  HBasicBlock* loop_entry = BuildLoopEntry(stmt);

  // Reload the values to ensure we have up-to-date values inside of the loop.
  // This is relevant especially for OSR where the values don't come from the
  // computation above, but from the OSR entry block.
  HValue* index = environment()->ExpressionStackAt(0);
  HValue* limit = environment()->ExpressionStackAt(1);
  HValue* array = environment()->ExpressionStackAt(2);
  HValue* type = environment()->ExpressionStackAt(3);
  enumerable = environment()->ExpressionStackAt(4);

  // Check that we still have more keys.
  HCompareNumericAndBranch* compare_index =
      New<HCompareNumericAndBranch>(index, limit, Token::LT);
  compare_index->set_observed_input_representation(
      Representation::Smi(), Representation::Smi());

  HBasicBlock* loop_body = graph()->CreateBasicBlock();
  HBasicBlock* loop_successor = graph()->CreateBasicBlock();

  compare_index->SetSuccessorAt(0, loop_body);
  compare_index->SetSuccessorAt(1, loop_successor);
  FinishCurrentBlock(compare_index);

  // Loop done: pop the five loop values off the stack.
  set_current_block(loop_successor);
  Drop(5);

  set_current_block(loop_body);

  // Compute the next enumerated value.
  HValue* key = Add<HLoadKeyed>(array, index, index, nullptr, FAST_ELEMENTS);

  HBasicBlock* continue_block = nullptr;
  if (fast) {
    // Check if expected map still matches that of the enumerable.
    Add<HCheckMapValue>(enumerable, type);
    Add<HSimulate>(stmt->FilterId());
  } else {
    // We need the continue block here to be able to skip over invalidated keys.
    continue_block = graph()->CreateBasicBlock();

    // We cannot use the IfBuilder here, since we need to be able to jump
    // over the loop body in case of undefined result from %ForInFilter,
    // and the poor soul that is the IfBuilder gets really confused about
    // such "advanced control flow requirements".
    HBasicBlock* if_fast = graph()->CreateBasicBlock();
    HBasicBlock* if_slow = graph()->CreateBasicBlock();
    HBasicBlock* if_slow_pass = graph()->CreateBasicBlock();
    HBasicBlock* if_slow_skip = graph()->CreateBasicBlock();
    HBasicBlock* if_join = graph()->CreateBasicBlock();

    // Check if expected map still matches that of the enumerable.
    HValue* enumerable_map =
        Add<HLoadNamedField>(enumerable, nullptr, HObjectAccess::ForMap());
    FinishCurrentBlock(
        New<HCompareObjectEqAndBranch>(enumerable_map, type, if_fast, if_slow));
    set_current_block(if_fast);
    {
      // The enum cache for enumerable is still valid, no need to check key.
      Push(key);
      Goto(if_join);
    }
    set_current_block(if_slow);
    {
      // Check if key is still valid for enumerable.
      Add<HPushArguments>(enumerable, key);
      Runtime::FunctionId function_id = Runtime::kForInFilter;
      Push(Add<HCallRuntime>(Runtime::FunctionForId(function_id), 2));
      Add<HSimulate>(stmt->FilterId());
      FinishCurrentBlock(New<HCompareObjectEqAndBranch>(
          Top(), graph()->GetConstantUndefined(), if_slow_skip, if_slow_pass));
    }
    set_current_block(if_slow_pass);
    { Goto(if_join); }
    set_current_block(if_slow_skip);
    {
      // The key is no longer valid for enumerable, skip it.
      Drop(1);
      Goto(continue_block);
    }
    if_join->SetJoinId(stmt->FilterId());
    set_current_block(if_join);
    key = Pop();
  }

  // Assign the current key to the loop variable before visiting the body.
  Bind(each_var, key);
  Add<HSimulate>(stmt->AssignmentId());

  BreakAndContinueInfo break_info(stmt, scope(), 5);
  break_info.set_continue_block(continue_block);
  {
    BreakAndContinueScope push(&break_info, this);
    CHECK_BAILOUT(VisitLoopBody(stmt, loop_entry));
  }

  HBasicBlock* body_exit =
      JoinContinue(stmt, current_block(), break_info.continue_block());

  if (body_exit != NULL) {
    set_current_block(body_exit);

    // Increment the index for the next iteration; the bounds check above
    // guarantees the add cannot overflow.
    HValue* current_index = Pop();
    HValue* increment =
        AddUncasted<HAdd>(current_index, graph()->GetConstant1());
    increment->ClearFlag(HValue::kCanOverflow);
    Push(increment);
    body_exit = current_block();
  }

  HBasicBlock* loop_exit = CreateLoop(stmt,
                                      loop_entry,
                                      body_exit,
                                      loop_successor,
                                      break_info.break_block());

  set_current_block(loop_exit);
}
5626
5627
// for-of statements are not supported; bail out of optimized compilation.
void HOptimizedGraphBuilder::VisitForOfStatement(ForOfStatement* stmt) {
  DCHECK(!HasStackOverflow());
  DCHECK(current_block() != NULL);
  DCHECK(current_block()->HasPredecessor());
  return Bailout(kForOfStatement);
}
5634
5635
// try-catch statements are not supported; bail out of optimized compilation.
void HOptimizedGraphBuilder::VisitTryCatchStatement(TryCatchStatement* stmt) {
  DCHECK(!HasStackOverflow());
  DCHECK(current_block() != NULL);
  DCHECK(current_block()->HasPredecessor());
  return Bailout(kTryCatchStatement);
}
5642
5643
// try-finally statements are not supported; bail out of optimized
// compilation.
void HOptimizedGraphBuilder::VisitTryFinallyStatement(
    TryFinallyStatement* stmt) {
  DCHECK(!HasStackOverflow());
  DCHECK(current_block() != NULL);
  DCHECK(current_block()->HasPredecessor());
  return Bailout(kTryFinallyStatement);
}
5651
5652
// debugger statements are not supported; bail out of optimized compilation.
void HOptimizedGraphBuilder::VisitDebuggerStatement(DebuggerStatement* stmt) {
  DCHECK(!HasStackOverflow());
  DCHECK(current_block() != NULL);
  DCHECK(current_block()->HasPredecessor());
  return Bailout(kDebuggerStatement);
}
5659
5660
// Case clauses are handled inline by the switch-statement visitor and are
// never visited on their own.
void HOptimizedGraphBuilder::VisitCaseClause(CaseClause* clause) {
  UNREACHABLE();
}
5664
5665
5666void HOptimizedGraphBuilder::VisitFunctionLiteral(FunctionLiteral* expr) {
5667 DCHECK(!HasStackOverflow());
5668 DCHECK(current_block() != NULL);
5669 DCHECK(current_block()->HasPredecessor());
5670 Handle<SharedFunctionInfo> shared_info = Compiler::GetSharedFunctionInfo(
5671 expr, current_info()->script(), top_info());
5672 // We also have a stack overflow if the recursive compilation did.
5673 if (HasStackOverflow()) return;
5674 // Use the fast case closure allocation code that allocates in new
Ben Murdoch61f157c2016-09-16 13:49:30 +01005675 // space for nested functions that don't need pretenuring.
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00005676 HConstant* shared_info_value = Add<HConstant>(shared_info);
5677 HInstruction* instr;
Ben Murdoch61f157c2016-09-16 13:49:30 +01005678 if (!expr->pretenure()) {
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00005679 FastNewClosureStub stub(isolate(), shared_info->language_mode(),
5680 shared_info->kind());
5681 FastNewClosureDescriptor descriptor(isolate());
5682 HValue* values[] = {context(), shared_info_value};
5683 HConstant* stub_value = Add<HConstant>(stub.GetCode());
Ben Murdochc5610432016-08-08 18:44:38 +01005684 instr = New<HCallWithDescriptor>(stub_value, 0, descriptor,
5685 ArrayVector(values));
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00005686 } else {
5687 Add<HPushArguments>(shared_info_value);
5688 Runtime::FunctionId function_id =
5689 expr->pretenure() ? Runtime::kNewClosure_Tenured : Runtime::kNewClosure;
5690 instr = New<HCallRuntime>(Runtime::FunctionForId(function_id), 1);
5691 }
5692 return ast_context()->ReturnInstruction(instr, expr->id());
5693}
5694
5695
// Class literals are not supported; bail out of optimized compilation.
void HOptimizedGraphBuilder::VisitClassLiteral(ClassLiteral* lit) {
  DCHECK(!HasStackOverflow());
  DCHECK(current_block() != NULL);
  DCHECK(current_block()->HasPredecessor());
  return Bailout(kClassLiteral);
}
5702
5703
// Native function literals are not supported; bail out of optimized
// compilation.
void HOptimizedGraphBuilder::VisitNativeFunctionLiteral(
    NativeFunctionLiteral* expr) {
  DCHECK(!HasStackOverflow());
  DCHECK(current_block() != NULL);
  DCHECK(current_block()->HasPredecessor());
  return Bailout(kNativeFunctionLiteral);
}
5711
5712
// Builds a do-expression: the statement block is visited first, then the
// result expression in the current AST context.
void HOptimizedGraphBuilder::VisitDoExpression(DoExpression* expr) {
  // NOTE(review): the RAII scope appears to mark that a do-expression is
  // being built for the duration of this visit — confirm its exact effect
  // against the DoExpressionScope definition.
  DoExpressionScope scope(this);
  DCHECK(!HasStackOverflow());
  DCHECK(current_block() != NULL);
  DCHECK(current_block()->HasPredecessor());
  CHECK_ALIVE(VisitBlock(expr->block()));
  Visit(expr->result());
}
5721
5722
5723void HOptimizedGraphBuilder::VisitConditional(Conditional* expr) {
5724 DCHECK(!HasStackOverflow());
5725 DCHECK(current_block() != NULL);
5726 DCHECK(current_block()->HasPredecessor());
5727 HBasicBlock* cond_true = graph()->CreateBasicBlock();
5728 HBasicBlock* cond_false = graph()->CreateBasicBlock();
5729 CHECK_BAILOUT(VisitForControl(expr->condition(), cond_true, cond_false));
5730
5731 // Visit the true and false subexpressions in the same AST context as the
5732 // whole expression.
5733 if (cond_true->HasPredecessor()) {
5734 cond_true->SetJoinId(expr->ThenId());
5735 set_current_block(cond_true);
5736 CHECK_BAILOUT(Visit(expr->then_expression()));
5737 cond_true = current_block();
5738 } else {
5739 cond_true = NULL;
5740 }
5741
5742 if (cond_false->HasPredecessor()) {
5743 cond_false->SetJoinId(expr->ElseId());
5744 set_current_block(cond_false);
5745 CHECK_BAILOUT(Visit(expr->else_expression()));
5746 cond_false = current_block();
5747 } else {
5748 cond_false = NULL;
5749 }
5750
5751 if (!ast_context()->IsTest()) {
5752 HBasicBlock* join = CreateJoin(cond_true, cond_false, expr->id());
5753 set_current_block(join);
5754 if (join != NULL && !ast_context()->IsEffect()) {
5755 return ast_context()->ReturnValue(Pop());
5756 }
5757 }
5758}
5759
5760
5761HOptimizedGraphBuilder::GlobalPropertyAccess
5762HOptimizedGraphBuilder::LookupGlobalProperty(Variable* var, LookupIterator* it,
5763 PropertyAccessType access_type) {
5764 if (var->is_this() || !current_info()->has_global_object()) {
5765 return kUseGeneric;
5766 }
5767
5768 switch (it->state()) {
5769 case LookupIterator::ACCESSOR:
5770 case LookupIterator::ACCESS_CHECK:
5771 case LookupIterator::INTERCEPTOR:
5772 case LookupIterator::INTEGER_INDEXED_EXOTIC:
5773 case LookupIterator::NOT_FOUND:
5774 return kUseGeneric;
5775 case LookupIterator::DATA:
5776 if (access_type == STORE && it->IsReadOnly()) return kUseGeneric;
Ben Murdochc5610432016-08-08 18:44:38 +01005777 if (!it->GetHolder<JSObject>()->IsJSGlobalObject()) return kUseGeneric;
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00005778 return kUseCell;
5779 case LookupIterator::JSPROXY:
5780 case LookupIterator::TRANSITION:
5781 UNREACHABLE();
5782 }
5783 UNREACHABLE();
5784 return kUseGeneric;
5785}
5786
5787
5788HValue* HOptimizedGraphBuilder::BuildContextChainWalk(Variable* var) {
5789 DCHECK(var->IsContextSlot());
5790 HValue* context = environment()->context();
5791 int length = scope()->ContextChainLength(var->scope());
5792 while (length-- > 0) {
5793 context = Add<HLoadNamedField>(
5794 context, nullptr,
5795 HObjectAccess::ForContextSlot(Context::PREVIOUS_INDEX));
5796 }
5797 return context;
5798}
5799
5800
// Emits a load of the referenced variable, dispatching on where the variable
// lives (global/unallocated, stack local/parameter, context slot, dynamic
// lookup).
void HOptimizedGraphBuilder::VisitVariableProxy(VariableProxy* expr) {
  DCHECK(!HasStackOverflow());
  DCHECK(current_block() != NULL);
  DCHECK(current_block()->HasPredecessor());
  Variable* variable = expr->var();
  switch (variable->location()) {
    case VariableLocation::GLOBAL:
    case VariableLocation::UNALLOCATED: {
      if (IsLexicalVariableMode(variable->mode())) {
        // TODO(rossberg): should this be an DCHECK?
        return Bailout(kReferenceToGlobalLexicalVariable);
      }
      // Handle known global constants like 'undefined' specially to avoid a
      // load from a global cell for them.
      Handle<Object> constant_value =
          isolate()->factory()->GlobalConstantFor(variable->name());
      if (!constant_value.is_null()) {
        HConstant* instr = New<HConstant>(constant_value);
        return ast_context()->ReturnInstruction(instr, expr->id());
      }

      Handle<JSGlobalObject> global(current_info()->global_object());

      // Lookup in script contexts.
      {
        Handle<ScriptContextTable> script_contexts(
            global->native_context()->script_context_table());
        ScriptContextTable::LookupResult lookup;
        if (ScriptContextTable::Lookup(script_contexts, variable->name(),
                                       &lookup)) {
          Handle<Context> script_context = ScriptContextTable::GetContext(
              script_contexts, lookup.context_index);
          Handle<Object> current_value =
              FixedArray::get(*script_context, lookup.slot_index, isolate());

          // If the value is not the hole, it will stay initialized,
          // so no need to generate a check.
          if (current_value->IsTheHole(isolate())) {
            return Bailout(kReferenceToUninitializedVariable);
          }
          HInstruction* result = New<HLoadNamedField>(
              Add<HConstant>(script_context), nullptr,
              HObjectAccess::ForContextSlot(lookup.slot_index));
          return ast_context()->ReturnInstruction(result, expr->id());
        }
      }

      LookupIterator it(global, variable->name(), LookupIterator::OWN);
      GlobalPropertyAccess type = LookupGlobalProperty(variable, &it, LOAD);

      if (type == kUseCell) {
        // Depend on the cell staying in its current state; a change
        // invalidates this code.
        Handle<PropertyCell> cell = it.GetPropertyCell();
        top_info()->dependencies()->AssumePropertyCell(cell);
        auto cell_type = it.property_details().cell_type();
        if (cell_type == PropertyCellType::kConstant ||
            cell_type == PropertyCellType::kUndefined) {
          // The cell's value is constant, so embed it directly.
          Handle<Object> constant_object(cell->value(), isolate());
          if (constant_object->IsConsString()) {
            constant_object =
                String::Flatten(Handle<String>::cast(constant_object));
          }
          HConstant* constant = New<HConstant>(constant_object);
          return ast_context()->ReturnInstruction(constant, expr->id());
        } else {
          // Load from the cell, narrowing the representation where the
          // cell's constant-type information allows it.
          auto access = HObjectAccess::ForPropertyCellValue();
          UniqueSet<Map>* field_maps = nullptr;
          if (cell_type == PropertyCellType::kConstantType) {
            switch (cell->GetConstantType()) {
              case PropertyCellConstantType::kSmi:
                access = access.WithRepresentation(Representation::Smi());
                break;
              case PropertyCellConstantType::kStableMap: {
                // Check that the map really is stable. The heap object could
                // have mutated without the cell updating state. In that case,
                // make no promises about the loaded value except that it's a
                // heap object.
                access =
                    access.WithRepresentation(Representation::HeapObject());
                Handle<Map> map(HeapObject::cast(cell->value())->map());
                if (map->is_stable()) {
                  field_maps = new (zone())
                      UniqueSet<Map>(Unique<Map>::CreateImmovable(map), zone());
                }
                break;
              }
            }
          }
          HConstant* cell_constant = Add<HConstant>(cell);
          HLoadNamedField* instr;
          if (field_maps == nullptr) {
            instr = New<HLoadNamedField>(cell_constant, nullptr, access);
          } else {
            instr = New<HLoadNamedField>(cell_constant, nullptr, access,
                                         field_maps, HType::HeapObject());
          }
          // This load depends on the cell's value, not on in-object fields.
          instr->ClearDependsOnFlag(kInobjectFields);
          instr->SetDependsOnFlag(kGlobalVars);
          return ast_context()->ReturnInstruction(instr, expr->id());
        }
      } else {
        // Generic global load with type feedback.
        Handle<TypeFeedbackVector> vector(current_feedback_vector(), isolate());
        HLoadGlobalGeneric* instr = New<HLoadGlobalGeneric>(
            variable->name(), ast_context()->typeof_mode(), vector,
            expr->VariableFeedbackSlot());
        return ast_context()->ReturnInstruction(instr, expr->id());
      }
    }

    case VariableLocation::PARAMETER:
    case VariableLocation::LOCAL: {
      HValue* value = LookupAndMakeLive(variable);
      if (value == graph()->GetConstantHole()) {
        // Reads of uninitialized (hole) lexical bindings bail out.
        DCHECK(IsDeclaredVariableMode(variable->mode()) &&
               variable->mode() != VAR);
        return Bailout(kReferenceToUninitializedVariable);
      }
      return ast_context()->ReturnValue(value);
    }

    case VariableLocation::CONTEXT: {
      HValue* context = BuildContextChainWalk(variable);
      HLoadContextSlot::Mode mode;
      switch (variable->mode()) {
        case LET:
        case CONST:
          // Lexical slots may still hold the hole; deoptimize in that case.
          mode = HLoadContextSlot::kCheckDeoptimize;
          break;
        default:
          mode = HLoadContextSlot::kNoCheck;
          break;
      }
      HLoadContextSlot* instr =
          new(zone()) HLoadContextSlot(context, variable->index(), mode);
      return ast_context()->ReturnInstruction(instr, expr->id());
    }

    case VariableLocation::LOOKUP:
      return Bailout(kReferenceToAVariableWhichRequiresDynamicLookup);
  }
}
5941
5942
// A literal expression materializes as an embedded constant.
void HOptimizedGraphBuilder::VisitLiteral(Literal* expr) {
  DCHECK(!HasStackOverflow());
  DCHECK(current_block() != NULL);
  DCHECK(current_block()->HasPredecessor());
  HConstant* instr = New<HConstant>(expr->value());
  return ast_context()->ReturnInstruction(instr, expr->id());
}
5950
5951
5952void HOptimizedGraphBuilder::VisitRegExpLiteral(RegExpLiteral* expr) {
5953 DCHECK(!HasStackOverflow());
5954 DCHECK(current_block() != NULL);
5955 DCHECK(current_block()->HasPredecessor());
5956 Callable callable = CodeFactory::FastCloneRegExp(isolate());
5957 HValue* values[] = {
5958 context(), AddThisFunction(), Add<HConstant>(expr->literal_index()),
5959 Add<HConstant>(expr->pattern()), Add<HConstant>(expr->flags())};
5960 HConstant* stub_value = Add<HConstant>(callable.code());
Ben Murdochc5610432016-08-08 18:44:38 +01005961 HInstruction* instr = New<HCallWithDescriptor>(
5962 stub_value, 0, callable.descriptor(), ArrayVector(values));
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00005963 return ast_context()->ReturnInstruction(instr, expr->id());
5964}
5965
5966
5967static bool CanInlinePropertyAccess(Handle<Map> map) {
5968 if (map->instance_type() == HEAP_NUMBER_TYPE) return true;
5969 if (map->instance_type() < FIRST_NONSTRING_TYPE) return true;
5970 return map->IsJSObjectMap() && !map->is_dictionary_map() &&
5971 !map->has_named_interceptor() &&
5972 // TODO(verwaest): Whitelist contexts to which we have access.
5973 !map->is_access_check_needed();
5974}
5975
5976
// Determines whether the given array or object literal boilerplate satisfies
// all limits to be considered for fast deep-copying and computes the total
// size of all objects that are part of the graph.
//
// |max_depth| bounds the recursion into nested boilerplate objects, and
// |*max_properties| is decremented for each element/property encountered so a
// single budget is shared across the whole object graph.
static bool IsFastLiteral(Handle<JSObject> boilerplate,
                          int max_depth,
                          int* max_properties) {
  // A deprecated map must be migrated before its layout can be trusted.
  if (boilerplate->map()->is_deprecated() &&
      !JSObject::TryMigrateInstance(boilerplate)) {
    return false;
  }

  DCHECK(max_depth >= 0 && *max_properties >= 0);
  if (max_depth == 0) return false;

  Isolate* isolate = boilerplate->GetIsolate();
  Handle<FixedArrayBase> elements(boilerplate->elements());
  // COW element arrays are shared, not copied, so they don't count against
  // the budget.
  if (elements->length() > 0 &&
      elements->map() != isolate->heap()->fixed_cow_array_map()) {
    if (boilerplate->HasFastSmiOrObjectElements()) {
      Handle<FixedArray> fast_elements = Handle<FixedArray>::cast(elements);
      int length = elements->length();
      for (int i = 0; i < length; i++) {
        if ((*max_properties)-- == 0) return false;
        Handle<Object> value(fast_elements->get(i), isolate);
        if (value->IsJSObject()) {
          // Recurse into nested object-valued elements.
          Handle<JSObject> value_object = Handle<JSObject>::cast(value);
          if (!IsFastLiteral(value_object,
                             max_depth - 1,
                             max_properties)) {
            return false;
          }
        }
      }
    } else if (!boilerplate->HasFastDoubleElements()) {
      // Only fast smi/object and fast double element kinds qualify.
      return false;
    }
  }

  // Out-of-object (backing-store) properties disqualify the boilerplate;
  // in-object properties are checked descriptor by descriptor below.
  Handle<FixedArray> properties(boilerplate->properties());
  if (properties->length() > 0) {
    return false;
  } else {
    Handle<DescriptorArray> descriptors(
        boilerplate->map()->instance_descriptors());
    int limit = boilerplate->map()->NumberOfOwnDescriptors();
    for (int i = 0; i < limit; i++) {
      PropertyDetails details = descriptors->GetDetails(i);
      if (details.type() != DATA) continue;
      if ((*max_properties)-- == 0) return false;
      FieldIndex field_index = FieldIndex::ForDescriptor(boilerplate->map(), i);
      if (boilerplate->IsUnboxedDoubleField(field_index)) continue;
      Handle<Object> value(boilerplate->RawFastPropertyAt(field_index),
                           isolate);
      if (value->IsJSObject()) {
        // Recurse into nested object-valued properties.
        Handle<JSObject> value_object = Handle<JSObject>::cast(value);
        if (!IsFastLiteral(value_object,
                           max_depth - 1,
                           max_properties)) {
          return false;
        }
      }
    }
  }
  return true;
}
6042
6043
// Translates an object literal expression into Hydrogen IR. If a boilerplate
// object exists in the closure's literals array and is shallow enough (see
// IsFastLiteral), its deep copy is emitted inline via BuildFastLiteral;
// otherwise the %CreateObjectLiteral runtime function is called. Afterwards,
// each non-compile-time property value is evaluated and stored into the
// literal object.
void HOptimizedGraphBuilder::VisitObjectLiteral(ObjectLiteral* expr) {
  DCHECK(!HasStackOverflow());
  DCHECK(current_block() != NULL);
  DCHECK(current_block()->HasPredecessor());

  Handle<JSFunction> closure = function_state()->compilation_info()->closure();
  HInstruction* literal;

  // Check whether to use fast or slow deep-copying for boilerplate.
  int max_properties = kMaxFastLiteralProperties;
  Handle<Object> literals_cell(
      closure->literals()->literal(expr->literal_index()), isolate());
  Handle<AllocationSite> site;
  Handle<JSObject> boilerplate;
  // An undefined cell means no boilerplate has been created for this literal
  // yet; we then must take the runtime-call path below.
  if (!literals_cell->IsUndefined(isolate())) {
    // Retrieve the boilerplate
    site = Handle<AllocationSite>::cast(literals_cell);
    boilerplate = Handle<JSObject>(JSObject::cast(site->transition_info()),
                                   isolate());
  }

  if (!boilerplate.is_null() &&
      IsFastLiteral(boilerplate, kMaxFastLiteralDepth, &max_properties)) {
    // Fast path: emit an inline deep copy of the boilerplate object.
    AllocationSiteUsageContext site_context(isolate(), site, false);
    site_context.EnterNewScope();
    literal = BuildFastLiteral(boilerplate, &site_context);
    site_context.ExitScope(site, boilerplate);
  } else {
    // Slow path: call into the runtime to create the literal object.
    NoObservableSideEffectsScope no_effects(this);
    Handle<FixedArray> constant_properties = expr->constant_properties();
    int literal_index = expr->literal_index();
    int flags = expr->ComputeFlags(true);

    Add<HPushArguments>(AddThisFunction(), Add<HConstant>(literal_index),
                        Add<HConstant>(constant_properties),
                        Add<HConstant>(flags));

    Runtime::FunctionId function_id = Runtime::kCreateObjectLiteral;
    literal = Add<HCallRuntime>(Runtime::FunctionForId(function_id), 4);
  }

  // The object is expected in the bailout environment during computation
  // of the property values and is the value of the entire expression.
  Push(literal);
  for (int i = 0; i < expr->properties()->length(); i++) {
    ObjectLiteral::Property* property = expr->properties()->at(i);
    if (property->is_computed_name()) return Bailout(kComputedPropertyName);
    // Compile-time values are already part of the boilerplate.
    if (property->IsCompileTimeValue()) continue;

    Literal* key = property->key()->AsLiteral();
    Expression* value = property->value();

    switch (property->kind()) {
      case ObjectLiteral::Property::MATERIALIZED_LITERAL:
        DCHECK(!CompileTimeValue::IsCompileTimeValue(value));
        // Fall through.
      case ObjectLiteral::Property::COMPUTED:
        // It is safe to use [[Put]] here because the boilerplate already
        // contains computed properties with an uninitialized value.
        if (key->value()->IsInternalizedString()) {
          if (property->emit_store()) {
            CHECK_ALIVE(VisitForValue(value));
            HValue* value = Pop();

            Handle<Map> map = property->GetReceiverType();
            Handle<String> name = key->AsPropertyName();
            HValue* store;
            FeedbackVectorSlot slot = property->GetSlot();
            if (map.is_null()) {
              // If we don't know the monomorphic type, do a generic store.
              CHECK_ALIVE(store = BuildNamedGeneric(STORE, NULL, slot, literal,
                                                    name, value));
            } else {
              PropertyAccessInfo info(this, STORE, map, name);
              if (info.CanAccessMonomorphic()) {
                HValue* checked_literal = Add<HCheckMaps>(literal, map);
                DCHECK(!info.IsAccessorConstant());
                store = BuildMonomorphicAccess(
                    &info, literal, checked_literal, value,
                    BailoutId::None(), BailoutId::None());
              } else {
                // Monomorphic access is not possible for this map; fall back
                // to a generic named store.
                CHECK_ALIVE(store = BuildNamedGeneric(STORE, NULL, slot,
                                                      literal, name, value));
              }
            }
            if (store->IsInstruction()) {
              AddInstruction(HInstruction::cast(store));
            }
            DCHECK(store->HasObservableSideEffects());
            Add<HSimulate>(key->id(), REMOVABLE_SIMULATE);

            // Add [[HomeObject]] to function literals.
            if (FunctionLiteral::NeedsHomeObject(property->value())) {
              Handle<Symbol> sym = isolate()->factory()->home_object_symbol();
              HInstruction* store_home = BuildNamedGeneric(
                  STORE, NULL, property->GetSlot(1), value, sym, literal);
              AddInstruction(store_home);
              DCHECK(store_home->HasObservableSideEffects());
              Add<HSimulate>(property->value()->id(), REMOVABLE_SIMULATE);
            }
          } else {
            // The store is elided; evaluate the value for side effects only.
            CHECK_ALIVE(VisitForEffect(value));
          }
          break;
        }
        // Fall through.
      case ObjectLiteral::Property::PROTOTYPE:
      case ObjectLiteral::Property::SETTER:
      case ObjectLiteral::Property::GETTER:
        return Bailout(kObjectLiteralWithComplexProperty);
      default: UNREACHABLE();
    }
  }

  return ast_context()->ReturnValue(Pop());
}
6160
6161
// Translates an array literal expression into Hydrogen IR. Mirrors
// VisitObjectLiteral: either inline-copies a fast boilerplate array or calls
// the %CreateArrayLiteral runtime function, then stores each non-constant
// element into the result.
void HOptimizedGraphBuilder::VisitArrayLiteral(ArrayLiteral* expr) {
  DCHECK(!HasStackOverflow());
  DCHECK(current_block() != NULL);
  DCHECK(current_block()->HasPredecessor());
  ZoneList<Expression*>* subexprs = expr->values();
  int length = subexprs->length();
  HInstruction* literal;

  Handle<AllocationSite> site;
  Handle<LiteralsArray> literals(environment()->closure()->literals(),
                                 isolate());
  Handle<Object> literals_cell(literals->literal(expr->literal_index()),
                               isolate());
  Handle<JSObject> boilerplate_object;
  // An undefined cell means no boilerplate has been created for this literal
  // yet; we then must take the runtime-call path below.
  if (!literals_cell->IsUndefined(isolate())) {
    DCHECK(literals_cell->IsAllocationSite());
    site = Handle<AllocationSite>::cast(literals_cell);
    boilerplate_object = Handle<JSObject>(
        JSObject::cast(site->transition_info()), isolate());
  }

  // Check whether to use fast or slow deep-copying for boilerplate.
  int max_properties = kMaxFastLiteralProperties;
  if (!boilerplate_object.is_null() &&
      IsFastLiteral(boilerplate_object, kMaxFastLiteralDepth,
                    &max_properties)) {
    DCHECK(site->SitePointsToLiteral());
    // Fast path: emit an inline deep copy of the boilerplate array.
    AllocationSiteUsageContext site_context(isolate(), site, false);
    site_context.EnterNewScope();
    literal = BuildFastLiteral(boilerplate_object, &site_context);
    site_context.ExitScope(site, boilerplate_object);
  } else {
    // Slow path: call into the runtime to create the literal array.
    NoObservableSideEffectsScope no_effects(this);
    Handle<FixedArray> constants = expr->constant_elements();
    int literal_index = expr->literal_index();
    int flags = expr->ComputeFlags(true);

    Add<HPushArguments>(AddThisFunction(), Add<HConstant>(literal_index),
                        Add<HConstant>(constants), Add<HConstant>(flags));

    Runtime::FunctionId function_id = Runtime::kCreateArrayLiteral;
    literal = Add<HCallRuntime>(Runtime::FunctionForId(function_id), 4);

    // Register to deopt if the boilerplate ElementsKind changes.
    if (!site.is_null()) {
      top_info()->dependencies()->AssumeTransitionStable(site);
    }
  }

  // The array is expected in the bailout environment during computation
  // of the property values and is the value of the entire expression.
  Push(literal);

  HInstruction* elements = NULL;

  for (int i = 0; i < length; i++) {
    Expression* subexpr = subexprs->at(i);
    DCHECK(!subexpr->IsSpread());

    // If the subexpression is a literal or a simple materialized literal it
    // is already set in the cloned array.
    if (CompileTimeValue::IsCompileTimeValue(subexpr)) continue;

    CHECK_ALIVE(VisitForValue(subexpr));
    HValue* value = Pop();
    if (!Smi::IsValid(i)) return Bailout(kNonSmiKeyInArrayLiteral);

    // Reload the elements backing store; the subexpression may have had
    // arbitrary side effects.
    elements = AddLoadElements(literal);

    HValue* key = Add<HConstant>(i);

    if (!boilerplate_object.is_null()) {
      // Boilerplate known: store directly with its elements kind.
      ElementsKind boilerplate_elements_kind =
          boilerplate_object->GetElementsKind();
      switch (boilerplate_elements_kind) {
        case FAST_SMI_ELEMENTS:
        case FAST_HOLEY_SMI_ELEMENTS:
        case FAST_ELEMENTS:
        case FAST_HOLEY_ELEMENTS:
        case FAST_DOUBLE_ELEMENTS:
        case FAST_HOLEY_DOUBLE_ELEMENTS: {
          Add<HStoreKeyed>(elements, key, value, nullptr,
                           boilerplate_elements_kind);
          break;
        }
        default:
          UNREACHABLE();
          break;
      }
    } else {
      // No boilerplate: fall back to a generic keyed store.
      HInstruction* instr = BuildKeyedGeneric(
          STORE, expr, expr->LiteralFeedbackSlot(), literal, key, value);
      AddInstruction(instr);
    }

    Add<HSimulate>(expr->GetIdForElement(i));
  }

  return ast_context()->ReturnValue(Pop());
}
6262
6263
6264HCheckMaps* HOptimizedGraphBuilder::AddCheckMap(HValue* object,
6265 Handle<Map> map) {
6266 BuildCheckHeapObject(object);
6267 return Add<HCheckMaps>(object, map);
6268}
6269
6270
// Builds the load of the data field described by |info| from
// |checked_object|. Constant-folds loads of immutable (read-only and
// non-configurable) properties on constant receivers, and unwraps the
// HeapNumber box for double fields that are not stored unboxed.
HInstruction* HOptimizedGraphBuilder::BuildLoadNamedField(
    PropertyAccessInfo* info,
    HValue* checked_object) {
  // See if this is a load for an immutable property
  if (checked_object->ActualValue()->IsConstant()) {
    Handle<Object> object(
        HConstant::cast(checked_object->ActualValue())->handle(isolate()));

    if (object->IsJSObject()) {
      LookupIterator it(object, info->name(),
                        LookupIterator::OWN_SKIP_INTERCEPTOR);
      Handle<Object> value = JSReceiver::GetDataProperty(&it);
      // Read-only and non-configurable means the value can never change, so
      // it is safe to embed it as a constant.
      if (it.IsFound() && it.IsReadOnly() && !it.IsConfigurable()) {
        return New<HConstant>(value);
      }
    }
  }

  HObjectAccess access = info->access();
  if (access.representation().IsDouble() &&
      (!FLAG_unbox_double_fields || !access.IsInobject())) {
    // Load the heap number.
    checked_object = Add<HLoadNamedField>(
        checked_object, nullptr,
        access.WithRepresentation(Representation::Tagged()));
    // Load the double value from it.
    access = HObjectAccess::ForHeapNumberValue();
  }

  SmallMapList* map_list = info->field_maps();
  if (map_list->length() == 0) {
    // No stable field maps known; emit a plain load.
    return New<HLoadNamedField>(checked_object, checked_object, access);
  }

  // Attach the set of stable field maps to the load so later passes can
  // reason about the map of the loaded value.
  UniqueSet<Map>* maps = new(zone()) UniqueSet<Map>(map_list->length(), zone());
  for (int i = 0; i < map_list->length(); ++i) {
    maps->Add(Unique<Map>::CreateImmovable(map_list->at(i)), zone());
  }
  return New<HLoadNamedField>(
      checked_object, checked_object, access, maps, info->field_type());
}
6312
6313
// Builds the store of |value| into the data field described by |info| on
// |checked_object|. Boxed double fields are handled by either allocating a
// fresh mutable HeapNumber (transitioning store) or writing into the
// existing box. Transitioning stores additionally record the transition map
// on the emitted instruction.
HInstruction* HOptimizedGraphBuilder::BuildStoreNamedField(
    PropertyAccessInfo* info,
    HValue* checked_object,
    HValue* value) {
  bool transition_to_field = info->IsTransition();
  // TODO(verwaest): Move this logic into PropertyAccessInfo.
  HObjectAccess field_access = info->access();

  HStoreNamedField *instr;
  if (field_access.representation().IsDouble() &&
      (!FLAG_unbox_double_fields || !field_access.IsInobject())) {
    // The double is stored boxed; the field itself holds a tagged pointer
    // to a (mutable) HeapNumber.
    HObjectAccess heap_number_access =
        field_access.WithRepresentation(Representation::Tagged());
    if (transition_to_field) {
      // The store requires a mutable HeapNumber to be allocated.
      NoObservableSideEffectsScope no_side_effects(this);
      HInstruction* heap_number_size = Add<HConstant>(HeapNumber::kSize);

      // TODO(hpayer): Allocation site pretenuring support.
      HInstruction* heap_number =
          Add<HAllocate>(heap_number_size, HType::HeapObject(), NOT_TENURED,
                         MUTABLE_HEAP_NUMBER_TYPE, graph()->GetConstant0());
      AddStoreMapConstant(
          heap_number, isolate()->factory()->mutable_heap_number_map());
      Add<HStoreNamedField>(heap_number, HObjectAccess::ForHeapNumberValue(),
                            value);
      // Store the freshly allocated box into the field.
      instr = New<HStoreNamedField>(checked_object->ActualValue(),
                                    heap_number_access,
                                    heap_number);
    } else {
      // Already holds a HeapNumber; load the box and write its value field.
      HInstruction* heap_number =
          Add<HLoadNamedField>(checked_object, nullptr, heap_number_access);
      instr = New<HStoreNamedField>(heap_number,
                                    HObjectAccess::ForHeapNumberValue(),
                                    value, STORE_TO_INITIALIZED_ENTRY);
    }
  } else {
    if (field_access.representation().IsHeapObject()) {
      BuildCheckHeapObject(value);
    }

    if (!info->field_maps()->is_empty()) {
      DCHECK(field_access.representation().IsHeapObject());
      // Guard the declared field maps on the stored value.
      value = Add<HCheckMaps>(value, info->field_maps());
    }

    // This is a normal store.
    instr = New<HStoreNamedField>(
        checked_object->ActualValue(), field_access, value,
        transition_to_field ? INITIALIZING_STORE : STORE_TO_INITIALIZED_ENTRY);
  }

  if (transition_to_field) {
    Handle<Map> transition(info->transition());
    DCHECK(!transition->is_deprecated());
    instr->SetTransition(Add<HConstant>(transition));
  }
  return instr;
}
6374
Ben Murdoch097c5b22016-05-18 11:27:45 +01006375Handle<FieldType>
6376HOptimizedGraphBuilder::PropertyAccessInfo::GetFieldTypeFromMap(
6377 Handle<Map> map) const {
6378 DCHECK(IsFound());
6379 DCHECK(number_ < map->NumberOfOwnDescriptors());
6380 return handle(map->instance_descriptors()->GetFieldType(number_), isolate());
6381}
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00006382
// Tests whether |info| describes an access that can be merged with this one
// into a single polymorphic case (same behavior and same field layout). On a
// successful merge of loads, |info|'s field maps and representation are
// widened in place to cover both accesses.
bool HOptimizedGraphBuilder::PropertyAccessInfo::IsCompatible(
    PropertyAccessInfo* info) {
  if (!CanInlinePropertyAccess(map_)) return false;

  // Currently only handle Type::Number as a polymorphic case.
  // TODO(verwaest): Support monomorphic handling of numbers with a HCheckNumber
  // instruction.
  if (IsNumberType()) return false;

  // Values are only compatible for monomorphic load if they all behave the same
  // regarding value wrappers.
  if (IsValueWrapped() != info->IsValueWrapped()) return false;

  if (!LookupDescriptor()) return false;

  if (!IsFound()) {
    // Two misses are compatible only if they would continue the lookup on
    // the same prototype.
    return (!info->IsFound() || info->has_holder()) &&
        map()->prototype() == info->map()->prototype();
  }

  // Mismatch if the other access info found the property in the prototype
  // chain.
  if (info->has_holder()) return false;

  if (IsAccessorConstant()) {
    return accessor_.is_identical_to(info->accessor_) &&
        api_holder_.is_identical_to(info->api_holder_);
  }

  if (IsDataConstant()) {
    return constant_.is_identical_to(info->constant_);
  }

  DCHECK(IsData());
  if (!info->IsData()) return false;

  Representation r = access_.representation();
  if (IsLoad()) {
    if (!info->access_.representation().IsCompatibleForLoad(r)) return false;
  } else {
    if (!info->access_.representation().IsCompatibleForStore(r)) return false;
  }
  if (info->access_.offset() != access_.offset()) return false;
  if (info->access_.IsInobject() != access_.IsInobject()) return false;
  if (IsLoad()) {
    if (field_maps_.is_empty()) {
      // This access has no field map restriction, so the merged access
      // cannot have one either.
      info->field_maps_.Clear();
    } else if (!info->field_maps_.is_empty()) {
      // Merge as the union of both field map sets, kept sorted.
      for (int i = 0; i < field_maps_.length(); ++i) {
        info->field_maps_.AddMapIfMissing(field_maps_.at(i), info->zone());
      }
      info->field_maps_.Sort();
    }
  } else {
    // We can only merge stores that agree on their field maps. The comparison
    // below is safe, since we keep the field maps sorted.
    if (field_maps_.length() != info->field_maps_.length()) return false;
    for (int i = 0; i < field_maps_.length(); ++i) {
      if (!field_maps_.at(i).is_identical_to(info->field_maps_.at(i))) {
        return false;
      }
    }
  }
  info->GeneralizeRepresentation(r);
  info->field_type_ = info->field_type_.Combine(field_type_);
  return true;
}
6450
6451
6452bool HOptimizedGraphBuilder::PropertyAccessInfo::LookupDescriptor() {
6453 if (!map_->IsJSObjectMap()) return true;
6454 LookupDescriptor(*map_, *name_);
6455 return LoadResult(map_);
6456}
6457
6458
// Populates this access info from the descriptor found on |map|: the field
// access for data properties, the accessor for accessor-constant
// properties, or the constant for data-constant properties. Returns false
// when the access cannot be inlined (e.g. a store to a read-only property,
// or an accessor that is neither a JSFunction nor a FunctionTemplateInfo).
bool HOptimizedGraphBuilder::PropertyAccessInfo::LoadResult(Handle<Map> map) {
  if (!IsLoad() && IsProperty() && IsReadOnly()) {
    // Stores to read-only properties are not inlined.
    return false;
  }

  if (IsData()) {
    // Construct the object field access.
    int index = GetLocalFieldIndexFromMap(map);
    access_ = HObjectAccess::ForField(map, index, representation(), name_);

    // Load field map for heap objects.
    return LoadFieldMaps(map);
  } else if (IsAccessorConstant()) {
    Handle<Object> accessors = GetAccessorsFromMap(map);
    if (!accessors->IsAccessorPair()) return false;
    // Pick the getter or setter depending on the access direction.
    Object* raw_accessor =
        IsLoad() ? Handle<AccessorPair>::cast(accessors)->getter()
                 : Handle<AccessorPair>::cast(accessors)->setter();
    if (!raw_accessor->IsJSFunction() &&
        !raw_accessor->IsFunctionTemplateInfo())
      return false;
    Handle<Object> accessor = handle(HeapObject::cast(raw_accessor));
    CallOptimization call_optimization(accessor);
    if (call_optimization.is_simple_api_call()) {
      // Remember the API holder so the call can be dispatched directly.
      CallOptimization::HolderLookup holder_lookup;
      api_holder_ =
          call_optimization.LookupHolderOfExpectedType(map_, &holder_lookup);
    }
    accessor_ = accessor;
  } else if (IsDataConstant()) {
    constant_ = GetConstantFromMap(map);
  }

  return true;
}
6494
6495
// Derives the set of stable maps (and the field HType) implied by the
// declared field type of the property on |map|. Returns false when a store
// would be unsafe because the field type has been cleared.
bool HOptimizedGraphBuilder::PropertyAccessInfo::LoadFieldMaps(
    Handle<Map> map) {
  // Clear any previously collected field maps/type.
  field_maps_.Clear();
  field_type_ = HType::Tagged();

  // Figure out the field type from the accessor map.
  Handle<FieldType> field_type = GetFieldTypeFromMap(map);

  // Collect the (stable) maps from the field type.
  if (field_type->IsClass()) {
    DCHECK(access_.representation().IsHeapObject());
    Handle<Map> field_map = field_type->AsClass();
    // Only a stable map is useful; an unstable one may change at any time.
    if (field_map->is_stable()) {
      field_maps_.Add(field_map, zone());
    }
  }

  if (field_maps_.is_empty()) {
    // Store is not safe if the field map was cleared.
    return IsLoad() || !field_type->IsNone();
  }

  // Determine field HType from field type.
  field_type_ = HType::FromFieldType(field_type, zone());
  DCHECK(field_type_.IsHeapObject());

  // Add dependency on the map that introduced the field.
  top_info()->dependencies()->AssumeFieldType(GetFieldOwnerFromMap(map));
  return true;
}
6527
6528
// Walks the prototype chain of the receiver map looking for the property.
// When found, |holder_| is set to the prototype object holding it and the
// descriptor is loaded via LoadResult. Returns false when some prototype
// cannot be inlined, forcing the caller onto the generic path.
bool HOptimizedGraphBuilder::PropertyAccessInfo::LookupInPrototypes() {
  Handle<Map> map = this->map();
  if (name_->IsPrivate()) {
    // Private names are not looked up on the prototype chain; the miss is
    // only reliable if no hidden prototype could provide the property.
    NotFound();
    return !map->has_hidden_prototype();
  }

  while (map->prototype()->IsJSObject()) {
    holder_ = handle(JSObject::cast(map->prototype()));
    if (holder_->map()->is_deprecated()) {
      // Try to bring the holder up to date before inspecting its map.
      JSObject::TryMigrateInstance(holder_);
    }
    map = Handle<Map>(holder_->map());
    if (!CanInlinePropertyAccess(map)) {
      NotFound();
      return false;
    }
    LookupDescriptor(*map, *name_);
    if (IsFound()) return LoadResult(map);
  }

  // Not found anywhere; the miss is only reliable when the chain ended on a
  // non-JSReceiver prototype (e.g. null).
  NotFound();
  return !map->prototype()->IsJSReceiver();
}
6553
6554
6555bool HOptimizedGraphBuilder::PropertyAccessInfo::IsIntegerIndexedExotic() {
6556 InstanceType instance_type = map_->instance_type();
6557 return instance_type == JS_TYPED_ARRAY_TYPE && name_->IsString() &&
6558 IsSpecialIndex(isolate()->unicode_cache(), String::cast(*name_));
6559}
6560
6561
// Determines whether this property access can be compiled as a monomorphic
// inline access on |map_|, resolving the property on the map itself, on its
// prototype chain, or (for stores of a new property) via a map transition.
bool HOptimizedGraphBuilder::PropertyAccessInfo::CanAccessMonomorphic() {
  if (!CanInlinePropertyAccess(map_)) return false;
  // Special JSObject field accessors (handled elsewhere) are load-only.
  if (IsJSObjectFieldAccessor()) return IsLoad();
  // "prototype" on a constructor function is load-only as well.
  if (map_->IsJSFunctionMap() && map_->is_constructor() &&
      !map_->has_non_instance_prototype() &&
      name_.is_identical_to(isolate()->factory()->prototype_string())) {
    return IsLoad();
  }
  if (!LookupDescriptor()) return false;
  if (IsFound()) return IsLoad() || !IsReadOnly();
  if (IsIntegerIndexedExotic()) return false;
  if (!LookupInPrototypes()) return false;
  if (IsLoad()) return true;

  if (IsAccessorConstant()) return true;
  // Store of a property not present yet: look for an existing transition.
  LookupTransition(*map_, *name_, NONE);
  if (IsTransitionToData() && map_->unused_property_fields() > 0) {
    // Construct the object field access.
    int descriptor = transition()->LastAdded();
    int index =
        transition()->instance_descriptors()->GetFieldIndex(descriptor) -
        map_->GetInObjectProperties();
    PropertyDetails details =
        transition()->instance_descriptors()->GetDetails(descriptor);
    Representation representation = details.representation();
    access_ = HObjectAccess::ForField(map_, index, representation, name_);

    // Load field map for heap objects.
    return LoadFieldMaps(transition());
  }
  return false;
}
6594
6595
// Determines whether the access can still be compiled monomorphically even
// though several receiver maps were recorded, i.e. whether every map in
// |maps| behaves identically for this property access.
bool HOptimizedGraphBuilder::PropertyAccessInfo::CanAccessAsMonomorphic(
    SmallMapList* maps) {
  DCHECK(map_.is_identical_to(maps->first()));
  if (!CanAccessMonomorphic()) return false;
  STATIC_ASSERT(kMaxLoadPolymorphism == kMaxStorePolymorphism);
  if (maps->length() > kMaxLoadPolymorphism) return false;
  HObjectAccess access = HObjectAccess::ForMap();  // bogus default
  if (GetJSObjectFieldAccess(&access)) {
    // Special JSObject field accessors: every map must yield exactly the
    // same field access.
    for (int i = 1; i < maps->length(); ++i) {
      PropertyAccessInfo test_info(builder_, access_type_, maps->at(i), name_);
      HObjectAccess test_access = HObjectAccess::ForMap();  // bogus default
      if (!test_info.GetJSObjectFieldAccess(&test_access)) return false;
      if (!access.Equals(test_access)) return false;
    }
    return true;
  }

  // Currently only handle numbers as a polymorphic case.
  // TODO(verwaest): Support monomorphic handling of numbers with a HCheckNumber
  // instruction.
  if (IsNumberType()) return false;

  // Multiple maps cannot transition to the same target map.
  DCHECK(!IsLoad() || !IsTransition());
  if (IsTransition() && maps->length() > 1) return false;

  // All remaining maps must be pairwise compatible with this access.
  for (int i = 1; i < maps->length(); ++i) {
    PropertyAccessInfo test_info(builder_, access_type_, maps->at(i), name_);
    if (!test_info.IsCompatible(this)) return false;
  }

  return true;
}
6629
6630
6631Handle<Map> HOptimizedGraphBuilder::PropertyAccessInfo::map() {
6632 Handle<JSFunction> ctor;
6633 if (Map::GetConstructorFunction(
6634 map_, handle(current_info()->closure()->context()->native_context()))
6635 .ToHandle(&ctor)) {
6636 return handle(ctor->initial_map());
6637 }
6638 return map_;
6639}
6640
6641
6642static bool NeedsWrapping(Handle<Map> map, Handle<JSFunction> target) {
6643 return !map->IsJSObjectMap() &&
6644 is_sloppy(target->shared()->language_mode()) &&
6645 !target->shared()->native();
6646}
6647
6648
6649bool HOptimizedGraphBuilder::PropertyAccessInfo::NeedsWrappingFor(
6650 Handle<JSFunction> target) const {
6651 return NeedsWrapping(map_, target);
6652}
6653
6654
// Emits the monomorphic property access described by |info| on |object|
// (|checked_object| is the map-checked receiver). |value| is the stored
// value for stores and is ignored for loads. Returns the resulting value
// node, or NULL when an accessor was inlined (or inlining overflowed the
// stack) so the result arrives via the inlined function instead.
HValue* HOptimizedGraphBuilder::BuildMonomorphicAccess(
    PropertyAccessInfo* info, HValue* object, HValue* checked_object,
    HValue* value, BailoutId ast_id, BailoutId return_id,
    bool can_inline_accessor) {
  HObjectAccess access = HObjectAccess::ForMap();  // bogus default
  if (info->GetJSObjectFieldAccess(&access)) {
    // Special JSObject fields are plain loads.
    DCHECK(info->IsLoad());
    return New<HLoadNamedField>(object, checked_object, access);
  }

  // Loading "prototype" from a constructor function has its own
  // instruction.
  if (info->name().is_identical_to(isolate()->factory()->prototype_string()) &&
      info->map()->IsJSFunctionMap() && info->map()->is_constructor()) {
    DCHECK(!info->map()->has_non_instance_prototype());
    return New<HLoadFunctionPrototype>(checked_object);
  }

  HValue* checked_holder = checked_object;
  if (info->has_holder()) {
    // The property lives on a prototype; guard the prototype chain maps.
    Handle<JSObject> prototype(JSObject::cast(info->map()->prototype()));
    checked_holder = BuildCheckPrototypeMaps(prototype, info->holder());
  }

  if (!info->IsFound()) {
    // A load of a property that is known to be absent yields undefined.
    DCHECK(info->IsLoad());
    return graph()->GetConstantUndefined();
  }

  if (info->IsData()) {
    if (info->IsLoad()) {
      return BuildLoadNamedField(info, checked_holder);
    } else {
      return BuildStoreNamedField(info, checked_object, value);
    }
  }

  if (info->IsTransition()) {
    DCHECK(!info->IsLoad());
    return BuildStoreNamedField(info, checked_object, value);
  }

  if (info->IsAccessorConstant()) {
    // Accessor call: the receiver (and the value, for setters) become the
    // call arguments.
    Push(checked_object);
    int argument_count = 1;
    if (!info->IsLoad()) {
      argument_count = 2;
      Push(value);
    }

    if (info->accessor()->IsJSFunction() &&
        info->NeedsWrappingFor(Handle<JSFunction>::cast(info->accessor()))) {
      // Primitive receiver needs wrapping; use the function-call path.
      HValue* function = Add<HConstant>(info->accessor());
      PushArgumentsFromEnvironment(argument_count);
      return NewCallFunction(function, argument_count, TailCallMode::kDisallow,
                             ConvertReceiverMode::kNotNullOrUndefined,
                             TailCallMode::kDisallow);
    } else if (FLAG_inline_accessors && can_inline_accessor) {
      bool success = info->IsLoad()
          ? TryInlineGetter(info->accessor(), info->map(), ast_id, return_id)
          : TryInlineSetter(
              info->accessor(), info->map(), ast_id, return_id, value);
      // NULL signals "result comes from the inlined accessor" (or that a
      // stack overflow aborted graph building).
      if (success || HasStackOverflow()) return NULL;
    }

    PushArgumentsFromEnvironment(argument_count);
    if (!info->accessor()->IsJSFunction()) {
      // Only JSFunction accessors can be called directly here.
      Bailout(kInliningBailedOut);
      return nullptr;
    }
    return NewCallConstantFunction(Handle<JSFunction>::cast(info->accessor()),
                                   argument_count, TailCallMode::kDisallow,
                                   TailCallMode::kDisallow);
  }

  DCHECK(info->IsDataConstant());
  if (info->IsLoad()) {
    return New<HConstant>(info->constant());
  } else {
    // Storing over a constant-function property only needs a value check.
    return New<HCheckValue>(value, Handle<JSFunction>::cast(info->constant()));
  }
}
6735
6736
// Emits a polymorphic named property access: one compare-and-branch per
// receiver map that can be handled monomorphically, all joined afterwards.
// The remainder falls back to a generic access, or to a hard
// deoptimization when every known map is already covered.
void HOptimizedGraphBuilder::HandlePolymorphicNamedFieldAccess(
    PropertyAccessType access_type, Expression* expr, FeedbackVectorSlot slot,
    BailoutId ast_id, BailoutId return_id, HValue* object, HValue* value,
    SmallMapList* maps, Handle<Name> name) {
  // Something did not match; must use a polymorphic load.
  int count = 0;
  HBasicBlock* join = NULL;
  HBasicBlock* number_block = NULL;
  bool handled_string = false;

  // First pass: count handleable maps and detect whether a number case is
  // among them, which requires an explicit smi check up front.
  bool handle_smi = false;
  STATIC_ASSERT(kMaxLoadPolymorphism == kMaxStorePolymorphism);
  int i;
  for (i = 0; i < maps->length() && count < kMaxLoadPolymorphism; ++i) {
    PropertyAccessInfo info(this, access_type, maps->at(i), name);
    if (info.IsStringType()) {
      // All string maps share one IsString branch; only consider one.
      if (handled_string) continue;
      handled_string = true;
    }
    if (info.CanAccessMonomorphic()) {
      count++;
      if (info.IsNumberType()) {
        handle_smi = true;
        break;
      }
    }
  }

  if (i < maps->length()) {
    // The scan above stopped early; drop the maps so the emit loop below
    // produces no specialized cases and we go straight to the generic path.
    count = -1;
    maps->Clear();
  } else {
    count = 0;
  }
  HControlInstruction* smi_check = NULL;
  handled_string = false;

  // Second pass: emit a compare-and-branch per handleable map.
  for (i = 0; i < maps->length() && count < kMaxLoadPolymorphism; ++i) {
    PropertyAccessInfo info(this, access_type, maps->at(i), name);
    if (info.IsStringType()) {
      if (handled_string) continue;
      handled_string = true;
    }
    if (!info.CanAccessMonomorphic()) continue;

    if (count == 0) {
      // First emitted case: create the join block and, if needed, route
      // smis into the number case instead of failing the heap-object check.
      join = graph()->CreateBasicBlock();
      if (handle_smi) {
        HBasicBlock* empty_smi_block = graph()->CreateBasicBlock();
        HBasicBlock* not_smi_block = graph()->CreateBasicBlock();
        number_block = graph()->CreateBasicBlock();
        smi_check = New<HIsSmiAndBranch>(
            object, empty_smi_block, not_smi_block);
        FinishCurrentBlock(smi_check);
        GotoNoSimulate(empty_smi_block, number_block);
        set_current_block(not_smi_block);
      } else {
        BuildCheckHeapObject(object);
      }
    }
    ++count;
    HBasicBlock* if_true = graph()->CreateBasicBlock();
    HBasicBlock* if_false = graph()->CreateBasicBlock();
    HUnaryControlInstruction* compare;

    HValue* dependency;
    if (info.IsNumberType()) {
      Handle<Map> heap_number_map = isolate()->factory()->heap_number_map();
      compare = New<HCompareMap>(object, heap_number_map, if_true, if_false);
      dependency = smi_check;
    } else if (info.IsStringType()) {
      compare = New<HIsStringAndBranch>(object, if_true, if_false);
      dependency = compare;
    } else {
      compare = New<HCompareMap>(object, info.map(), if_true, if_false);
      dependency = compare;
    }
    FinishCurrentBlock(compare);

    if (info.IsNumberType()) {
      // Both smis and heap numbers end up in the shared number block.
      GotoNoSimulate(if_true, number_block);
      if_true = number_block;
    }

    set_current_block(if_true);

    HValue* access =
        BuildMonomorphicAccess(&info, object, dependency, value, ast_id,
                               return_id, FLAG_polymorphic_inlining);

    // Loads produce the accessed value; stores produce the stored value.
    HValue* result = NULL;
    switch (access_type) {
      case LOAD:
        result = access;
        break;
      case STORE:
        result = value;
        break;
    }

    if (access == NULL) {
      // An accessor was inlined; nothing to add here unless graph building
      // aborted due to stack overflow.
      if (HasStackOverflow()) return;
    } else {
      if (access->IsInstruction()) {
        HInstruction* instr = HInstruction::cast(access);
        if (!instr->IsLinked()) AddInstruction(instr);
      }
      if (!ast_context()->IsEffect()) Push(result);
    }

    if (current_block() != NULL) Goto(join);
    set_current_block(if_false);
  }

  // Finish up. Unconditionally deoptimize if we've handled all the maps we
  // know about and do not want to handle ones we've never seen. Otherwise
  // use a generic IC.
  if (count == maps->length() && FLAG_deoptimize_uncommon_cases) {
    FinishExitWithHardDeoptimization(
        Deoptimizer::kUnknownMapInPolymorphicAccess);
  } else {
    HInstruction* instr =
        BuildNamedGeneric(access_type, expr, slot, object, name, value);
    AddInstruction(instr);
    if (!ast_context()->IsEffect()) Push(access_type == LOAD ? instr : value);

    if (join != NULL) {
      Goto(join);
    } else {
      // The generic access was the only case emitted; no join block exists.
      Add<HSimulate>(ast_id, REMOVABLE_SIMULATE);
      if (!ast_context()->IsEffect()) ast_context()->ReturnValue(Pop());
      return;
    }
  }

  DCHECK(join != NULL);
  if (join->HasPredecessor()) {
    join->SetJoinId(ast_id);
    set_current_block(join);
    if (!ast_context()->IsEffect()) ast_context()->ReturnValue(Pop());
  } else {
    // No case ever reached the join; the graph ends here.
    set_current_block(NULL);
  }
}
6881
Ben Murdoch097c5b22016-05-18 11:27:45 +01006882static bool ComputeReceiverTypes(Expression* expr, HValue* receiver,
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00006883 SmallMapList** t,
Ben Murdoch097c5b22016-05-18 11:27:45 +01006884 HOptimizedGraphBuilder* builder) {
6885 Zone* zone = builder->zone();
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00006886 SmallMapList* maps = expr->GetReceiverTypes();
6887 *t = maps;
6888 bool monomorphic = expr->IsMonomorphic();
6889 if (maps != NULL && receiver->HasMonomorphicJSObjectType()) {
Ben Murdoch097c5b22016-05-18 11:27:45 +01006890 if (maps->length() > 0) {
6891 Map* root_map = receiver->GetMonomorphicJSObjectMap()->FindRootMap();
6892 maps->FilterForPossibleTransitions(root_map);
6893 monomorphic = maps->length() == 1;
6894 } else {
6895 // No type feedback, see if we can infer the type. This is safely
6896 // possible if the receiver had a known map at some point, and no
6897 // map-changing stores have happened to it since.
6898 Handle<Map> candidate_map = receiver->GetMonomorphicJSObjectMap();
Ben Murdoch097c5b22016-05-18 11:27:45 +01006899 for (HInstruction* current = builder->current_block()->last();
6900 current != nullptr; current = current->previous()) {
6901 if (current->IsBlockEntry()) break;
6902 if (current->CheckChangesFlag(kMaps)) {
6903 // Only allow map changes that store the candidate map. We don't
6904 // need to care which object the map is being written into.
6905 if (!current->IsStoreNamedField()) break;
6906 HStoreNamedField* map_change = HStoreNamedField::cast(current);
6907 if (!map_change->value()->IsConstant()) break;
6908 HConstant* map_constant = HConstant::cast(map_change->value());
6909 if (!map_constant->representation().IsTagged()) break;
6910 Handle<Object> map = map_constant->handle(builder->isolate());
6911 if (!map.is_identical_to(candidate_map)) break;
6912 }
6913 if (current == receiver) {
6914 // We made it all the way back to the receiver without encountering
6915 // a map change! So we can assume that the receiver still has the
6916 // candidate_map we know about.
6917 maps->Add(candidate_map, zone);
6918 monomorphic = true;
6919 break;
6920 }
6921 }
6922 }
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00006923 }
6924 return monomorphic && CanInlinePropertyAccess(maps->first());
6925}
6926
6927
6928static bool AreStringTypes(SmallMapList* maps) {
6929 for (int i = 0; i < maps->length(); i++) {
6930 if (maps->at(i)->instance_type() >= FIRST_NONSTRING_TYPE) return false;
6931 }
6932 return true;
6933}
6934
6935
// Emits the store for a property assignment whose operands (value, plus
// key and object for keyed stores) have already been pushed on the
// environment stack.  Pops the operands and, unless in an effect context,
// returns the stored value as the expression result.
void HOptimizedGraphBuilder::BuildStore(Expression* expr, Property* prop,
                                        FeedbackVectorSlot slot,
                                        BailoutId ast_id, BailoutId return_id,
                                        bool is_uninitialized) {
  if (!prop->key()->IsPropertyName()) {
    // Keyed store.
    HValue* value = Pop();
    HValue* key = Pop();
    HValue* object = Pop();
    bool has_side_effects = false;
    HValue* result =
        HandleKeyedElementAccess(object, key, value, expr, slot, ast_id,
                                 return_id, STORE, &has_side_effects);
    if (has_side_effects) {
      // Keep the value on the stack across the simulate so the deopt
      // environment records the result of the assignment expression.
      if (!ast_context()->IsEffect()) Push(value);
      Add<HSimulate>(ast_id, REMOVABLE_SIMULATE);
      if (!ast_context()->IsEffect()) Drop(1);
    }
    // A NULL result means the access was split across blocks and the
    // continuation has already been handled.
    if (result == NULL) return;
    return ast_context()->ReturnValue(value);
  }

  // Named store.
  HValue* value = Pop();
  HValue* object = Pop();

  Literal* key = prop->key()->AsLiteral();
  Handle<String> name = Handle<String>::cast(key->value());
  DCHECK(!name.is_null());

  HValue* access = BuildNamedAccess(STORE, ast_id, return_id, expr, slot,
                                    object, name, value, is_uninitialized);
  if (access == NULL) return;

  if (!ast_context()->IsEffect()) Push(value);
  if (access->IsInstruction()) AddInstruction(HInstruction::cast(access));
  if (access->HasObservableSideEffects()) {
    Add<HSimulate>(ast_id, REMOVABLE_SIMULATE);
  }
  if (!ast_context()->IsEffect()) Drop(1);
  return ast_context()->ReturnValue(value);
}
6978
6979
// Builds a non-compound assignment to a property: evaluates the receiver,
// the key (for keyed stores only), and the value in source order, then
// delegates the actual store to BuildStore.
void HOptimizedGraphBuilder::HandlePropertyAssignment(Assignment* expr) {
  Property* prop = expr->target()->AsProperty();
  DCHECK(prop != NULL);
  CHECK_ALIVE(VisitForValue(prop->obj()));
  if (!prop->key()->IsPropertyName()) {
    CHECK_ALIVE(VisitForValue(prop->key()));
  }
  CHECK_ALIVE(VisitForValue(expr->value()));
  BuildStore(expr, prop, expr->AssignmentSlot(), expr->id(),
             expr->AssignmentId(), expr->IsUninitialized());
}
6991
6992
// Because not every expression has a position and there is not common
// superclass of Assignment and CountOperation, we cannot just pass the
// owning expression instead of position and ast_id separately.
//
// Stores |value| into the global variable |var|.  Tries, in order:
// a script-context slot, a specialized property-cell store (with deopt
// guards for constant / constant-type cells), and finally a generic
// named store against the global object.
void HOptimizedGraphBuilder::HandleGlobalVariableAssignment(
    Variable* var, HValue* value, FeedbackVectorSlot slot, BailoutId ast_id) {
  Handle<JSGlobalObject> global(current_info()->global_object());

  // Lookup in script contexts.
  {
    Handle<ScriptContextTable> script_contexts(
        global->native_context()->script_context_table());
    ScriptContextTable::LookupResult lookup;
    if (ScriptContextTable::Lookup(script_contexts, var->name(), &lookup)) {
      if (lookup.mode == CONST) {
        return Bailout(kNonInitializerAssignmentToConst);
      }
      Handle<Context> script_context =
          ScriptContextTable::GetContext(script_contexts, lookup.context_index);

      Handle<Object> current_value =
          FixedArray::get(*script_context, lookup.slot_index, isolate());

      // If the values is not the hole, it will stay initialized,
      // so no need to generate a check.
      if (current_value->IsTheHole(isolate())) {
        return Bailout(kReferenceToUninitializedVariable);
      }

      HStoreNamedField* instr = Add<HStoreNamedField>(
          Add<HConstant>(script_context),
          HObjectAccess::ForContextSlot(lookup.slot_index), value);
      USE(instr);
      DCHECK(instr->HasObservableSideEffects());
      Add<HSimulate>(ast_id, REMOVABLE_SIMULATE);
      return;
    }
  }

  LookupIterator it(global, var->name(), LookupIterator::OWN);
  GlobalPropertyAccess type = LookupGlobalProperty(var, &it, STORE);
  if (type == kUseCell) {
    // The property lives in a property cell; the store can be specialized
    // based on the cell's type, guarded by a code dependency on the cell.
    Handle<PropertyCell> cell = it.GetPropertyCell();
    top_info()->dependencies()->AssumePropertyCell(cell);
    auto cell_type = it.property_details().cell_type();
    if (cell_type == PropertyCellType::kConstant ||
        cell_type == PropertyCellType::kUndefined) {
      // The cell holds a known constant; deoptimize eagerly whenever a
      // different value would be stored.
      Handle<Object> constant(cell->value(), isolate());
      if (value->IsConstant()) {
        HConstant* c_value = HConstant::cast(value);
        if (!constant.is_identical_to(c_value->handle(isolate()))) {
          Add<HDeoptimize>(Deoptimizer::kConstantGlobalVariableAssignment,
                           Deoptimizer::EAGER);
        }
      } else {
        HValue* c_constant = Add<HConstant>(constant);
        IfBuilder builder(this);
        if (constant->IsNumber()) {
          builder.If<HCompareNumericAndBranch>(value, c_constant, Token::EQ);
        } else {
          builder.If<HCompareObjectEqAndBranch>(value, c_constant);
        }
        builder.Then();
        builder.Else();
        Add<HDeoptimize>(Deoptimizer::kConstantGlobalVariableAssignment,
                         Deoptimizer::EAGER);
        builder.End();
      }
    }
    HConstant* cell_constant = Add<HConstant>(cell);
    auto access = HObjectAccess::ForPropertyCellValue();
    if (cell_type == PropertyCellType::kConstantType) {
      switch (cell->GetConstantType()) {
        case PropertyCellConstantType::kSmi:
          access = access.WithRepresentation(Representation::Smi());
          break;
        case PropertyCellConstantType::kStableMap: {
          // The map may no longer be stable, deopt if it's ever different from
          // what is currently there, which will allow for restablization.
          Handle<Map> map(HeapObject::cast(cell->value())->map());
          Add<HCheckHeapObject>(value);
          value = Add<HCheckMaps>(value, map);
          access = access.WithRepresentation(Representation::HeapObject());
          break;
        }
      }
    }
    HInstruction* instr = Add<HStoreNamedField>(cell_constant, access, value);
    // Narrow the GVN change flags: this store only touches the global
    // variable cell, not in-object fields.
    instr->ClearChangesFlag(kInobjectFields);
    instr->SetChangesFlag(kGlobalVars);
    if (instr->HasObservableSideEffects()) {
      Add<HSimulate>(ast_id, REMOVABLE_SIMULATE);
    }
  } else {
    // Slow path: emit a generic named store against the global object.
    HValue* global_object = Add<HLoadNamedField>(
        BuildGetNativeContext(), nullptr,
        HObjectAccess::ForContextSlot(Context::EXTENSION_INDEX));
    Handle<TypeFeedbackVector> vector =
        handle(current_feedback_vector(), isolate());
    HStoreNamedGeneric* instr =
        Add<HStoreNamedGeneric>(global_object, var->name(), value,
                                function_language_mode(), vector, slot);
    USE(instr);
    DCHECK(instr->HasObservableSideEffects());
    Add<HSimulate>(ast_id, REMOVABLE_SIMULATE);
  }
}
7099
7100
7101void HOptimizedGraphBuilder::HandleCompoundAssignment(Assignment* expr) {
7102 Expression* target = expr->target();
7103 VariableProxy* proxy = target->AsVariableProxy();
7104 Property* prop = target->AsProperty();
7105 DCHECK(proxy == NULL || prop == NULL);
7106
7107 // We have a second position recorded in the FullCodeGenerator to have
7108 // type feedback for the binary operation.
7109 BinaryOperation* operation = expr->binary_operation();
7110
7111 if (proxy != NULL) {
7112 Variable* var = proxy->var();
7113 if (var->mode() == LET) {
7114 return Bailout(kUnsupportedLetCompoundAssignment);
7115 }
7116
7117 CHECK_ALIVE(VisitForValue(operation));
7118
7119 switch (var->location()) {
7120 case VariableLocation::GLOBAL:
7121 case VariableLocation::UNALLOCATED:
7122 HandleGlobalVariableAssignment(var, Top(), expr->AssignmentSlot(),
7123 expr->AssignmentId());
7124 break;
7125
7126 case VariableLocation::PARAMETER:
7127 case VariableLocation::LOCAL:
7128 if (var->mode() == CONST_LEGACY) {
7129 return Bailout(kUnsupportedConstCompoundAssignment);
7130 }
7131 if (var->mode() == CONST) {
7132 return Bailout(kNonInitializerAssignmentToConst);
7133 }
7134 BindIfLive(var, Top());
7135 break;
7136
7137 case VariableLocation::CONTEXT: {
7138 // Bail out if we try to mutate a parameter value in a function
7139 // using the arguments object. We do not (yet) correctly handle the
7140 // arguments property of the function.
7141 if (current_info()->scope()->arguments() != NULL) {
7142 // Parameters will be allocated to context slots. We have no
7143 // direct way to detect that the variable is a parameter so we do
7144 // a linear search of the parameter variables.
7145 int count = current_info()->scope()->num_parameters();
7146 for (int i = 0; i < count; ++i) {
7147 if (var == current_info()->scope()->parameter(i)) {
7148 Bailout(kAssignmentToParameterFunctionUsesArgumentsObject);
7149 }
7150 }
7151 }
7152
7153 HStoreContextSlot::Mode mode;
7154
7155 switch (var->mode()) {
7156 case LET:
7157 mode = HStoreContextSlot::kCheckDeoptimize;
7158 break;
7159 case CONST:
7160 return Bailout(kNonInitializerAssignmentToConst);
7161 case CONST_LEGACY:
Ben Murdochc5610432016-08-08 18:44:38 +01007162 if (is_strict(function_language_mode())) {
7163 return Bailout(kNonInitializerAssignmentToConst);
7164 } else {
7165 return ast_context()->ReturnValue(Pop());
7166 }
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00007167 default:
7168 mode = HStoreContextSlot::kNoCheck;
7169 }
7170
7171 HValue* context = BuildContextChainWalk(var);
7172 HStoreContextSlot* instr = Add<HStoreContextSlot>(
7173 context, var->index(), mode, Top());
7174 if (instr->HasObservableSideEffects()) {
7175 Add<HSimulate>(expr->AssignmentId(), REMOVABLE_SIMULATE);
7176 }
7177 break;
7178 }
7179
7180 case VariableLocation::LOOKUP:
7181 return Bailout(kCompoundAssignmentToLookupSlot);
7182 }
7183 return ast_context()->ReturnValue(Pop());
7184
7185 } else if (prop != NULL) {
7186 CHECK_ALIVE(VisitForValue(prop->obj()));
7187 HValue* object = Top();
7188 HValue* key = NULL;
7189 if (!prop->key()->IsPropertyName() || prop->IsStringAccess()) {
7190 CHECK_ALIVE(VisitForValue(prop->key()));
7191 key = Top();
7192 }
7193
7194 CHECK_ALIVE(PushLoad(prop, object, key));
7195
7196 CHECK_ALIVE(VisitForValue(expr->value()));
7197 HValue* right = Pop();
7198 HValue* left = Pop();
7199
7200 Push(BuildBinaryOperation(operation, left, right, PUSH_BEFORE_SIMULATE));
7201
7202 BuildStore(expr, prop, expr->AssignmentSlot(), expr->id(),
7203 expr->AssignmentId(), expr->IsUninitialized());
7204 } else {
7205 return Bailout(kInvalidLhsInCompoundAssignment);
7206 }
7207}
7208
7209
// Visitor for assignment expressions.  Dispatches compound assignments
// and property assignments to their handlers; handles plain variable
// assignments inline, per variable location (global, stack, context,
// lookup).
void HOptimizedGraphBuilder::VisitAssignment(Assignment* expr) {
  DCHECK(!HasStackOverflow());
  DCHECK(current_block() != NULL);
  DCHECK(current_block()->HasPredecessor());

  VariableProxy* proxy = expr->target()->AsVariableProxy();
  Property* prop = expr->target()->AsProperty();
  DCHECK(proxy == NULL || prop == NULL);

  if (expr->is_compound()) {
    HandleCompoundAssignment(expr);
    return;
  }

  if (prop != NULL) {
    HandlePropertyAssignment(expr);
  } else if (proxy != NULL) {
    Variable* var = proxy->var();

    if (var->mode() == CONST) {
      if (expr->op() != Token::INIT) {
        return Bailout(kNonInitializerAssignmentToConst);
      }
    } else if (var->mode() == CONST_LEGACY) {
      if (expr->op() != Token::INIT) {
        // Sloppy-mode assignment to legacy const evaluates the RHS but
        // leaves the variable unchanged; strict mode bails out.
        if (is_strict(function_language_mode())) {
          return Bailout(kNonInitializerAssignmentToConst);
        } else {
          CHECK_ALIVE(VisitForValue(expr->value()));
          return ast_context()->ReturnValue(Pop());
        }
      }

      // TODO(adamk): Is this required? Legacy const variables are always
      // initialized before use.
      if (var->IsStackAllocated()) {
        // We insert a use of the old value to detect unsupported uses of const
        // variables (e.g. initialization inside a loop).
        HValue* old_value = environment()->Lookup(var);
        Add<HUseConst>(old_value);
      }
    }

    if (proxy->IsArguments()) return Bailout(kAssignmentToArguments);

    // Handle the assignment.
    switch (var->location()) {
      case VariableLocation::GLOBAL:
      case VariableLocation::UNALLOCATED:
        CHECK_ALIVE(VisitForValue(expr->value()));
        HandleGlobalVariableAssignment(var, Top(), expr->AssignmentSlot(),
                                       expr->AssignmentId());
        return ast_context()->ReturnValue(Pop());

      case VariableLocation::PARAMETER:
      case VariableLocation::LOCAL: {
        // Perform an initialization check for let declared variables
        // or parameters.
        if (var->mode() == LET && expr->op() == Token::ASSIGN) {
          HValue* env_value = environment()->Lookup(var);
          if (env_value == graph()->GetConstantHole()) {
            return Bailout(kAssignmentToLetVariableBeforeInitialization);
          }
        }
        // We do not allow the arguments object to occur in a context where it
        // may escape, but assignments to stack-allocated locals are
        // permitted.
        CHECK_ALIVE(VisitForValue(expr->value(), ARGUMENTS_ALLOWED));
        HValue* value = Pop();
        BindIfLive(var, value);
        return ast_context()->ReturnValue(value);
      }

      case VariableLocation::CONTEXT: {
        // Bail out if we try to mutate a parameter value in a function using
        // the arguments object. We do not (yet) correctly handle the
        // arguments property of the function.
        if (current_info()->scope()->arguments() != NULL) {
          // Parameters will rewrite to context slots. We have no direct way
          // to detect that the variable is a parameter.
          int count = current_info()->scope()->num_parameters();
          for (int i = 0; i < count; ++i) {
            if (var == current_info()->scope()->parameter(i)) {
              return Bailout(kAssignmentToParameterInArgumentsObject);
            }
          }
        }

        CHECK_ALIVE(VisitForValue(expr->value()));
        HStoreContextSlot::Mode mode;
        if (expr->op() == Token::ASSIGN) {
          switch (var->mode()) {
            case LET:
              mode = HStoreContextSlot::kCheckDeoptimize;
              break;
            case CONST:
              // This case is checked statically so no need to
              // perform checks here
              UNREACHABLE();
            case CONST_LEGACY:
              return ast_context()->ReturnValue(Pop());
            default:
              mode = HStoreContextSlot::kNoCheck;
          }
        } else {
          DCHECK_EQ(Token::INIT, expr->op());
          mode = HStoreContextSlot::kNoCheck;
        }

        HValue* context = BuildContextChainWalk(var);
        HStoreContextSlot* instr = Add<HStoreContextSlot>(
            context, var->index(), mode, Top());
        if (instr->HasObservableSideEffects()) {
          Add<HSimulate>(expr->AssignmentId(), REMOVABLE_SIMULATE);
        }
        return ast_context()->ReturnValue(Pop());
      }

      case VariableLocation::LOOKUP:
        return Bailout(kAssignmentToLOOKUPVariable);
    }
  } else {
    return Bailout(kInvalidLeftHandSideInAssignment);
  }
}
7335
7336
// Yield only occurs inside generator functions, which Crankshaft never
// optimizes, so this visitor must be unreachable.
void HOptimizedGraphBuilder::VisitYield(Yield* expr) {
  // Generators are not optimized, so we should never get here.
  UNREACHABLE();
}
7341
7342
// Visitor for throw expressions: evaluates the exception value and emits
// a call to the Runtime::kThrow runtime function.
void HOptimizedGraphBuilder::VisitThrow(Throw* expr) {
  DCHECK(!HasStackOverflow());
  DCHECK(current_block() != NULL);
  DCHECK(current_block()->HasPredecessor());
  if (!ast_context()->IsEffect()) {
    // The parser turns invalid left-hand sides in assignments into throw
    // statements, which may not be in effect contexts. We might still try
    // to optimize such functions; bail out now if we do.
    return Bailout(kInvalidLeftHandSideInAssignment);
  }
  CHECK_ALIVE(VisitForValue(expr->exception()));

  HValue* value = environment()->Pop();
  if (!top_info()->is_tracking_positions()) SetSourcePosition(expr->position());
  Add<HPushArguments>(value);
  Add<HCallRuntime>(Runtime::FunctionForId(Runtime::kThrow), 1);
  Add<HSimulate>(expr->id());

  // If the throw definitely exits the function, we can finish with a dummy
  // control flow at this point. This is not the case if the throw is inside
  // an inlined function which may be replaced.
  if (call_context() == NULL) {
    FinishExitCurrentBlock(New<HAbnormalExit>());
  }
}
7368
7369
7370HInstruction* HGraphBuilder::AddLoadStringInstanceType(HValue* string) {
7371 if (string->IsConstant()) {
7372 HConstant* c_string = HConstant::cast(string);
7373 if (c_string->HasStringValue()) {
7374 return Add<HConstant>(c_string->StringValue()->map()->instance_type());
7375 }
7376 }
7377 return Add<HLoadNamedField>(
7378 Add<HLoadNamedField>(string, nullptr, HObjectAccess::ForMap()), nullptr,
7379 HObjectAccess::ForMapInstanceType());
7380}
7381
7382
7383HInstruction* HGraphBuilder::AddLoadStringLength(HValue* string) {
7384 return AddInstruction(BuildLoadStringLength(string));
7385}
7386
7387
7388HInstruction* HGraphBuilder::BuildLoadStringLength(HValue* string) {
7389 if (string->IsConstant()) {
7390 HConstant* c_string = HConstant::cast(string);
7391 if (c_string->HasStringValue()) {
7392 return New<HConstant>(c_string->StringValue()->length());
7393 }
7394 }
7395 return New<HLoadNamedField>(string, nullptr,
7396 HObjectAccess::ForStringLength());
7397}
7398
7399
7400HInstruction* HOptimizedGraphBuilder::BuildNamedGeneric(
7401 PropertyAccessType access_type, Expression* expr, FeedbackVectorSlot slot,
7402 HValue* object, Handle<Name> name, HValue* value, bool is_uninitialized) {
7403 if (is_uninitialized) {
7404 Add<HDeoptimize>(
7405 Deoptimizer::kInsufficientTypeFeedbackForGenericNamedAccess,
7406 Deoptimizer::SOFT);
7407 }
7408 if (access_type == LOAD) {
7409 Handle<TypeFeedbackVector> vector =
7410 handle(current_feedback_vector(), isolate());
7411
7412 if (!expr->AsProperty()->key()->IsPropertyName()) {
7413 // It's possible that a keyed load of a constant string was converted
7414 // to a named load. Here, at the last minute, we need to make sure to
7415 // use a generic Keyed Load if we are using the type vector, because
7416 // it has to share information with full code.
7417 HConstant* key = Add<HConstant>(name);
Ben Murdoch097c5b22016-05-18 11:27:45 +01007418 HLoadKeyedGeneric* result =
Ben Murdoch61f157c2016-09-16 13:49:30 +01007419 New<HLoadKeyedGeneric>(object, key, vector, slot);
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00007420 return result;
7421 }
7422
Ben Murdoch097c5b22016-05-18 11:27:45 +01007423 HLoadNamedGeneric* result =
Ben Murdoch61f157c2016-09-16 13:49:30 +01007424 New<HLoadNamedGeneric>(object, name, vector, slot);
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00007425 return result;
7426 } else {
Ben Murdoch61f157c2016-09-16 13:49:30 +01007427 Handle<TypeFeedbackVector> vector =
7428 handle(current_feedback_vector(), isolate());
7429
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00007430 if (current_feedback_vector()->GetKind(slot) ==
7431 FeedbackVectorSlotKind::KEYED_STORE_IC) {
7432 // It's possible that a keyed store of a constant string was converted
7433 // to a named store. Here, at the last minute, we need to make sure to
7434 // use a generic Keyed Store if we are using the type vector, because
7435 // it has to share information with full code.
7436 HConstant* key = Add<HConstant>(name);
7437 HStoreKeyedGeneric* result = New<HStoreKeyedGeneric>(
Ben Murdoch61f157c2016-09-16 13:49:30 +01007438 object, key, value, function_language_mode(), vector, slot);
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00007439 return result;
7440 }
7441
7442 HStoreNamedGeneric* result = New<HStoreNamedGeneric>(
Ben Murdoch61f157c2016-09-16 13:49:30 +01007443 object, name, value, function_language_mode(), vector, slot);
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00007444 return result;
7445 }
7446}
7447
7448
7449HInstruction* HOptimizedGraphBuilder::BuildKeyedGeneric(
7450 PropertyAccessType access_type, Expression* expr, FeedbackVectorSlot slot,
7451 HValue* object, HValue* key, HValue* value) {
Ben Murdoch61f157c2016-09-16 13:49:30 +01007452 Handle<TypeFeedbackVector> vector =
7453 handle(current_feedback_vector(), isolate());
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00007454 if (access_type == LOAD) {
Ben Murdoch097c5b22016-05-18 11:27:45 +01007455 HLoadKeyedGeneric* result =
Ben Murdoch61f157c2016-09-16 13:49:30 +01007456 New<HLoadKeyedGeneric>(object, key, vector, slot);
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00007457 return result;
7458 } else {
7459 HStoreKeyedGeneric* result = New<HStoreKeyedGeneric>(
Ben Murdoch61f157c2016-09-16 13:49:30 +01007460 object, key, value, function_language_mode(), vector, slot);
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00007461 return result;
7462 }
7463}
7464
7465
// Decides how keyed loads from |map| may treat holes.  Only the "stock"
// initial JSArray maps qualify, and only while the fast-array prototype
// chain is intact; registers the code dependencies that keep the choice
// valid.
LoadKeyedHoleMode HOptimizedGraphBuilder::BuildKeyedHoleMode(Handle<Map> map) {
  // Loads from a "stock" fast holey double arrays can elide the hole check.
  // Loads from a "stock" fast holey array can convert the hole to undefined
  // with impunity.
  LoadKeyedHoleMode load_mode = NEVER_RETURN_HOLE;
  bool holey_double_elements =
      *map == isolate()->get_initial_js_array_map(FAST_HOLEY_DOUBLE_ELEMENTS);
  bool holey_elements =
      *map == isolate()->get_initial_js_array_map(FAST_HOLEY_ELEMENTS);
  if ((holey_double_elements || holey_elements) &&
      isolate()->IsFastArrayConstructorPrototypeChainIntact()) {
    load_mode =
        holey_double_elements ? ALLOW_RETURN_HOLE : CONVERT_HOLE_TO_UNDEFINED;

    // Eliding the hole check is only sound while no elements appear on the
    // array's prototype chain; check the chain and record the dependency
    // so the code deoptimizes if that ever changes.
    Handle<JSObject> prototype(JSObject::cast(map->prototype()), isolate());
    Handle<JSObject> object_prototype = isolate()->initial_object_prototype();
    BuildCheckPrototypeMaps(prototype, object_prototype);
    graph()->MarkDependsOnEmptyArrayProtoElements();
  }
  return load_mode;
}
7487
7488
// Emits a map-checked element access for a single receiver map.  Stores
// additionally walk and check the prototype chain, since shape changes
// there could install element callbacks incompatible with a monomorphic
// keyed store.
HInstruction* HOptimizedGraphBuilder::BuildMonomorphicElementAccess(
    HValue* object,
    HValue* key,
    HValue* val,
    HValue* dependency,
    Handle<Map> map,
    PropertyAccessType access_type,
    KeyedAccessStoreMode store_mode) {
  HCheckMaps* checked_object = Add<HCheckMaps>(object, map, dependency);

  if (access_type == STORE && map->prototype()->IsJSObject()) {
    // monomorphic stores need a prototype chain check because shape
    // changes could allow callbacks on elements in the chain that
    // aren't compatible with monomorphic keyed stores.
    PrototypeIterator iter(map);
    JSObject* holder = NULL;
    while (!iter.IsAtEnd()) {
      // JSProxies can't occur here because we wouldn't have installed a
      // non-generic IC if there were any.
      holder = *PrototypeIterator::GetCurrent<JSObject>(iter);
      iter.Advance();
    }
    DCHECK(holder && holder->IsJSObject());

    BuildCheckPrototypeMaps(handle(JSObject::cast(map->prototype())),
                            Handle<JSObject>(holder));
  }

  LoadKeyedHoleMode load_mode = BuildKeyedHoleMode(map);
  return BuildUncheckedMonomorphicElementAccess(
      checked_object, key, val,
      map->instance_type() == JS_ARRAY_TYPE,
      map->elements_kind(), access_type,
      load_mode, store_mode);
}
7524
7525
7526static bool CanInlineElementAccess(Handle<Map> map) {
Ben Murdoch097c5b22016-05-18 11:27:45 +01007527 return map->IsJSObjectMap() &&
7528 (map->has_fast_elements() || map->has_fixed_typed_array_elements()) &&
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00007529 !map->has_indexed_interceptor() && !map->is_access_check_needed();
7530}
7531
7532
// Attempts to lower a polymorphic element load to one "consolidated"
// access: when all receiver maps use compatible fast elements kinds, a
// single load using the most general kind covers every case without any
// elements transition.  Returns NULL when consolidation is not possible.
HInstruction* HOptimizedGraphBuilder::TryBuildConsolidatedElementLoad(
    HValue* object,
    HValue* key,
    HValue* val,
    SmallMapList* maps) {
  // For polymorphic loads of similar elements kinds (i.e. all tagged or all
  // double), always use the "worst case" code without a transition. This is
  // much faster than transitioning the elements to the worst case, trading a
  // HTransitionElements for a HCheckMaps, and avoiding mutation of the array.
  bool has_double_maps = false;
  bool has_smi_or_object_maps = false;
  bool has_js_array_access = false;
  bool has_non_js_array_access = false;
  bool has_seen_holey_elements = false;
  Handle<Map> most_general_consolidated_map;
  for (int i = 0; i < maps->length(); ++i) {
    Handle<Map> map = maps->at(i);
    if (!CanInlineElementAccess(map)) return NULL;
    // Don't allow mixing of JSArrays with JSObjects.
    if (map->instance_type() == JS_ARRAY_TYPE) {
      if (has_non_js_array_access) return NULL;
      has_js_array_access = true;
    } else if (has_js_array_access) {
      return NULL;
    } else {
      has_non_js_array_access = true;
    }
    // Don't allow mixed, incompatible elements kinds.
    if (map->has_fast_double_elements()) {
      if (has_smi_or_object_maps) return NULL;
      has_double_maps = true;
    } else if (map->has_fast_smi_or_object_elements()) {
      if (has_double_maps) return NULL;
      has_smi_or_object_maps = true;
    } else {
      return NULL;
    }
    // Remember if we've ever seen holey elements.
    if (IsHoleyElementsKind(map->elements_kind())) {
      has_seen_holey_elements = true;
    }
    // Remember the most general elements kind, the code for its load will
    // properly handle all of the more specific cases.
    if ((i == 0) || IsMoreGeneralElementsKindTransition(
            most_general_consolidated_map->elements_kind(),
            map->elements_kind())) {
      most_general_consolidated_map = map;
    }
  }
  if (!has_double_maps && !has_smi_or_object_maps) return NULL;

  HCheckMaps* checked_object = Add<HCheckMaps>(object, maps);
  // FAST_ELEMENTS is considered more general than FAST_HOLEY_SMI_ELEMENTS.
  // If we've seen both, the consolidated load must use FAST_HOLEY_ELEMENTS.
  ElementsKind consolidated_elements_kind = has_seen_holey_elements
      ? GetHoleyElementsKind(most_general_consolidated_map->elements_kind())
      : most_general_consolidated_map->elements_kind();
  LoadKeyedHoleMode load_mode = NEVER_RETURN_HOLE;
  if (has_seen_holey_elements) {
    // Make sure that all of the maps we are handling have the initial array
    // prototype.
    bool saw_non_array_prototype = false;
    for (int i = 0; i < maps->length(); ++i) {
      Handle<Map> map = maps->at(i);
      if (map->prototype() != *isolate()->initial_array_prototype()) {
        // We can't guarantee that loading the hole is safe. The prototype may
        // have an element at this position.
        saw_non_array_prototype = true;
        break;
      }
    }

    if (!saw_non_array_prototype) {
      Handle<Map> holey_map = handle(
          isolate()->get_initial_js_array_map(consolidated_elements_kind));
      load_mode = BuildKeyedHoleMode(holey_map);
      if (load_mode != NEVER_RETURN_HOLE) {
        for (int i = 0; i < maps->length(); ++i) {
          Handle<Map> map = maps->at(i);
          // The prototype check was already done for the holey map in
          // BuildKeyedHoleMode.
          if (!map.is_identical_to(holey_map)) {
            Handle<JSObject> prototype(JSObject::cast(map->prototype()),
                                       isolate());
            Handle<JSObject> object_prototype =
                isolate()->initial_object_prototype();
            BuildCheckPrototypeMaps(prototype, object_prototype);
          }
        }
      }
    }
  }
  HInstruction* instr = BuildUncheckedMonomorphicElementAccess(
      checked_object, key, val,
      most_general_consolidated_map->instance_type() == JS_ARRAY_TYPE,
      consolidated_elements_kind, LOAD, load_mode, STANDARD_STORE);
  return instr;
}
7631
7632
// Builds an element access for multiple receiver maps.  Tries a
// consolidated load first; otherwise emits elements-kind transitions
// where possible and dispatches over the remaining maps with a chain of
// map compares, hard-deoptimizing when no map matches.  *has_side_effects
// tells the caller whether it must add a Simulate.
HValue* HOptimizedGraphBuilder::HandlePolymorphicElementAccess(
    Expression* expr, FeedbackVectorSlot slot, HValue* object, HValue* key,
    HValue* val, SmallMapList* maps, PropertyAccessType access_type,
    KeyedAccessStoreMode store_mode, bool* has_side_effects) {
  *has_side_effects = false;
  BuildCheckHeapObject(object);

  if (access_type == LOAD) {
    HInstruction* consolidated_load =
        TryBuildConsolidatedElementLoad(object, key, val, maps);
    if (consolidated_load != NULL) {
      *has_side_effects |= consolidated_load->HasObservableSideEffects();
      return consolidated_load;
    }
  }

  // Elements_kind transition support.
  MapHandleList transition_target(maps->length());
  // Collect possible transition targets.
  MapHandleList possible_transitioned_maps(maps->length());
  for (int i = 0; i < maps->length(); ++i) {
    Handle<Map> map = maps->at(i);
    // Loads from strings or loads with a mix of string and non-string maps
    // shouldn't be handled polymorphically.
    DCHECK(access_type != LOAD || !map->IsStringMap());
    ElementsKind elements_kind = map->elements_kind();
    if (CanInlineElementAccess(map) && IsFastElementsKind(elements_kind) &&
        elements_kind != GetInitialFastElementsKind()) {
      possible_transitioned_maps.Add(map);
    }
    if (IsSloppyArgumentsElements(elements_kind)) {
      // Sloppy-arguments elements are never inlined; fall back to a
      // generic access for the whole site.
      HInstruction* result =
          BuildKeyedGeneric(access_type, expr, slot, object, key, val);
      *has_side_effects = result->HasObservableSideEffects();
      return AddInstruction(result);
    }
  }
  // Get transition target for each map (NULL == no transition).
  for (int i = 0; i < maps->length(); ++i) {
    Handle<Map> map = maps->at(i);
    Map* transitioned_map =
        map->FindElementsKindTransitionedMap(&possible_transitioned_maps);
    if (transitioned_map != nullptr) {
      transition_target.Add(handle(transitioned_map));
    } else {
      transition_target.Add(Handle<Map>());
    }
  }

  MapHandleList untransitionable_maps(maps->length());
  HTransitionElementsKind* transition = NULL;
  for (int i = 0; i < maps->length(); ++i) {
    Handle<Map> map = maps->at(i);
    DCHECK(map->IsMap());
    if (!transition_target.at(i).is_null()) {
      DCHECK(Map::IsValidElementsTransition(
          map->elements_kind(),
          transition_target.at(i)->elements_kind()));
      transition = Add<HTransitionElementsKind>(object, map,
                                                transition_target.at(i));
    } else {
      untransitionable_maps.Add(map);
    }
  }

  // If only one map is left after transitioning, handle this case
  // monomorphically.
  DCHECK(untransitionable_maps.length() >= 1);
  if (untransitionable_maps.length() == 1) {
    Handle<Map> untransitionable_map = untransitionable_maps[0];
    HInstruction* instr = NULL;
    if (!CanInlineElementAccess(untransitionable_map)) {
      instr = AddInstruction(
          BuildKeyedGeneric(access_type, expr, slot, object, key, val));
    } else {
      instr = BuildMonomorphicElementAccess(
          object, key, val, transition, untransitionable_map, access_type,
          store_mode);
    }
    *has_side_effects |= instr->HasObservableSideEffects();
    return access_type == STORE ? val : instr;
  }

  HBasicBlock* join = graph()->CreateBasicBlock();

  // Polymorphic dispatch: one compare-map branch per remaining map, each
  // ending in a jump to the common join block.
  for (int i = 0; i < untransitionable_maps.length(); ++i) {
    Handle<Map> map = untransitionable_maps[i];
    ElementsKind elements_kind = map->elements_kind();
    HBasicBlock* this_map = graph()->CreateBasicBlock();
    HBasicBlock* other_map = graph()->CreateBasicBlock();
    HCompareMap* mapcompare =
        New<HCompareMap>(object, map, this_map, other_map);
    FinishCurrentBlock(mapcompare);

    set_current_block(this_map);
    HInstruction* access = NULL;
    if (!CanInlineElementAccess(map)) {
      access = AddInstruction(
          BuildKeyedGeneric(access_type, expr, slot, object, key, val));
    } else {
      DCHECK(IsFastElementsKind(elements_kind) ||
             IsFixedTypedArrayElementsKind(elements_kind));
      LoadKeyedHoleMode load_mode = BuildKeyedHoleMode(map);
      // Happily, mapcompare is a checked object.
      access = BuildUncheckedMonomorphicElementAccess(
          mapcompare, key, val,
          map->instance_type() == JS_ARRAY_TYPE,
          elements_kind, access_type,
          load_mode,
          store_mode);
    }
    *has_side_effects |= access->HasObservableSideEffects();
    // The caller will use has_side_effects and add a correct Simulate.
    access->SetFlag(HValue::kHasNoObservableSideEffects);
    if (access_type == LOAD) {
      Push(access);
    }
    NoObservableSideEffectsScope scope(this);
    GotoNoSimulate(join);
    set_current_block(other_map);
  }

  // Ensure that we visited at least one map above that goes to join. This is
  // necessary because FinishExitWithHardDeoptimization does an AbnormalExit
  // rather than joining the join block. If this becomes an issue, insert a
  // generic access in the case length() == 0.
  DCHECK(join->predecessors()->length() > 0);
  // Deopt if none of the cases matched.
  NoObservableSideEffectsScope scope(this);
  FinishExitWithHardDeoptimization(
      Deoptimizer::kUnknownMapInPolymorphicElementAccess);
  set_current_block(join);
  return access_type == STORE ? val : Pop();
}
7767
7768
// Builds the Hydrogen graph for a keyed property access obj[key].
//
// Strategy, in order of preference:
//  1. If the IC feedback recorded a single name for a non-constant key,
//     guard the key against that name and fall through to the named path.
//  2. If the (expected) key is a constant name that is not an array index,
//     build a named property access instead of an element access.
//  3. Otherwise build a mono-/polymorphic element access based on the
//     receiver maps from type feedback, or a generic keyed stub when the
//     maps are unusable (adding a soft deopt when feedback is absent).
//
// On return, *has_side_effects tells the caller whether it must add a
// simulate after the access.
HValue* HOptimizedGraphBuilder::HandleKeyedElementAccess(
    HValue* obj, HValue* key, HValue* val, Expression* expr,
    FeedbackVectorSlot slot, BailoutId ast_id, BailoutId return_id,
    PropertyAccessType access_type, bool* has_side_effects) {
  // A keyed name access with type feedback may contain the name.
  Handle<TypeFeedbackVector> vector =
      handle(current_feedback_vector(), isolate());
  HValue* expected_key = key;
  if (!key->ActualValue()->IsConstant()) {
    // Ask the keyed load/store IC whether it only ever saw one name used
    // as the key.
    Name* name = nullptr;
    if (access_type == LOAD) {
      KeyedLoadICNexus nexus(vector, slot);
      name = nexus.FindFirstName();
    } else {
      KeyedStoreICNexus nexus(vector, slot);
      name = nexus.FindFirstName();
    }
    if (name != nullptr) {
      Handle<Name> handle_name(name);
      expected_key = Add<HConstant>(handle_name);
      // We need a check against the key.
      bool in_new_space = isolate()->heap()->InNewSpace(*handle_name);
      Unique<Name> unique_name = Unique<Name>::CreateUninitialized(handle_name);
      Add<HCheckValue>(key, unique_name, in_new_space);
    }
  }
  if (expected_key->ActualValue()->IsConstant()) {
    Handle<Object> constant =
        HConstant::cast(expected_key->ActualValue())->handle(isolate());
    uint32_t array_index;
    // Only treat the access as named when the key is a symbol, or a string
    // that is not a valid array index.
    if ((constant->IsString() &&
         !Handle<String>::cast(constant)->AsArrayIndex(&array_index)) ||
        constant->IsSymbol()) {
      if (!constant->IsUniqueName()) {
        constant = isolate()->factory()->InternalizeString(
            Handle<String>::cast(constant));
      }
      HValue* access =
          BuildNamedAccess(access_type, ast_id, return_id, expr, slot, obj,
                           Handle<Name>::cast(constant), val, false);
      // BuildNamedAccess may return NULL (the polymorphic helper reported
      // the result itself), a phi, or an already-linked instruction; in
      // those cases the caller must not add a simulate for this access.
      if (access == NULL || access->IsPhi() ||
          HInstruction::cast(access)->IsLinked()) {
        *has_side_effects = false;
      } else {
        HInstruction* instr = HInstruction::cast(access);
        AddInstruction(instr);
        *has_side_effects = instr->HasObservableSideEffects();
      }
      return access;
    }
  }

  DCHECK(!expr->IsPropertyName());
  HInstruction* instr = NULL;

  SmallMapList* maps;
  bool monomorphic = ComputeReceiverTypes(expr, obj, &maps, this);

  // Decide whether the recorded maps are usable for specialized element
  // accesses, or whether we must fall back to the generic stub.
  bool force_generic = false;
  if (expr->GetKeyType() == PROPERTY) {
    // Non-Generic accesses assume that elements are being accessed, and will
    // deopt for non-index keys, which the IC knows will occur.
    // TODO(jkummerow): Consider adding proper support for property accesses.
    force_generic = true;
    monomorphic = false;
  } else if (access_type == STORE &&
             (monomorphic || (maps != NULL && !maps->is_empty()))) {
    // Stores can't be mono/polymorphic if their prototype chain has dictionary
    // elements. However a receiver map that has dictionary elements itself
    // should be left to normal mono/poly behavior (the other maps may benefit
    // from highly optimized stores).
    for (int i = 0; i < maps->length(); i++) {
      Handle<Map> current_map = maps->at(i);
      if (current_map->DictionaryElementsInPrototypeChainOnly()) {
        force_generic = true;
        monomorphic = false;
        break;
      }
    }
  } else if (access_type == LOAD && !monomorphic &&
             (maps != NULL && !maps->is_empty())) {
    // Polymorphic loads have to go generic if any of the maps are strings.
    // If some, but not all of the maps are strings, we should go generic
    // because polymorphic access wants to key on ElementsKind and isn't
    // compatible with strings.
    for (int i = 0; i < maps->length(); i++) {
      Handle<Map> current_map = maps->at(i);
      if (current_map->IsStringMap()) {
        force_generic = true;
        break;
      }
    }
  }

  if (monomorphic) {
    // Single receiver map: inline the element access if possible.
    Handle<Map> map = maps->first();
    if (!CanInlineElementAccess(map)) {
      instr = AddInstruction(
          BuildKeyedGeneric(access_type, expr, slot, obj, key, val));
    } else {
      BuildCheckHeapObject(obj);
      instr = BuildMonomorphicElementAccess(
          obj, key, val, NULL, map, access_type, expr->GetStoreMode());
    }
  } else if (!force_generic && (maps != NULL && !maps->is_empty())) {
    // Several receiver maps: dispatch on the map. The helper reports
    // *has_side_effects itself, so return directly.
    return HandlePolymorphicElementAccess(expr, slot, obj, key, val, maps,
                                          access_type, expr->GetStoreMode(),
                                          has_side_effects);
  } else {
    // Generic path; add a soft deopt when there is no type feedback at all
    // so we get a chance to re-optimize once feedback exists.
    if (access_type == STORE) {
      if (expr->IsAssignment() &&
          expr->AsAssignment()->HasNoTypeInformation()) {
        Add<HDeoptimize>(Deoptimizer::kInsufficientTypeFeedbackForKeyedStore,
                         Deoptimizer::SOFT);
      }
    } else {
      if (expr->AsProperty()->HasNoTypeInformation()) {
        Add<HDeoptimize>(Deoptimizer::kInsufficientTypeFeedbackForKeyedLoad,
                         Deoptimizer::SOFT);
      }
    }
    instr = AddInstruction(
        BuildKeyedGeneric(access_type, expr, slot, obj, key, val));
  }
  *has_side_effects = instr->HasObservableSideEffects();
  return instr;
}
7896
7897
7898void HOptimizedGraphBuilder::EnsureArgumentsArePushedForAccess() {
7899 // Outermost function already has arguments on the stack.
7900 if (function_state()->outer() == NULL) return;
7901
7902 if (function_state()->arguments_pushed()) return;
7903
7904 // Push arguments when entering inlined function.
7905 HEnterInlined* entry = function_state()->entry();
7906 entry->set_arguments_pushed();
7907
7908 HArgumentsObject* arguments = entry->arguments_object();
7909 const ZoneList<HValue*>* arguments_values = arguments->arguments_values();
7910
7911 HInstruction* insert_after = entry;
7912 for (int i = 0; i < arguments_values->length(); i++) {
7913 HValue* argument = arguments_values->at(i);
7914 HInstruction* push_argument = New<HPushArguments>(argument);
7915 push_argument->InsertAfter(insert_after);
7916 insert_after = push_argument;
7917 }
7918
7919 HArgumentsElements* arguments_elements = New<HArgumentsElements>(true);
7920 arguments_elements->ClearFlag(HValue::kUseGVN);
7921 arguments_elements->InsertAfter(insert_after);
7922 function_state()->set_arguments_elements(arguments_elements);
7923}
7924
7925
// Tries to compile |expr| as a direct access to the (non-materialized)
// arguments object: either arguments.length or arguments[i]. Returns
// false when |expr| is not such an access; otherwise emits the optimized
// access, reports it through the ast context, and returns true.
bool HOptimizedGraphBuilder::TryArgumentsAccess(Property* expr) {
  // The receiver must be a stack-allocated variable that holds the
  // arguments object.
  VariableProxy* proxy = expr->obj()->AsVariableProxy();
  if (proxy == NULL) return false;
  if (!proxy->var()->IsStackAllocated()) return false;
  if (!environment()->Lookup(proxy->var())->CheckFlag(HValue::kIsArguments)) {
    return false;
  }

  HInstruction* result = NULL;
  if (expr->key()->IsPropertyName()) {
    // Named access: only arguments.length is supported.
    Handle<String> name = expr->key()->AsLiteral()->AsPropertyName();
    if (!String::Equals(name, isolate()->factory()->length_string())) {
      return false;
    }

    // Make sure we visit the arguments object so that the liveness analysis
    // still records the access.
    CHECK_ALIVE_OR_RETURN(VisitForValue(expr->obj(), ARGUMENTS_ALLOWED), true);
    Drop(1);

    if (function_state()->outer() == NULL) {
      // Outermost frame: read the length from the actual arguments adaptor.
      HInstruction* elements = Add<HArgumentsElements>(false);
      result = New<HArgumentsLength>(elements);
    } else {
      // Inlined frame: the argument count is a compile-time constant.
      // Number of arguments without receiver.
      int argument_count = environment()->
          arguments_environment()->parameter_count() - 1;
      result = New<HConstant>(argument_count);
    }
  } else {
    // Indexed access: arguments[key] with a bounds check.
    CHECK_ALIVE_OR_RETURN(VisitForValue(expr->obj(), ARGUMENTS_ALLOWED), true);
    CHECK_ALIVE_OR_RETURN(VisitForValue(expr->key()), true);
    HValue* key = Pop();
    Drop(1);  // Arguments object.
    if (function_state()->outer() == NULL) {
      HInstruction* elements = Add<HArgumentsElements>(false);
      HInstruction* length = Add<HArgumentsLength>(elements);
      HInstruction* checked_key = Add<HBoundsCheck>(key, length);
      result = New<HAccessArgumentsAt>(elements, length, checked_key);
    } else {
      // Inlined frame: materialize the arguments first, then index into
      // them with a constant length.
      EnsureArgumentsArePushedForAccess();

      // Number of arguments without receiver.
      HInstruction* elements = function_state()->arguments_elements();
      int argument_count = environment()->
          arguments_environment()->parameter_count() - 1;
      HInstruction* length = Add<HConstant>(argument_count);
      HInstruction* checked_key = Add<HBoundsCheck>(key, length);
      result = New<HAccessArgumentsAt>(elements, length, checked_key);
    }
  }
  ast_context()->ReturnInstruction(result, expr->id());
  return true;
}
7980
7981
7982HValue* HOptimizedGraphBuilder::BuildNamedAccess(
7983 PropertyAccessType access, BailoutId ast_id, BailoutId return_id,
7984 Expression* expr, FeedbackVectorSlot slot, HValue* object,
7985 Handle<Name> name, HValue* value, bool is_uninitialized) {
7986 SmallMapList* maps;
Ben Murdoch097c5b22016-05-18 11:27:45 +01007987 ComputeReceiverTypes(expr, object, &maps, this);
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00007988 DCHECK(maps != NULL);
7989
7990 if (maps->length() > 0) {
7991 PropertyAccessInfo info(this, access, maps->first(), name);
7992 if (!info.CanAccessAsMonomorphic(maps)) {
7993 HandlePolymorphicNamedFieldAccess(access, expr, slot, ast_id, return_id,
7994 object, value, maps, name);
7995 return NULL;
7996 }
7997
7998 HValue* checked_object;
7999 // Type::Number() is only supported by polymorphic load/call handling.
8000 DCHECK(!info.IsNumberType());
8001 BuildCheckHeapObject(object);
8002 if (AreStringTypes(maps)) {
8003 checked_object =
8004 Add<HCheckInstanceType>(object, HCheckInstanceType::IS_STRING);
8005 } else {
8006 checked_object = Add<HCheckMaps>(object, maps);
8007 }
8008 return BuildMonomorphicAccess(
8009 &info, object, checked_object, value, ast_id, return_id);
8010 }
8011
8012 return BuildNamedGeneric(access, expr, slot, object, name, value,
8013 is_uninitialized);
8014}
8015
8016
8017void HOptimizedGraphBuilder::PushLoad(Property* expr,
8018 HValue* object,
8019 HValue* key) {
8020 ValueContext for_value(this, ARGUMENTS_NOT_ALLOWED);
8021 Push(object);
8022 if (key != NULL) Push(key);
8023 BuildLoad(expr, expr->LoadId());
8024}
8025
8026
// Emits the load for a property expression whose operands have already
// been pushed (receiver, plus key for keyed and string-index accesses)
// and reports the result through the current ast context.
void HOptimizedGraphBuilder::BuildLoad(Property* expr,
                                       BailoutId ast_id) {
  HInstruction* instr = NULL;
  if (expr->IsStringAccess() && expr->GetKeyType() == ELEMENT) {
    // String indexing: load the char code and convert it to a string.
    HValue* index = Pop();
    HValue* string = Pop();
    HInstruction* char_code = BuildStringCharCodeAt(string, index);
    AddInstruction(char_code);
    instr = NewUncasted<HStringCharFromCode>(char_code);

  } else if (expr->key()->IsPropertyName()) {
    // Named property load.
    Handle<String> name = expr->key()->AsLiteral()->AsPropertyName();
    HValue* object = Pop();

    HValue* value = BuildNamedAccess(LOAD, ast_id, expr->LoadId(), expr,
                                     expr->PropertyFeedbackSlot(), object, name,
                                     NULL, expr->IsUninitialized());
    // NULL means the polymorphic helper already reported the result; phis
    // and linked instructions are returned as plain values.
    if (value == NULL) return;
    if (value->IsPhi()) return ast_context()->ReturnValue(value);
    instr = HInstruction::cast(value);
    if (instr->IsLinked()) return ast_context()->ReturnValue(instr);

  } else {
    // Keyed (element) load.
    HValue* key = Pop();
    HValue* obj = Pop();

    bool has_side_effects = false;
    HValue* load = HandleKeyedElementAccess(
        obj, key, NULL, expr, expr->PropertyFeedbackSlot(), ast_id,
        expr->LoadId(), LOAD, &has_side_effects);
    if (has_side_effects) {
      // Keep the loaded value live across the simulate unless the result
      // is only used for effect.
      if (ast_context()->IsEffect()) {
        Add<HSimulate>(ast_id, REMOVABLE_SIMULATE);
      } else {
        Push(load);
        Add<HSimulate>(ast_id, REMOVABLE_SIMULATE);
        Drop(1);
      }
    }
    if (load == NULL) return;
    return ast_context()->ReturnValue(load);
  }
  return ast_context()->ReturnInstruction(instr, ast_id);
}
8071
8072
// Visitor for property load expressions. Accesses to the arguments object
// get a special fast path; otherwise the receiver (and key, when needed)
// are evaluated onto the environment stack and the load is built.
void HOptimizedGraphBuilder::VisitProperty(Property* expr) {
  DCHECK(!HasStackOverflow());
  DCHECK(current_block() != NULL);
  DCHECK(current_block()->HasPredecessor());

  // Fast path for arguments.length / arguments[i].
  if (TryArgumentsAccess(expr)) return;

  CHECK_ALIVE(VisitForValue(expr->obj()));
  // Keyed accesses and string index accesses also need the key value.
  if (!expr->key()->IsPropertyName() || expr->IsStringAccess()) {
    CHECK_ALIVE(VisitForValue(expr->key()));
  }

  BuildLoad(expr, expr->id());
}
8087
8088
8089HInstruction* HGraphBuilder::BuildConstantMapCheck(Handle<JSObject> constant) {
8090 HCheckMaps* check = Add<HCheckMaps>(
8091 Add<HConstant>(constant), handle(constant->map()));
8092 check->ClearDependsOnFlag(kElementsKind);
8093 return check;
8094}
8095
8096
// Emits map checks for every object on the prototype chain starting at
// |prototype|, up to and including |holder|. If |holder| is null, the
// whole chain is checked and NULL is returned when the end is reached.
HInstruction* HGraphBuilder::BuildCheckPrototypeMaps(Handle<JSObject> prototype,
                                                     Handle<JSObject> holder) {
  PrototypeIterator iter(isolate(), prototype, kStartAtReceiver);
  // Guard each object's map while walking toward the holder.
  while (holder.is_null() ||
         !PrototypeIterator::GetCurrent(iter).is_identical_to(holder)) {
    BuildConstantMapCheck(PrototypeIterator::GetCurrent<JSObject>(iter));
    iter.Advance();
    if (iter.IsAtEnd()) {
      return NULL;
    }
  }
  // Also guard the holder itself and return its check.
  return BuildConstantMapCheck(PrototypeIterator::GetCurrent<JSObject>(iter));
}
8110
8111
8112void HOptimizedGraphBuilder::AddCheckPrototypeMaps(Handle<JSObject> holder,
8113 Handle<Map> receiver_map) {
8114 if (!holder.is_null()) {
8115 Handle<JSObject> prototype(JSObject::cast(receiver_map->prototype()));
8116 BuildCheckPrototypeMaps(prototype, holder);
8117 }
8118}
8119
Ben Murdochda12d292016-06-02 14:46:10 +01008120void HOptimizedGraphBuilder::BuildEnsureCallable(HValue* object) {
8121 NoObservableSideEffectsScope scope(this);
8122 const Runtime::Function* throw_called_non_callable =
8123 Runtime::FunctionForId(Runtime::kThrowCalledNonCallable);
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00008124
Ben Murdochda12d292016-06-02 14:46:10 +01008125 IfBuilder is_not_function(this);
8126 HValue* smi_check = is_not_function.If<HIsSmiAndBranch>(object);
8127 is_not_function.Or();
8128 HValue* map = AddLoadMap(object, smi_check);
8129 HValue* bit_field =
8130 Add<HLoadNamedField>(map, nullptr, HObjectAccess::ForMapBitField());
8131 HValue* bit_field_masked = AddUncasted<HBitwise>(
8132 Token::BIT_AND, bit_field, Add<HConstant>(1 << Map::kIsCallable));
8133 is_not_function.IfNot<HCompareNumericAndBranch>(
8134 bit_field_masked, Add<HConstant>(1 << Map::kIsCallable), Token::EQ);
8135 is_not_function.Then();
8136 {
8137 Add<HPushArguments>(object);
8138 Add<HCallRuntime>(throw_called_non_callable, 1);
8139 }
8140 is_not_function.End();
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00008141}
8142
Ben Murdochda12d292016-06-02 14:46:10 +01008143HInstruction* HOptimizedGraphBuilder::NewCallFunction(
8144 HValue* function, int argument_count, TailCallMode syntactic_tail_call_mode,
8145 ConvertReceiverMode convert_mode, TailCallMode tail_call_mode) {
8146 if (syntactic_tail_call_mode == TailCallMode::kAllow) {
8147 BuildEnsureCallable(function);
8148 } else {
8149 DCHECK_EQ(TailCallMode::kDisallow, tail_call_mode);
8150 }
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00008151 HValue* arity = Add<HConstant>(argument_count - 1);
8152
Ben Murdochda12d292016-06-02 14:46:10 +01008153 HValue* op_vals[] = {context(), function, arity};
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00008154
Ben Murdochda12d292016-06-02 14:46:10 +01008155 Callable callable =
8156 CodeFactory::Call(isolate(), convert_mode, tail_call_mode);
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00008157 HConstant* stub = Add<HConstant>(callable.code());
8158
8159 return New<HCallWithDescriptor>(stub, argument_count, callable.descriptor(),
Ben Murdochc5610432016-08-08 18:44:38 +01008160 ArrayVector(op_vals),
Ben Murdochda12d292016-06-02 14:46:10 +01008161 syntactic_tail_call_mode);
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00008162}
8163
Ben Murdochda12d292016-06-02 14:46:10 +01008164HInstruction* HOptimizedGraphBuilder::NewCallFunctionViaIC(
8165 HValue* function, int argument_count, TailCallMode syntactic_tail_call_mode,
8166 ConvertReceiverMode convert_mode, TailCallMode tail_call_mode,
8167 FeedbackVectorSlot slot) {
8168 if (syntactic_tail_call_mode == TailCallMode::kAllow) {
8169 BuildEnsureCallable(function);
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00008170 } else {
Ben Murdochda12d292016-06-02 14:46:10 +01008171 DCHECK_EQ(TailCallMode::kDisallow, tail_call_mode);
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00008172 }
Ben Murdochda12d292016-06-02 14:46:10 +01008173 int arity = argument_count - 1;
8174 Handle<TypeFeedbackVector> vector(current_feedback_vector(), isolate());
8175 HValue* index_val = Add<HConstant>(vector->GetIndex(slot));
8176 HValue* vector_val = Add<HConstant>(vector);
8177
8178 HValue* op_vals[] = {context(), function, index_val, vector_val};
8179
8180 Callable callable = CodeFactory::CallICInOptimizedCode(
8181 isolate(), arity, convert_mode, tail_call_mode);
8182 HConstant* stub = Add<HConstant>(callable.code());
8183
8184 return New<HCallWithDescriptor>(stub, argument_count, callable.descriptor(),
Ben Murdochc5610432016-08-08 18:44:38 +01008185 ArrayVector(op_vals),
Ben Murdochda12d292016-06-02 14:46:10 +01008186 syntactic_tail_call_mode);
8187}
8188
8189HInstruction* HOptimizedGraphBuilder::NewCallConstantFunction(
8190 Handle<JSFunction> function, int argument_count,
8191 TailCallMode syntactic_tail_call_mode, TailCallMode tail_call_mode) {
8192 HValue* target = Add<HConstant>(function);
8193 return New<HInvokeFunction>(target, function, argument_count,
8194 syntactic_tail_call_mode, tail_call_mode);
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00008195}
8196
8197
// Sort key used to rank polymorphic call targets: candidates with more
// profiler ticks sort first; ties are broken by smaller inlining size.
// |index_| remembers the candidate's position in the original map list.
class FunctionSorter {
 public:
  explicit FunctionSorter(int index = 0, int ticks = 0, int size = 0)
      : index_(index), ticks_(ticks), size_(size) {}

  int index() const { return index_; }
  int ticks() const { return ticks_; }
  int size() const { return size_; }

 private:
  int index_;
  int ticks_;
  int size_;
};


// Strict weak ordering: hotter (more ticks) first, then smaller size.
inline bool operator<(const FunctionSorter& lhs, const FunctionSorter& rhs) {
  // Compare the tick counts directly rather than via subtraction: the
  // previous difference-based comparison (lhs.ticks() - rhs.ticks()) could
  // overflow int, which is undefined behavior, for extreme tick values.
  if (lhs.ticks() != rhs.ticks()) return lhs.ticks() > rhs.ticks();
  return lhs.size() < rhs.size();
}
8219
8220
// Compiles a polymorphic named call receiver.name(...) by dispatching on
// the receiver's map. Up to kMaxCallPolymorphism constant JSFunction
// targets are collected from the feedback maps, ranked by hotness, and
// each is either inlined or called directly behind its map check. Maps
// without a constant target fall back to a generic load + call, or to a
// hard deoptimization when all known maps were handled.
void HOptimizedGraphBuilder::HandlePolymorphicCallNamed(Call* expr,
                                                        HValue* receiver,
                                                        SmallMapList* maps,
                                                        Handle<String> name) {
  int argument_count = expr->arguments()->length() + 1;  // Includes receiver.
  FunctionSorter order[kMaxCallPolymorphism];

  bool handle_smi = false;
  bool handled_string = false;
  int ordered_functions = 0;

  TailCallMode syntactic_tail_call_mode = expr->tail_call_mode();
  TailCallMode tail_call_mode =
      function_state()->ComputeTailCallMode(syntactic_tail_call_mode);

  // Pass 1: collect maps whose target is a constant JSFunction; record
  // each candidate's hotness and inlining size for sorting. String maps
  // are collapsed into a single case via handled_string.
  int i;
  for (i = 0; i < maps->length() && ordered_functions < kMaxCallPolymorphism;
       ++i) {
    PropertyAccessInfo info(this, LOAD, maps->at(i), name);
    if (info.CanAccessMonomorphic() && info.IsDataConstant() &&
        info.constant()->IsJSFunction()) {
      if (info.IsStringType()) {
        if (handled_string) continue;
        handled_string = true;
      }
      Handle<JSFunction> target = Handle<JSFunction>::cast(info.constant());
      if (info.IsNumberType()) {
        handle_smi = true;
      }
      expr->set_target(target);
      order[ordered_functions++] = FunctionSorter(
          i, target->shared()->profiler_ticks(), InliningAstSize(target));
    }
  }

  // Hotter and smaller candidates are emitted (and thus checked) first.
  std::sort(order, order + ordered_functions);

  // If some maps had no constant target, disable the "deopt on unknown
  // map" exit below by making the counts differ.
  if (i < maps->length()) {
    maps->Clear();
    ordered_functions = -1;
  }

  HBasicBlock* number_block = NULL;
  HBasicBlock* join = NULL;
  handled_string = false;
  int count = 0;

  // Pass 2: emit one map-compare + call (or inlined body) per candidate.
  for (int fn = 0; fn < ordered_functions; ++fn) {
    int i = order[fn].index();
    PropertyAccessInfo info(this, LOAD, maps->at(i), name);
    if (info.IsStringType()) {
      if (handled_string) continue;
      handled_string = true;
    }
    // Reloads the target.
    info.CanAccessMonomorphic();
    Handle<JSFunction> target = Handle<JSFunction>::cast(info.constant());

    expr->set_target(target);
    if (count == 0) {
      // Only needed once.
      join = graph()->CreateBasicBlock();
      if (handle_smi) {
        // A Number candidate exists, so route Smis to the heap-number
        // case instead of deopting on the Smi check.
        HBasicBlock* empty_smi_block = graph()->CreateBasicBlock();
        HBasicBlock* not_smi_block = graph()->CreateBasicBlock();
        number_block = graph()->CreateBasicBlock();
        FinishCurrentBlock(New<HIsSmiAndBranch>(
            receiver, empty_smi_block, not_smi_block));
        GotoNoSimulate(empty_smi_block, number_block);
        set_current_block(not_smi_block);
      } else {
        BuildCheckHeapObject(receiver);
      }
    }
    ++count;
    HBasicBlock* if_true = graph()->CreateBasicBlock();
    HBasicBlock* if_false = graph()->CreateBasicBlock();
    HUnaryControlInstruction* compare;

    // Choose the dispatch test: heap-number map for Number candidates,
    // an is-string test for String candidates, a map compare otherwise.
    Handle<Map> map = info.map();
    if (info.IsNumberType()) {
      Handle<Map> heap_number_map = isolate()->factory()->heap_number_map();
      compare = New<HCompareMap>(receiver, heap_number_map, if_true, if_false);
    } else if (info.IsStringType()) {
      compare = New<HIsStringAndBranch>(receiver, if_true, if_false);
    } else {
      compare = New<HCompareMap>(receiver, map, if_true, if_false);
    }
    FinishCurrentBlock(compare);

    if (info.IsNumberType()) {
      // Merge the Smi path into the heap-number case.
      GotoNoSimulate(if_true, number_block);
      if_true = number_block;
    }

    set_current_block(if_true);

    AddCheckPrototypeMaps(info.holder(), map);

    // Set up the frame as VisitCall would: function, receiver, arguments.
    HValue* function = Add<HConstant>(expr->target());
    environment()->SetExpressionStackAt(0, function);
    Push(receiver);
    CHECK_ALIVE(VisitExpressions(expr->arguments()));
    bool needs_wrapping = info.NeedsWrappingFor(target);
    bool try_inline = FLAG_polymorphic_inlining && !needs_wrapping;
    if (FLAG_trace_inlining && try_inline) {
      Handle<JSFunction> caller = current_info()->closure();
      base::SmartArrayPointer<char> caller_name =
          caller->shared()->DebugName()->ToCString();
      PrintF("Trying to inline the polymorphic call to %s from %s\n",
             name->ToCString().get(),
             caller_name.get());
    }
    if (try_inline && TryInlineCall(expr)) {
      // Trying to inline will signal that we should bailout from the
      // entire compilation by setting stack overflow on the visitor.
      if (HasStackOverflow()) return;
    } else {
      // Since HWrapReceiver currently cannot actually wrap numbers and strings,
      // use the regular call builtin for method calls to wrap the receiver.
      // TODO(verwaest): Support creation of value wrappers directly in
      // HWrapReceiver.
      HInstruction* call =
          needs_wrapping
              ? NewCallFunction(
                    function, argument_count, syntactic_tail_call_mode,
                    ConvertReceiverMode::kNotNullOrUndefined, tail_call_mode)
              : NewCallConstantFunction(target, argument_count,
                                        syntactic_tail_call_mode,
                                        tail_call_mode);
      PushArgumentsFromEnvironment(argument_count);
      AddInstruction(call);
      Drop(1);  // Drop the function.
      if (!ast_context()->IsEffect()) Push(call);
    }

    if (current_block() != NULL) Goto(join);
    set_current_block(if_false);
  }

  // Finish up. Unconditionally deoptimize if we've handled all the maps we
  // know about and do not want to handle ones we've never seen. Otherwise
  // use a generic IC.
  if (ordered_functions == maps->length() && FLAG_deoptimize_uncommon_cases) {
    FinishExitWithHardDeoptimization(Deoptimizer::kUnknownMapInPolymorphicCall);
  } else {
    // Generic fallback: load the property generically and call it.
    Property* prop = expr->expression()->AsProperty();
    HInstruction* function =
        BuildNamedGeneric(LOAD, prop, prop->PropertyFeedbackSlot(), receiver,
                          name, NULL, prop->IsUninitialized());
    AddInstruction(function);
    Push(function);
    AddSimulate(prop->LoadId(), REMOVABLE_SIMULATE);

    environment()->SetExpressionStackAt(1, function);
    environment()->SetExpressionStackAt(0, receiver);
    CHECK_ALIVE(VisitExpressions(expr->arguments()));

    HInstruction* call = NewCallFunction(
        function, argument_count, syntactic_tail_call_mode,
        ConvertReceiverMode::kNotNullOrUndefined, tail_call_mode);

    PushArgumentsFromEnvironment(argument_count);

    Drop(1);  // Function.

    if (join != NULL) {
      AddInstruction(call);
      if (!ast_context()->IsEffect()) Push(call);
      Goto(join);
    } else {
      // No specialized cases were emitted; report the call directly.
      return ast_context()->ReturnInstruction(call, expr->id());
    }
  }

  // We assume that control flow is always live after an expression. So
  // even without predecessors to the join block, we set it as the exit
  // block and continue by adding instructions there.
  DCHECK(join != NULL);
  if (join->HasPredecessor()) {
    set_current_block(join);
    join->SetJoinId(expr->id());
    if (!ast_context()->IsEffect()) return ast_context()->ReturnValue(Pop());
  } else {
    set_current_block(NULL);
  }
}
8408
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00008409void HOptimizedGraphBuilder::TraceInline(Handle<JSFunction> target,
8410 Handle<JSFunction> caller,
Ben Murdochda12d292016-06-02 14:46:10 +01008411 const char* reason,
8412 TailCallMode tail_call_mode) {
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00008413 if (FLAG_trace_inlining) {
8414 base::SmartArrayPointer<char> target_name =
8415 target->shared()->DebugName()->ToCString();
8416 base::SmartArrayPointer<char> caller_name =
8417 caller->shared()->DebugName()->ToCString();
8418 if (reason == NULL) {
Ben Murdochda12d292016-06-02 14:46:10 +01008419 const char* call_mode =
8420 tail_call_mode == TailCallMode::kAllow ? "tail called" : "called";
8421 PrintF("Inlined %s %s from %s.\n", target_name.get(), call_mode,
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00008422 caller_name.get());
8423 } else {
8424 PrintF("Did not inline %s called from %s (%s).\n",
8425 target_name.get(), caller_name.get(), reason);
8426 }
8427 }
8428}
8429
8430
// Sentinel returned by InliningAstSize for targets that must never be
// inlined; chosen far above any realistic AST node count.
static const int kNotInlinable = 1000000000;
8432
8433
// Returns the AST node count of |target| as an inlining-cost estimate, or
// kNotInlinable when the target must not be inlined (builtin, api
// function, too large, optimization disabled, ...). Force-inlined
// targets report a cost of 0.
int HOptimizedGraphBuilder::InliningAstSize(Handle<JSFunction> target) {
  if (!FLAG_use_inlining) return kNotInlinable;

  // Precondition: call is monomorphic and we have found a target with the
  // appropriate arity.
  Handle<JSFunction> caller = current_info()->closure();
  Handle<SharedFunctionInfo> target_shared(target->shared());

  // Always inline functions that force inlining.
  if (target_shared->force_inline()) {
    return 0;
  }
  if (target->shared()->IsBuiltin()) {
    return kNotInlinable;
  }

  if (target_shared->IsApiFunction()) {
    TraceInline(target, caller, "target is api function");
    return kNotInlinable;
  }

  // Do a quick check on source code length to avoid parsing large
  // inlining candidates.
  if (target_shared->SourceSize() >
      Min(FLAG_max_inlined_source_size, kUnlimitedMaxInlinedSourceSize)) {
    TraceInline(target, caller, "target text too big");
    return kNotInlinable;
  }

  // Target must be inlineable.
  BailoutReason noopt_reason = target_shared->disable_optimization_reason();
  if (!target_shared->IsInlineable() && noopt_reason != kHydrogenFilter) {
    TraceInline(target, caller, "target not inlineable");
    return kNotInlinable;
  }
  if (noopt_reason != kNoReason && noopt_reason != kHydrogenFilter) {
    TraceInline(target, caller, "target contains unsupported syntax [early]");
    return kNotInlinable;
  }

  // Eligible: the cost is the number of AST nodes.
  int nodes_added = target_shared->ast_node_count();
  return nodes_added;
}
8477
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00008478bool HOptimizedGraphBuilder::TryInline(Handle<JSFunction> target,
8479 int arguments_count,
8480 HValue* implicit_return_value,
8481 BailoutId ast_id, BailoutId return_id,
Ben Murdochda12d292016-06-02 14:46:10 +01008482 InliningKind inlining_kind,
8483 TailCallMode syntactic_tail_call_mode) {
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00008484 if (target->context()->native_context() !=
8485 top_info()->closure()->context()->native_context()) {
8486 return false;
8487 }
8488 int nodes_added = InliningAstSize(target);
8489 if (nodes_added == kNotInlinable) return false;
8490
8491 Handle<JSFunction> caller = current_info()->closure();
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00008492 if (nodes_added > Min(FLAG_max_inlined_nodes, kUnlimitedMaxInlinedNodes)) {
8493 TraceInline(target, caller, "target AST is too large [early]");
8494 return false;
8495 }
8496
8497 // Don't inline deeper than the maximum number of inlining levels.
8498 HEnvironment* env = environment();
8499 int current_level = 1;
8500 while (env->outer() != NULL) {
8501 if (current_level == FLAG_max_inlining_levels) {
8502 TraceInline(target, caller, "inline depth limit reached");
8503 return false;
8504 }
8505 if (env->outer()->frame_type() == JS_FUNCTION) {
8506 current_level++;
8507 }
8508 env = env->outer();
8509 }
8510
8511 // Don't inline recursive functions.
8512 for (FunctionState* state = function_state();
8513 state != NULL;
8514 state = state->outer()) {
8515 if (*state->compilation_info()->closure() == *target) {
8516 TraceInline(target, caller, "target is recursive");
8517 return false;
8518 }
8519 }
8520
8521 // We don't want to add more than a certain number of nodes from inlining.
8522 // Always inline small methods (<= 10 nodes).
8523 if (inlined_count_ > Min(FLAG_max_inlined_nodes_cumulative,
8524 kUnlimitedMaxInlinedNodesCumulative)) {
8525 TraceInline(target, caller, "cumulative AST node limit reached");
8526 return false;
8527 }
8528
8529 // Parse and allocate variables.
8530 // Use the same AstValueFactory for creating strings in the sub-compilation
8531 // step, but don't transfer ownership to target_info.
8532 ParseInfo parse_info(zone(), target);
8533 parse_info.set_ast_value_factory(
8534 top_info()->parse_info()->ast_value_factory());
8535 parse_info.set_ast_value_factory_owned(false);
8536
Ben Murdochc5610432016-08-08 18:44:38 +01008537 CompilationInfo target_info(&parse_info, target);
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00008538 Handle<SharedFunctionInfo> target_shared(target->shared());
8539
Ben Murdoch097c5b22016-05-18 11:27:45 +01008540 if (inlining_kind != CONSTRUCT_CALL_RETURN &&
8541 IsClassConstructor(target_shared->kind())) {
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00008542 TraceInline(target, caller, "target is classConstructor");
8543 return false;
8544 }
Ben Murdoch097c5b22016-05-18 11:27:45 +01008545
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00008546 if (target_shared->HasDebugInfo()) {
8547 TraceInline(target, caller, "target is being debugged");
8548 return false;
8549 }
8550 if (!Compiler::ParseAndAnalyze(target_info.parse_info())) {
8551 if (target_info.isolate()->has_pending_exception()) {
8552 // Parse or scope error, never optimize this function.
8553 SetStackOverflow();
8554 target_shared->DisableOptimization(kParseScopeError);
8555 }
8556 TraceInline(target, caller, "parse failure");
8557 return false;
8558 }
Ben Murdochc5610432016-08-08 18:44:38 +01008559 if (target_shared->dont_crankshaft()) {
8560 TraceInline(target, caller, "ParseAndAnalyze found incompatibility");
8561 return false;
8562 }
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00008563
8564 if (target_info.scope()->num_heap_slots() > 0) {
8565 TraceInline(target, caller, "target has context-allocated variables");
8566 return false;
8567 }
8568
8569 int rest_index;
8570 Variable* rest = target_info.scope()->rest_parameter(&rest_index);
8571 if (rest) {
8572 TraceInline(target, caller, "target uses rest parameters");
8573 return false;
8574 }
8575
8576 FunctionLiteral* function = target_info.literal();
8577
8578 // The following conditions must be checked again after re-parsing, because
8579 // earlier the information might not have been complete due to lazy parsing.
8580 nodes_added = function->ast_node_count();
8581 if (nodes_added > Min(FLAG_max_inlined_nodes, kUnlimitedMaxInlinedNodes)) {
8582 TraceInline(target, caller, "target AST is too large [late]");
8583 return false;
8584 }
8585 if (function->dont_optimize()) {
8586 TraceInline(target, caller, "target contains unsupported syntax [late]");
8587 return false;
8588 }
8589
8590 // If the function uses the arguments object check that inlining of functions
8591 // with arguments object is enabled and the arguments-variable is
8592 // stack allocated.
8593 if (function->scope()->arguments() != NULL) {
8594 if (!FLAG_inline_arguments) {
8595 TraceInline(target, caller, "target uses arguments object");
8596 return false;
8597 }
8598 }
8599
8600 // Unsupported variable references present.
8601 if (function->scope()->this_function_var() != nullptr ||
8602 function->scope()->new_target_var() != nullptr) {
8603 TraceInline(target, caller, "target uses new target or this function");
8604 return false;
8605 }
8606
8607 // All declarations must be inlineable.
8608 ZoneList<Declaration*>* decls = target_info.scope()->declarations();
8609 int decl_count = decls->length();
8610 for (int i = 0; i < decl_count; ++i) {
8611 if (!decls->at(i)->IsInlineable()) {
8612 TraceInline(target, caller, "target has non-trivial declaration");
8613 return false;
8614 }
8615 }
8616
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00008617 // Generate the deoptimization data for the unoptimized version of
8618 // the target function if we don't already have it.
8619 if (!Compiler::EnsureDeoptimizationSupport(&target_info)) {
8620 TraceInline(target, caller, "could not generate deoptimization info");
8621 return false;
8622 }
Ben Murdoch61f157c2016-09-16 13:49:30 +01008623
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00008624 // Remember that we inlined this function. This needs to be called right
8625 // after the EnsureDeoptimizationSupport call so that the code flusher
8626 // does not remove the code with the deoptimization support.
8627 top_info()->AddInlinedFunction(target_info.shared_info());
8628
8629 // ----------------------------------------------------------------
8630 // After this point, we've made a decision to inline this function (so
8631 // TryInline should always return true).
8632
Ben Murdoch61f157c2016-09-16 13:49:30 +01008633 // If target was lazily compiled, it's literals array may not yet be set up.
8634 JSFunction::EnsureLiterals(target);
8635
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00008636 // Type-check the inlined function.
8637 DCHECK(target_shared->has_deoptimization_support());
8638 AstTyper(target_info.isolate(), target_info.zone(), target_info.closure(),
Ben Murdochc5610432016-08-08 18:44:38 +01008639 target_info.scope(), target_info.osr_ast_id(), target_info.literal(),
8640 &bounds_)
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00008641 .Run();
8642
8643 int inlining_id = 0;
8644 if (top_info()->is_tracking_positions()) {
Ben Murdochc5610432016-08-08 18:44:38 +01008645 inlining_id = TraceInlinedFunction(target_shared, source_position());
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00008646 }
8647
8648 // Save the pending call context. Set up new one for the inlined function.
8649 // The function state is new-allocated because we need to delete it
8650 // in two different places.
Ben Murdochda12d292016-06-02 14:46:10 +01008651 FunctionState* target_state = new FunctionState(
8652 this, &target_info, inlining_kind, inlining_id,
8653 function_state()->ComputeTailCallMode(syntactic_tail_call_mode));
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00008654
8655 HConstant* undefined = graph()->GetConstantUndefined();
8656
Ben Murdochda12d292016-06-02 14:46:10 +01008657 HEnvironment* inner_env = environment()->CopyForInlining(
8658 target, arguments_count, function, undefined,
8659 function_state()->inlining_kind(), syntactic_tail_call_mode);
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00008660
8661 HConstant* context = Add<HConstant>(Handle<Context>(target->context()));
8662 inner_env->BindContext(context);
8663
8664 // Create a dematerialized arguments object for the function, also copy the
8665 // current arguments values to use them for materialization.
8666 HEnvironment* arguments_env = inner_env->arguments_environment();
8667 int parameter_count = arguments_env->parameter_count();
8668 HArgumentsObject* arguments_object = Add<HArgumentsObject>(parameter_count);
8669 for (int i = 0; i < parameter_count; i++) {
8670 arguments_object->AddArgument(arguments_env->Lookup(i), zone());
8671 }
8672
8673 // If the function uses arguments object then bind bind one.
8674 if (function->scope()->arguments() != NULL) {
8675 DCHECK(function->scope()->arguments()->IsStackAllocated());
8676 inner_env->Bind(function->scope()->arguments(), arguments_object);
8677 }
8678
8679 // Capture the state before invoking the inlined function for deopt in the
8680 // inlined function. This simulate has no bailout-id since it's not directly
8681 // reachable for deopt, and is only used to capture the state. If the simulate
8682 // becomes reachable by merging, the ast id of the simulate merged into it is
8683 // adopted.
8684 Add<HSimulate>(BailoutId::None());
8685
8686 current_block()->UpdateEnvironment(inner_env);
8687 Scope* saved_scope = scope();
8688 set_scope(target_info.scope());
Ben Murdochda12d292016-06-02 14:46:10 +01008689 HEnterInlined* enter_inlined = Add<HEnterInlined>(
8690 return_id, target, context, arguments_count, function,
8691 function_state()->inlining_kind(), function->scope()->arguments(),
8692 arguments_object, syntactic_tail_call_mode);
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00008693 if (top_info()->is_tracking_positions()) {
8694 enter_inlined->set_inlining_id(inlining_id);
8695 }
8696 function_state()->set_entry(enter_inlined);
8697
8698 VisitDeclarations(target_info.scope()->declarations());
8699 VisitStatements(function->body());
8700 set_scope(saved_scope);
8701 if (HasStackOverflow()) {
8702 // Bail out if the inline function did, as we cannot residualize a call
8703 // instead, but do not disable optimization for the outer function.
8704 TraceInline(target, caller, "inline graph construction failed");
8705 target_shared->DisableOptimization(kInliningBailedOut);
8706 current_info()->RetryOptimization(kInliningBailedOut);
8707 delete target_state;
8708 return true;
8709 }
8710
8711 // Update inlined nodes count.
8712 inlined_count_ += nodes_added;
8713
8714 Handle<Code> unoptimized_code(target_shared->code());
8715 DCHECK(unoptimized_code->kind() == Code::FUNCTION);
8716 Handle<TypeFeedbackInfo> type_info(
8717 TypeFeedbackInfo::cast(unoptimized_code->type_feedback_info()));
8718 graph()->update_type_change_checksum(type_info->own_type_change_checksum());
8719
Ben Murdochda12d292016-06-02 14:46:10 +01008720 TraceInline(target, caller, NULL, syntactic_tail_call_mode);
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00008721
8722 if (current_block() != NULL) {
8723 FunctionState* state = function_state();
8724 if (state->inlining_kind() == CONSTRUCT_CALL_RETURN) {
8725 // Falling off the end of an inlined construct call. In a test context the
8726 // return value will always evaluate to true, in a value context the
8727 // return value is the newly allocated receiver.
8728 if (call_context()->IsTest()) {
Ben Murdoch097c5b22016-05-18 11:27:45 +01008729 inlined_test_context()->ReturnValue(graph()->GetConstantTrue());
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00008730 } else if (call_context()->IsEffect()) {
8731 Goto(function_return(), state);
8732 } else {
8733 DCHECK(call_context()->IsValue());
8734 AddLeaveInlined(implicit_return_value, state);
8735 }
8736 } else if (state->inlining_kind() == SETTER_CALL_RETURN) {
8737 // Falling off the end of an inlined setter call. The returned value is
8738 // never used, the value of an assignment is always the value of the RHS
8739 // of the assignment.
8740 if (call_context()->IsTest()) {
8741 inlined_test_context()->ReturnValue(implicit_return_value);
8742 } else if (call_context()->IsEffect()) {
8743 Goto(function_return(), state);
8744 } else {
8745 DCHECK(call_context()->IsValue());
8746 AddLeaveInlined(implicit_return_value, state);
8747 }
8748 } else {
8749 // Falling off the end of a normal inlined function. This basically means
8750 // returning undefined.
8751 if (call_context()->IsTest()) {
Ben Murdoch097c5b22016-05-18 11:27:45 +01008752 inlined_test_context()->ReturnValue(graph()->GetConstantFalse());
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00008753 } else if (call_context()->IsEffect()) {
8754 Goto(function_return(), state);
8755 } else {
8756 DCHECK(call_context()->IsValue());
8757 AddLeaveInlined(undefined, state);
8758 }
8759 }
8760 }
8761
8762 // Fix up the function exits.
8763 if (inlined_test_context() != NULL) {
8764 HBasicBlock* if_true = inlined_test_context()->if_true();
8765 HBasicBlock* if_false = inlined_test_context()->if_false();
8766
8767 HEnterInlined* entry = function_state()->entry();
8768
8769 // Pop the return test context from the expression context stack.
8770 DCHECK(ast_context() == inlined_test_context());
8771 ClearInlinedTestContext();
8772 delete target_state;
8773
8774 // Forward to the real test context.
8775 if (if_true->HasPredecessor()) {
8776 entry->RegisterReturnTarget(if_true, zone());
8777 if_true->SetJoinId(ast_id);
8778 HBasicBlock* true_target = TestContext::cast(ast_context())->if_true();
8779 Goto(if_true, true_target, function_state());
8780 }
8781 if (if_false->HasPredecessor()) {
8782 entry->RegisterReturnTarget(if_false, zone());
8783 if_false->SetJoinId(ast_id);
8784 HBasicBlock* false_target = TestContext::cast(ast_context())->if_false();
8785 Goto(if_false, false_target, function_state());
8786 }
8787 set_current_block(NULL);
8788 return true;
8789
8790 } else if (function_return()->HasPredecessor()) {
8791 function_state()->entry()->RegisterReturnTarget(function_return(), zone());
8792 function_return()->SetJoinId(ast_id);
8793 set_current_block(function_return());
8794 } else {
8795 set_current_block(NULL);
8796 }
8797 delete target_state;
8798 return true;
8799}
8800
8801
8802bool HOptimizedGraphBuilder::TryInlineCall(Call* expr) {
8803 return TryInline(expr->target(), expr->arguments()->length(), NULL,
Ben Murdochda12d292016-06-02 14:46:10 +01008804 expr->id(), expr->ReturnId(), NORMAL_RETURN,
8805 expr->tail_call_mode());
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00008806}
8807
8808
8809bool HOptimizedGraphBuilder::TryInlineConstruct(CallNew* expr,
8810 HValue* implicit_return_value) {
8811 return TryInline(expr->target(), expr->arguments()->length(),
8812 implicit_return_value, expr->id(), expr->ReturnId(),
Ben Murdochda12d292016-06-02 14:46:10 +01008813 CONSTRUCT_CALL_RETURN, TailCallMode::kDisallow);
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00008814}
8815
Ben Murdoch097c5b22016-05-18 11:27:45 +01008816bool HOptimizedGraphBuilder::TryInlineGetter(Handle<Object> getter,
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00008817 Handle<Map> receiver_map,
8818 BailoutId ast_id,
8819 BailoutId return_id) {
8820 if (TryInlineApiGetter(getter, receiver_map, ast_id)) return true;
Ben Murdoch61f157c2016-09-16 13:49:30 +01008821 if (getter->IsJSFunction()) {
8822 Handle<JSFunction> getter_function = Handle<JSFunction>::cast(getter);
8823 return TryInlineBuiltinGetterCall(getter_function, receiver_map, ast_id) ||
8824 TryInline(getter_function, 0, NULL, ast_id, return_id,
8825 GETTER_CALL_RETURN, TailCallMode::kDisallow);
8826 }
8827 return false;
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00008828}
8829
Ben Murdoch097c5b22016-05-18 11:27:45 +01008830bool HOptimizedGraphBuilder::TryInlineSetter(Handle<Object> setter,
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00008831 Handle<Map> receiver_map,
8832 BailoutId id,
8833 BailoutId assignment_id,
8834 HValue* implicit_return_value) {
8835 if (TryInlineApiSetter(setter, receiver_map, id)) return true;
Ben Murdoch097c5b22016-05-18 11:27:45 +01008836 return setter->IsJSFunction() &&
8837 TryInline(Handle<JSFunction>::cast(setter), 1, implicit_return_value,
Ben Murdochda12d292016-06-02 14:46:10 +01008838 id, assignment_id, SETTER_CALL_RETURN,
8839 TailCallMode::kDisallow);
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00008840}
8841
8842
8843bool HOptimizedGraphBuilder::TryInlineIndirectCall(Handle<JSFunction> function,
8844 Call* expr,
8845 int arguments_count) {
8846 return TryInline(function, arguments_count, NULL, expr->id(),
Ben Murdochda12d292016-06-02 14:46:10 +01008847 expr->ReturnId(), NORMAL_RETURN, expr->tail_call_mode());
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00008848}
8849
8850
// Tries to replace a call to a recognized builtin (currently the one-argument
// Math functions and Math.imul) with a dedicated Hydrogen instruction in the
// calling function.  Returns true if the call was handled; on success the
// argument(s), receiver and function have been popped off the environment
// stack and the result instruction has been delivered to the AST context.
bool HOptimizedGraphBuilder::TryInlineBuiltinFunctionCall(Call* expr) {
  if (!expr->target()->shared()->HasBuiltinFunctionId()) return false;
  BuiltinFunctionId id = expr->target()->shared()->builtin_function_id();
  // We intentionally ignore expr->tail_call_mode() here because builtins
  // we inline here do not observe if they were tail called or not.
  switch (id) {
    // Unary Math builtins all map onto HUnaryMathOperation, parameterized
    // by the builtin id.
    case kMathCos:
    case kMathExp:
    case kMathRound:
    case kMathFround:
    case kMathFloor:
    case kMathAbs:
    case kMathSin:
    case kMathSqrt:
    case kMathLog:
    case kMathClz32:
      // Only the exact-arity call is inlined; otherwise fall through to the
      // generic call path (which applies the usual argument adaption).
      if (expr->arguments()->length() == 1) {
        HValue* argument = Pop();
        Drop(2);  // Receiver and function.
        HInstruction* op = NewUncasted<HUnaryMathOperation>(argument, id);
        ast_context()->ReturnInstruction(op, expr->id());
        return true;
      }
      break;
    case kMathImul:
      if (expr->arguments()->length() == 2) {
        // Arguments are popped in reverse order of evaluation.
        HValue* right = Pop();
        HValue* left = Pop();
        Drop(2);  // Receiver and function.
        HInstruction* op =
            HMul::NewImul(isolate(), zone(), context(), left, right);
        ast_context()->ReturnInstruction(op, expr->id());
        return true;
      }
      break;
    default:
      // Not supported for inlining yet.
      break;
  }
  return false;
}
8892
8893
8894// static
8895bool HOptimizedGraphBuilder::IsReadOnlyLengthDescriptor(
8896 Handle<Map> jsarray_map) {
8897 DCHECK(!jsarray_map->is_dictionary_map());
8898 Isolate* isolate = jsarray_map->GetIsolate();
8899 Handle<Name> length_string = isolate->factory()->length_string();
8900 DescriptorArray* descriptors = jsarray_map->instance_descriptors();
Ben Murdoch097c5b22016-05-18 11:27:45 +01008901 int number =
8902 descriptors->SearchWithCache(isolate, *length_string, *jsarray_map);
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00008903 DCHECK_NE(DescriptorArray::kNotFound, number);
8904 return descriptors->GetDetails(number).IsReadOnly();
8905}
8906
8907
8908// static
8909bool HOptimizedGraphBuilder::CanInlineArrayResizeOperation(
8910 Handle<Map> receiver_map) {
8911 return !receiver_map.is_null() && receiver_map->prototype()->IsJSObject() &&
8912 receiver_map->instance_type() == JS_ARRAY_TYPE &&
8913 IsFastElementsKind(receiver_map->elements_kind()) &&
Ben Murdochc5610432016-08-08 18:44:38 +01008914 !receiver_map->is_dictionary_map() && receiver_map->is_extensible() &&
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00008915 (!receiver_map->is_prototype_map() || receiver_map->is_stable()) &&
8916 !IsReadOnlyLengthDescriptor(receiver_map);
8917}
8918
// Tries to replace a call to a recognized builtin accessor (DataView /
// TypedArray byteLength, byteOffset, length, buffer) with a direct field
// load in the calling function.  On success the receiver has been popped
// off the environment stack, the load has been delivered to the AST
// context, and true is returned.
bool HOptimizedGraphBuilder::TryInlineBuiltinGetterCall(
    Handle<JSFunction> function, Handle<Map> receiver_map, BailoutId ast_id) {
  if (!function->shared()->HasBuiltinFunctionId()) return false;
  BuiltinFunctionId id = function->shared()->builtin_function_id();

  // Try to inline getter calls like DataView.prototype.byteLength/byteOffset
  // as operations in the calling function.
  switch (id) {
    case kDataViewBuffer: {
      if (!receiver_map->IsJSDataViewMap()) return false;
      // The buffer is loaded directly; no neutering check is needed since
      // the buffer reference itself stays valid after neutering.
      HObjectAccess access = HObjectAccess::ForMapAndOffset(
          receiver_map, JSDataView::kBufferOffset);
      HValue* object = Pop();  // receiver
      HInstruction* result = New<HLoadNamedField>(object, object, access);
      ast_context()->ReturnInstruction(result, ast_id);
      return true;
    }
    case kDataViewByteLength:
    case kDataViewByteOffset: {
      if (!receiver_map->IsJSDataViewMap()) return false;
      int offset = (id == kDataViewByteLength) ? JSDataView::kByteLengthOffset
                                               : JSDataView::kByteOffsetOffset;
      HObjectAccess access =
          HObjectAccess::ForMapAndOffset(receiver_map, offset);
      HValue* object = Pop();  // receiver
      // Deoptimize if the underlying ArrayBuffer has been neutered; the
      // load is performed on the checked value.
      HValue* checked_object = Add<HCheckArrayBufferNotNeutered>(object);
      HInstruction* result =
          New<HLoadNamedField>(object, checked_object, access);
      ast_context()->ReturnInstruction(result, ast_id);
      return true;
    }
    case kTypedArrayByteLength:
    case kTypedArrayByteOffset:
    case kTypedArrayLength: {
      if (!receiver_map->IsJSTypedArrayMap()) return false;
      // Select the field offset matching the builtin being called.
      int offset = (id == kTypedArrayLength)
                       ? JSTypedArray::kLengthOffset
                       : (id == kTypedArrayByteLength)
                             ? JSTypedArray::kByteLengthOffset
                             : JSTypedArray::kByteOffsetOffset;
      HObjectAccess access =
          HObjectAccess::ForMapAndOffset(receiver_map, offset);
      HValue* object = Pop();  // receiver
      // As above: guard against a neutered backing ArrayBuffer.
      HValue* checked_object = Add<HCheckArrayBufferNotNeutered>(object);
      HInstruction* result =
          New<HLoadNamedField>(object, checked_object, access);
      ast_context()->ReturnInstruction(result, ast_id);
      return true;
    }
    default:
      return false;
  }
}
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00008972
8973bool HOptimizedGraphBuilder::TryInlineBuiltinMethodCall(
Ben Murdoch61f157c2016-09-16 13:49:30 +01008974 Handle<JSFunction> function, Handle<Map> receiver_map, BailoutId ast_id,
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00008975 int args_count_no_receiver) {
8976 if (!function->shared()->HasBuiltinFunctionId()) return false;
8977 BuiltinFunctionId id = function->shared()->builtin_function_id();
8978 int argument_count = args_count_no_receiver + 1; // Plus receiver.
8979
8980 if (receiver_map.is_null()) {
8981 HValue* receiver = environment()->ExpressionStackAt(args_count_no_receiver);
8982 if (receiver->IsConstant() &&
8983 HConstant::cast(receiver)->handle(isolate())->IsHeapObject()) {
8984 receiver_map =
8985 handle(Handle<HeapObject>::cast(
8986 HConstant::cast(receiver)->handle(isolate()))->map());
8987 }
8988 }
8989 // Try to inline calls like Math.* as operations in the calling function.
8990 switch (id) {
Ben Murdochda12d292016-06-02 14:46:10 +01008991 case kObjectHasOwnProperty: {
8992 // It's not safe to look through the phi for elements if we're compiling
8993 // for osr.
8994 if (top_info()->is_osr()) return false;
8995 if (argument_count != 2) return false;
8996 HValue* key = Top();
8997 if (!key->IsLoadKeyed()) return false;
8998 HValue* elements = HLoadKeyed::cast(key)->elements();
8999 if (!elements->IsPhi() || elements->OperandCount() != 1) return false;
9000 if (!elements->OperandAt(0)->IsForInCacheArray()) return false;
9001 HForInCacheArray* cache = HForInCacheArray::cast(elements->OperandAt(0));
9002 HValue* receiver = environment()->ExpressionStackAt(1);
9003 if (!receiver->IsPhi() || receiver->OperandCount() != 1) return false;
9004 if (cache->enumerable() != receiver->OperandAt(0)) return false;
9005 Drop(3); // key, receiver, function
9006 Add<HCheckMapValue>(receiver, cache->map());
9007 ast_context()->ReturnValue(graph()->GetConstantTrue());
9008 return true;
9009 }
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00009010 case kStringCharCodeAt:
9011 case kStringCharAt:
9012 if (argument_count == 2) {
9013 HValue* index = Pop();
9014 HValue* string = Pop();
9015 Drop(1); // Function.
9016 HInstruction* char_code =
9017 BuildStringCharCodeAt(string, index);
9018 if (id == kStringCharCodeAt) {
Ben Murdoch61f157c2016-09-16 13:49:30 +01009019 ast_context()->ReturnInstruction(char_code, ast_id);
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00009020 return true;
9021 }
9022 AddInstruction(char_code);
9023 HInstruction* result = NewUncasted<HStringCharFromCode>(char_code);
Ben Murdoch61f157c2016-09-16 13:49:30 +01009024 ast_context()->ReturnInstruction(result, ast_id);
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00009025 return true;
9026 }
9027 break;
9028 case kStringFromCharCode:
9029 if (argument_count == 2) {
9030 HValue* argument = Pop();
9031 Drop(2); // Receiver and function.
Ben Murdochda12d292016-06-02 14:46:10 +01009032 argument = AddUncasted<HForceRepresentation>(
9033 argument, Representation::Integer32());
9034 argument->SetFlag(HValue::kTruncatingToInt32);
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00009035 HInstruction* result = NewUncasted<HStringCharFromCode>(argument);
Ben Murdoch61f157c2016-09-16 13:49:30 +01009036 ast_context()->ReturnInstruction(result, ast_id);
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00009037 return true;
9038 }
9039 break;
Ben Murdoch61f157c2016-09-16 13:49:30 +01009040 case kMathCos:
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00009041 case kMathExp:
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00009042 case kMathRound:
9043 case kMathFround:
9044 case kMathFloor:
9045 case kMathAbs:
Ben Murdoch61f157c2016-09-16 13:49:30 +01009046 case kMathSin:
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00009047 case kMathSqrt:
9048 case kMathLog:
9049 case kMathClz32:
9050 if (argument_count == 2) {
9051 HValue* argument = Pop();
9052 Drop(2); // Receiver and function.
9053 HInstruction* op = NewUncasted<HUnaryMathOperation>(argument, id);
Ben Murdoch61f157c2016-09-16 13:49:30 +01009054 ast_context()->ReturnInstruction(op, ast_id);
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00009055 return true;
9056 }
9057 break;
9058 case kMathPow:
9059 if (argument_count == 3) {
9060 HValue* right = Pop();
9061 HValue* left = Pop();
9062 Drop(2); // Receiver and function.
9063 HInstruction* result = NULL;
9064 // Use sqrt() if exponent is 0.5 or -0.5.
9065 if (right->IsConstant() && HConstant::cast(right)->HasDoubleValue()) {
9066 double exponent = HConstant::cast(right)->DoubleValue();
9067 if (exponent == 0.5) {
9068 result = NewUncasted<HUnaryMathOperation>(left, kMathPowHalf);
9069 } else if (exponent == -0.5) {
9070 HValue* one = graph()->GetConstant1();
9071 HInstruction* sqrt = AddUncasted<HUnaryMathOperation>(
9072 left, kMathPowHalf);
9073 // MathPowHalf doesn't have side effects so there's no need for
9074 // an environment simulation here.
9075 DCHECK(!sqrt->HasObservableSideEffects());
9076 result = NewUncasted<HDiv>(one, sqrt);
9077 } else if (exponent == 2.0) {
9078 result = NewUncasted<HMul>(left, left);
9079 }
9080 }
9081
9082 if (result == NULL) {
9083 result = NewUncasted<HPower>(left, right);
9084 }
Ben Murdoch61f157c2016-09-16 13:49:30 +01009085 ast_context()->ReturnInstruction(result, ast_id);
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00009086 return true;
9087 }
9088 break;
9089 case kMathMax:
9090 case kMathMin:
9091 if (argument_count == 3) {
9092 HValue* right = Pop();
9093 HValue* left = Pop();
9094 Drop(2); // Receiver and function.
9095 HMathMinMax::Operation op = (id == kMathMin) ? HMathMinMax::kMathMin
9096 : HMathMinMax::kMathMax;
9097 HInstruction* result = NewUncasted<HMathMinMax>(left, right, op);
Ben Murdoch61f157c2016-09-16 13:49:30 +01009098 ast_context()->ReturnInstruction(result, ast_id);
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00009099 return true;
9100 }
9101 break;
9102 case kMathImul:
9103 if (argument_count == 3) {
9104 HValue* right = Pop();
9105 HValue* left = Pop();
9106 Drop(2); // Receiver and function.
9107 HInstruction* result =
9108 HMul::NewImul(isolate(), zone(), context(), left, right);
Ben Murdoch61f157c2016-09-16 13:49:30 +01009109 ast_context()->ReturnInstruction(result, ast_id);
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00009110 return true;
9111 }
9112 break;
9113 case kArrayPop: {
9114 if (!CanInlineArrayResizeOperation(receiver_map)) return false;
9115 ElementsKind elements_kind = receiver_map->elements_kind();
9116
9117 Drop(args_count_no_receiver);
9118 HValue* result;
9119 HValue* reduced_length;
9120 HValue* receiver = Pop();
9121
9122 HValue* checked_object = AddCheckMap(receiver, receiver_map);
9123 HValue* length =
9124 Add<HLoadNamedField>(checked_object, nullptr,
9125 HObjectAccess::ForArrayLength(elements_kind));
9126
9127 Drop(1); // Function.
9128
9129 { NoObservableSideEffectsScope scope(this);
9130 IfBuilder length_checker(this);
9131
9132 HValue* bounds_check = length_checker.If<HCompareNumericAndBranch>(
9133 length, graph()->GetConstant0(), Token::EQ);
9134 length_checker.Then();
9135
9136 if (!ast_context()->IsEffect()) Push(graph()->GetConstantUndefined());
9137
9138 length_checker.Else();
9139 HValue* elements = AddLoadElements(checked_object);
9140 // Ensure that we aren't popping from a copy-on-write array.
9141 if (IsFastSmiOrObjectElementsKind(elements_kind)) {
9142 elements = BuildCopyElementsOnWrite(checked_object, elements,
9143 elements_kind, length);
9144 }
9145 reduced_length = AddUncasted<HSub>(length, graph()->GetConstant1());
9146 result = AddElementAccess(elements, reduced_length, nullptr,
9147 bounds_check, nullptr, elements_kind, LOAD);
9148 HValue* hole = IsFastSmiOrObjectElementsKind(elements_kind)
9149 ? graph()->GetConstantHole()
9150 : Add<HConstant>(HConstant::kHoleNaN);
9151 if (IsFastSmiOrObjectElementsKind(elements_kind)) {
9152 elements_kind = FAST_HOLEY_ELEMENTS;
9153 }
9154 AddElementAccess(elements, reduced_length, hole, bounds_check, nullptr,
9155 elements_kind, STORE);
9156 Add<HStoreNamedField>(
9157 checked_object, HObjectAccess::ForArrayLength(elements_kind),
9158 reduced_length, STORE_TO_INITIALIZED_ENTRY);
9159
9160 if (!ast_context()->IsEffect()) Push(result);
9161
9162 length_checker.End();
9163 }
9164 result = ast_context()->IsEffect() ? graph()->GetConstant0() : Top();
Ben Murdoch61f157c2016-09-16 13:49:30 +01009165 Add<HSimulate>(ast_id, REMOVABLE_SIMULATE);
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00009166 if (!ast_context()->IsEffect()) Drop(1);
9167
9168 ast_context()->ReturnValue(result);
9169 return true;
9170 }
9171 case kArrayPush: {
9172 if (!CanInlineArrayResizeOperation(receiver_map)) return false;
9173 ElementsKind elements_kind = receiver_map->elements_kind();
9174
9175 // If there may be elements accessors in the prototype chain, the fast
9176 // inlined version can't be used.
9177 if (receiver_map->DictionaryElementsInPrototypeChainOnly()) return false;
9178 // If there currently can be no elements accessors on the prototype chain,
9179 // it doesn't mean that there won't be any later. Install a full prototype
9180 // chain check to trap element accessors being installed on the prototype
9181 // chain, which would cause elements to go to dictionary mode and result
9182 // in a map change.
9183 Handle<JSObject> prototype(JSObject::cast(receiver_map->prototype()));
9184 BuildCheckPrototypeMaps(prototype, Handle<JSObject>());
9185
9186 // Protect against adding elements to the Array prototype, which needs to
9187 // route through appropriate bottlenecks.
9188 if (isolate()->IsFastArrayConstructorPrototypeChainIntact() &&
9189 !prototype->IsJSArray()) {
9190 return false;
9191 }
9192
9193 const int argc = args_count_no_receiver;
9194 if (argc != 1) return false;
9195
9196 HValue* value_to_push = Pop();
9197 HValue* array = Pop();
9198 Drop(1); // Drop function.
9199
9200 HInstruction* new_size = NULL;
9201 HValue* length = NULL;
9202
9203 {
9204 NoObservableSideEffectsScope scope(this);
9205
9206 length = Add<HLoadNamedField>(
9207 array, nullptr, HObjectAccess::ForArrayLength(elements_kind));
9208
9209 new_size = AddUncasted<HAdd>(length, graph()->GetConstant1());
9210
9211 bool is_array = receiver_map->instance_type() == JS_ARRAY_TYPE;
9212 HValue* checked_array = Add<HCheckMaps>(array, receiver_map);
9213 BuildUncheckedMonomorphicElementAccess(
9214 checked_array, length, value_to_push, is_array, elements_kind,
9215 STORE, NEVER_RETURN_HOLE, STORE_AND_GROW_NO_TRANSITION);
9216
9217 if (!ast_context()->IsEffect()) Push(new_size);
Ben Murdoch61f157c2016-09-16 13:49:30 +01009218 Add<HSimulate>(ast_id, REMOVABLE_SIMULATE);
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00009219 if (!ast_context()->IsEffect()) Drop(1);
9220 }
9221
9222 ast_context()->ReturnValue(new_size);
9223 return true;
9224 }
9225 case kArrayShift: {
9226 if (!CanInlineArrayResizeOperation(receiver_map)) return false;
9227 ElementsKind kind = receiver_map->elements_kind();
9228
9229 // If there may be elements accessors in the prototype chain, the fast
9230 // inlined version can't be used.
9231 if (receiver_map->DictionaryElementsInPrototypeChainOnly()) return false;
9232
9233 // If there currently can be no elements accessors on the prototype chain,
9234 // it doesn't mean that there won't be any later. Install a full prototype
9235 // chain check to trap element accessors being installed on the prototype
9236 // chain, which would cause elements to go to dictionary mode and result
9237 // in a map change.
9238 BuildCheckPrototypeMaps(
9239 handle(JSObject::cast(receiver_map->prototype()), isolate()),
9240 Handle<JSObject>::null());
9241
9242 // Threshold for fast inlined Array.shift().
9243 HConstant* inline_threshold = Add<HConstant>(static_cast<int32_t>(16));
9244
9245 Drop(args_count_no_receiver);
9246 HValue* receiver = Pop();
Ben Murdochda12d292016-06-02 14:46:10 +01009247 Drop(1); // Function.
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00009248 HValue* result;
9249
9250 {
9251 NoObservableSideEffectsScope scope(this);
9252
9253 HValue* length = Add<HLoadNamedField>(
9254 receiver, nullptr, HObjectAccess::ForArrayLength(kind));
9255
9256 IfBuilder if_lengthiszero(this);
9257 HValue* lengthiszero = if_lengthiszero.If<HCompareNumericAndBranch>(
9258 length, graph()->GetConstant0(), Token::EQ);
9259 if_lengthiszero.Then();
9260 {
9261 if (!ast_context()->IsEffect()) Push(graph()->GetConstantUndefined());
9262 }
9263 if_lengthiszero.Else();
9264 {
9265 HValue* elements = AddLoadElements(receiver);
9266
9267 // Check if we can use the fast inlined Array.shift().
9268 IfBuilder if_inline(this);
9269 if_inline.If<HCompareNumericAndBranch>(
9270 length, inline_threshold, Token::LTE);
9271 if (IsFastSmiOrObjectElementsKind(kind)) {
9272 // We cannot handle copy-on-write backing stores here.
9273 if_inline.AndIf<HCompareMap>(
9274 elements, isolate()->factory()->fixed_array_map());
9275 }
9276 if_inline.Then();
9277 {
9278 // Remember the result.
9279 if (!ast_context()->IsEffect()) {
9280 Push(AddElementAccess(elements, graph()->GetConstant0(), nullptr,
9281 lengthiszero, nullptr, kind, LOAD));
9282 }
9283
9284 // Compute the new length.
9285 HValue* new_length = AddUncasted<HSub>(
9286 length, graph()->GetConstant1());
9287 new_length->ClearFlag(HValue::kCanOverflow);
9288
9289 // Copy the remaining elements.
9290 LoopBuilder loop(this, context(), LoopBuilder::kPostIncrement);
9291 {
9292 HValue* new_key = loop.BeginBody(
9293 graph()->GetConstant0(), new_length, Token::LT);
9294 HValue* key = AddUncasted<HAdd>(new_key, graph()->GetConstant1());
9295 key->ClearFlag(HValue::kCanOverflow);
9296 ElementsKind copy_kind =
9297 kind == FAST_HOLEY_SMI_ELEMENTS ? FAST_HOLEY_ELEMENTS : kind;
9298 HValue* element =
9299 AddUncasted<HLoadKeyed>(elements, key, lengthiszero, nullptr,
9300 copy_kind, ALLOW_RETURN_HOLE);
9301 HStoreKeyed* store = Add<HStoreKeyed>(elements, new_key, element,
9302 nullptr, copy_kind);
9303 store->SetFlag(HValue::kAllowUndefinedAsNaN);
9304 }
9305 loop.EndBody();
9306
9307 // Put a hole at the end.
9308 HValue* hole = IsFastSmiOrObjectElementsKind(kind)
9309 ? graph()->GetConstantHole()
9310 : Add<HConstant>(HConstant::kHoleNaN);
9311 if (IsFastSmiOrObjectElementsKind(kind)) kind = FAST_HOLEY_ELEMENTS;
9312 Add<HStoreKeyed>(elements, new_length, hole, nullptr, kind,
9313 INITIALIZING_STORE);
9314
9315 // Remember new length.
9316 Add<HStoreNamedField>(
9317 receiver, HObjectAccess::ForArrayLength(kind),
9318 new_length, STORE_TO_INITIALIZED_ENTRY);
9319 }
9320 if_inline.Else();
9321 {
9322 Add<HPushArguments>(receiver);
Ben Murdochda12d292016-06-02 14:46:10 +01009323 result = AddInstruction(NewCallConstantFunction(
9324 function, 1, TailCallMode::kDisallow, TailCallMode::kDisallow));
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00009325 if (!ast_context()->IsEffect()) Push(result);
9326 }
9327 if_inline.End();
9328 }
9329 if_lengthiszero.End();
9330 }
9331 result = ast_context()->IsEffect() ? graph()->GetConstant0() : Top();
Ben Murdoch61f157c2016-09-16 13:49:30 +01009332 Add<HSimulate>(ast_id, REMOVABLE_SIMULATE);
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00009333 if (!ast_context()->IsEffect()) Drop(1);
9334 ast_context()->ReturnValue(result);
9335 return true;
9336 }
9337 case kArrayIndexOf:
9338 case kArrayLastIndexOf: {
9339 if (receiver_map.is_null()) return false;
9340 if (receiver_map->instance_type() != JS_ARRAY_TYPE) return false;
Ben Murdoch097c5b22016-05-18 11:27:45 +01009341 if (!receiver_map->prototype()->IsJSObject()) return false;
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00009342 ElementsKind kind = receiver_map->elements_kind();
9343 if (!IsFastElementsKind(kind)) return false;
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00009344 if (argument_count != 2) return false;
9345 if (!receiver_map->is_extensible()) return false;
9346
9347 // If there may be elements accessors in the prototype chain, the fast
9348 // inlined version can't be used.
9349 if (receiver_map->DictionaryElementsInPrototypeChainOnly()) return false;
9350
9351 // If there currently can be no elements accessors on the prototype chain,
9352 // it doesn't mean that there won't be any later. Install a full prototype
9353 // chain check to trap element accessors being installed on the prototype
9354 // chain, which would cause elements to go to dictionary mode and result
9355 // in a map change.
9356 BuildCheckPrototypeMaps(
9357 handle(JSObject::cast(receiver_map->prototype()), isolate()),
9358 Handle<JSObject>::null());
9359
9360 HValue* search_element = Pop();
9361 HValue* receiver = Pop();
9362 Drop(1); // Drop function.
9363
9364 ArrayIndexOfMode mode = (id == kArrayIndexOf)
9365 ? kFirstIndexOf : kLastIndexOf;
9366 HValue* index = BuildArrayIndexOf(receiver, search_element, kind, mode);
9367
9368 if (!ast_context()->IsEffect()) Push(index);
Ben Murdoch61f157c2016-09-16 13:49:30 +01009369 Add<HSimulate>(ast_id, REMOVABLE_SIMULATE);
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00009370 if (!ast_context()->IsEffect()) Drop(1);
9371 ast_context()->ReturnValue(index);
9372 return true;
9373 }
9374 default:
9375 // Not yet supported for inlining.
9376 break;
9377 }
9378 return false;
9379}
9380
9381
9382bool HOptimizedGraphBuilder::TryInlineApiFunctionCall(Call* expr,
9383 HValue* receiver) {
9384 Handle<JSFunction> function = expr->target();
9385 int argc = expr->arguments()->length();
9386 SmallMapList receiver_maps;
Ben Murdochda12d292016-06-02 14:46:10 +01009387 return TryInlineApiCall(function, receiver, &receiver_maps, argc, expr->id(),
9388 kCallApiFunction, expr->tail_call_mode());
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00009389}
9390
9391
// Attempts to inline a method call (o.f(...)) to a fast API function. The
// caller supplies the set of receiver maps observed at the call site; the
// heavy lifting is delegated to TryInlineApiCall with kCallApiMethod.
// Returns true iff the call was inlined.
9392bool HOptimizedGraphBuilder::TryInlineApiMethodCall(
9393    Call* expr,
9394    HValue* receiver,
9395    SmallMapList* receiver_maps) {
9396  Handle<JSFunction> function = expr->target();
9397  int argc = expr->arguments()->length();
Ben Murdochda12d292016-06-02 14:46:10 +01009398  return TryInlineApiCall(function, receiver, receiver_maps, argc, expr->id(),
9399                          kCallApiMethod, expr->tail_call_mode());
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00009400}
9401
// Attempts to inline a property load backed by an API getter callback. The
// receiver is already on the expression stack (hence the NULL receiver
// argument), exactly one receiver map is known, and getters take zero
// explicit arguments. Tail calls are never allowed for accessors.
Ben Murdoch097c5b22016-05-18 11:27:45 +01009402bool HOptimizedGraphBuilder::TryInlineApiGetter(Handle<Object> function,
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00009403                                                Handle<Map> receiver_map,
9404                                                BailoutId ast_id) {
9405  SmallMapList receiver_maps(1, zone());
9406  receiver_maps.Add(receiver_map, zone());
9407  return TryInlineApiCall(function,
9408                          NULL,  // Receiver is on expression stack.
Ben Murdochda12d292016-06-02 14:46:10 +01009409                          &receiver_maps, 0, ast_id, kCallApiGetter,
9410                          TailCallMode::kDisallow);
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00009411}
9412
// Attempts to inline a property store backed by an API setter callback.
// Mirrors TryInlineApiGetter: receiver and value are on the expression stack
// (NULL receiver argument), one receiver map, exactly one argument (the value
// being stored), and tail calls disallowed.
Ben Murdoch097c5b22016-05-18 11:27:45 +01009413bool HOptimizedGraphBuilder::TryInlineApiSetter(Handle<Object> function,
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00009414                                                Handle<Map> receiver_map,
9415                                                BailoutId ast_id) {
9416  SmallMapList receiver_maps(1, zone());
9417  receiver_maps.Add(receiver_map, zone());
9418  return TryInlineApiCall(function,
9419                          NULL,  // Receiver is on expression stack.
Ben Murdochda12d292016-06-02 14:46:10 +01009420                          &receiver_maps, 1, ast_id, kCallApiSetter,
9421                          TailCallMode::kDisallow);
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00009422}
9423
// Common back end for the four TryInlineApi* entry points above. Bails out
// (returns false) when: the target function lives in a different native
// context than the code being compiled, too many arguments for
// CallApiCallbackStub, the target is not a "simple" API call (per
// CallOptimization), any receiver map needs access checks, or the expected
// holder cannot be found. Otherwise emits map/prototype checks, pushes the
// call arguments, and builds an HCallWithDescriptor through a
// CallApiCallbackStub that invokes the C++ callback directly.
Ben Murdochda12d292016-06-02 14:46:10 +01009424bool HOptimizedGraphBuilder::TryInlineApiCall(
9425    Handle<Object> function, HValue* receiver, SmallMapList* receiver_maps,
9426    int argc, BailoutId ast_id, ApiCallType call_type,
9427    TailCallMode syntactic_tail_call_mode) {
Ben Murdoch097c5b22016-05-18 11:27:45 +01009428  if (function->IsJSFunction() &&
9429      Handle<JSFunction>::cast(function)->context()->native_context() !=
9430          top_info()->closure()->context()->native_context()) {
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00009431    return false;
9432  }
Ben Murdochda12d292016-06-02 14:46:10 +01009433  if (argc > CallApiCallbackStub::kArgMax) {
9434    return false;
9435  }
9436
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00009437  CallOptimization optimization(function);
9438  if (!optimization.is_simple_api_call()) return false;
9439  Handle<Map> holder_map;
9440  for (int i = 0; i < receiver_maps->length(); ++i) {
9441    auto map = receiver_maps->at(i);
9442    // Don't inline calls to receivers requiring access checks.
9443    if (map->is_access_check_needed()) return false;
9444  }
9445  if (call_type == kCallApiFunction) {
9446    // Cannot embed a direct reference to the global proxy map
9447    // as it may be dropped on deserialization.
9448    CHECK(!isolate()->serializer_enabled());
Ben Murdoch097c5b22016-05-18 11:27:45 +01009449    DCHECK(function->IsJSFunction());
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00009450    DCHECK_EQ(0, receiver_maps->length());
Ben Murdoch097c5b22016-05-18 11:27:45 +01009451    receiver_maps->Add(
9452        handle(Handle<JSFunction>::cast(function)->global_proxy()->map()),
9453        zone());
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00009454  }
9455  CallOptimization::HolderLookup holder_lookup =
9456      CallOptimization::kHolderNotFound;
9457  Handle<JSObject> api_holder = optimization.LookupHolderOfExpectedType(
9458      receiver_maps->first(), &holder_lookup);
9459  if (holder_lookup == CallOptimization::kHolderNotFound) return false;
9460
9461  if (FLAG_trace_inlining) {
9462    PrintF("Inlining api function ");
9463    function->ShortPrint();
9464    PrintF("\n");
9465  }
9466
9467  bool is_function = false;
9468  bool is_store = false;
  // Each call type arranges its own arguments on the argument stack before
  // the stub call is built below.
9469  switch (call_type) {
9470    case kCallApiFunction:
9471    case kCallApiMethod:
9472      // Need to check that none of the receiver maps could have changed.
9473      Add<HCheckMaps>(receiver, receiver_maps);
9474      // Need to ensure the chain between receiver and api_holder is intact.
9475      if (holder_lookup == CallOptimization::kHolderFound) {
9476        AddCheckPrototypeMaps(api_holder, receiver_maps->first());
9477      } else {
9478        DCHECK_EQ(holder_lookup, CallOptimization::kHolderIsReceiver);
9479      }
9480      // Includes receiver.
9481      PushArgumentsFromEnvironment(argc + 1);
9482      is_function = true;
9483      break;
9484    case kCallApiGetter:
9485      // Receiver and prototype chain cannot have changed.
9486      DCHECK_EQ(0, argc);
9487      DCHECK_NULL(receiver);
9488      // Receiver is on expression stack.
9489      receiver = Pop();
9490      Add<HPushArguments>(receiver);
9491      break;
9492    case kCallApiSetter:
9493      {
9494        is_store = true;
9495        // Receiver and prototype chain cannot have changed.
9496        DCHECK_EQ(1, argc);
9497        DCHECK_NULL(receiver);
9498        // Receiver and value are on expression stack.
9499        HValue* value = Pop();
9500        receiver = Pop();
9501        Add<HPushArguments>(receiver, value);
9502        break;
9503      }
9504  }
9505
9506  HValue* holder = NULL;
9507  switch (holder_lookup) {
9508    case CallOptimization::kHolderFound:
9509      holder = Add<HConstant>(api_holder);
9510      break;
9511    case CallOptimization::kHolderIsReceiver:
9512      holder = receiver;
9513      break;
9514    case CallOptimization::kHolderNotFound:
      // Ruled out by the early return above.
9515      UNREACHABLE();
9516      break;
9517  }
9518  Handle<CallHandlerInfo> api_call_info = optimization.api_call_info();
9519  Handle<Object> call_data_obj(api_call_info->data(), isolate());
Ben Murdoch61f157c2016-09-16 13:49:30 +01009520  bool call_data_undefined = call_data_obj->IsUndefined(isolate());
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00009521  HValue* call_data = Add<HConstant>(call_data_obj);
9522  ApiFunction fun(v8::ToCData<Address>(api_call_info->callback()));
9523  ExternalReference ref = ExternalReference(&fun,
9524                                            ExternalReference::DIRECT_API_CALL,
9525                                            isolate());
9526  HValue* api_function_address = Add<HConstant>(ExternalReference(ref));
9527
  // Operand layout expected by CallApiCallbackStub's call interface
  // descriptor; the trailing nullptr slot is excluded via arraysize - 1.
9528  HValue* op_vals[] = {context(), Add<HConstant>(function), call_data, holder,
9529                       api_function_address, nullptr};
9530
9531  HInstruction* call = nullptr;
Ben Murdochda12d292016-06-02 14:46:10 +01009532  CHECK(argc <= CallApiCallbackStub::kArgMax);
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00009533  if (!is_function) {
    // Accessor (getter/setter) variant of the stub.
Ben Murdochda12d292016-06-02 14:46:10 +01009534    CallApiCallbackStub stub(isolate(), is_store, call_data_undefined,
Ben Murdoch097c5b22016-05-18 11:27:45 +01009535                             !optimization.is_constant_call());
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00009536    Handle<Code> code = stub.GetCode();
9537    HConstant* code_value = Add<HConstant>(code);
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00009538    call = New<HCallWithDescriptor>(
Ben Murdochda12d292016-06-02 14:46:10 +01009539        code_value, argc + 1, stub.GetCallInterfaceDescriptor(),
9540        Vector<HValue*>(op_vals, arraysize(op_vals) - 1),
9541        syntactic_tail_call_mode);
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00009542  } else {
    // Regular function/method variant of the stub.
Ben Murdochda12d292016-06-02 14:46:10 +01009543    CallApiCallbackStub stub(isolate(), argc, call_data_undefined);
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00009544    Handle<Code> code = stub.GetCode();
9545    HConstant* code_value = Add<HConstant>(code);
Ben Murdochda12d292016-06-02 14:46:10 +01009546    call = New<HCallWithDescriptor>(
9547        code_value, argc + 1, stub.GetCallInterfaceDescriptor(),
9548        Vector<HValue*>(op_vals, arraysize(op_vals) - 1),
9549        syntactic_tail_call_mode);
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00009550    Drop(1);  // Drop function.
9551  }
9552
9553  ast_context()->ReturnInstruction(call, ast_id);
9554  return true;
9555}
9556
9557
// Emits code for an indirect call (reached via Function.prototype.call/apply
// rewriting). If the callee is a known constant JSFunction, first tries the
// builtin-method and generic inlining paths; otherwise (or if those fail)
// falls back to an HInvokeFunction with the arguments already on the
// environment stack.
9558void HOptimizedGraphBuilder::HandleIndirectCall(Call* expr, HValue* function,
9559                                                int arguments_count) {
9560  Handle<JSFunction> known_function;
9561  int args_count_no_receiver = arguments_count - 1;
9562  if (function->IsConstant() &&
9563      HConstant::cast(function)->handle(isolate())->IsJSFunction()) {
9564    known_function =
9565        Handle<JSFunction>::cast(HConstant::cast(function)->handle(isolate()));
    // No receiver map is known here, hence the empty Handle<Map>().
Ben Murdoch61f157c2016-09-16 13:49:30 +01009566    if (TryInlineBuiltinMethodCall(known_function, Handle<Map>(), expr->id(),
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00009567                                   args_count_no_receiver)) {
9568      if (FLAG_trace_inlining) {
9569        PrintF("Inlining builtin ");
9570        known_function->ShortPrint();
9571        PrintF("\n");
9572      }
9573      return;
9574    }
9575
9576    if (TryInlineIndirectCall(known_function, expr, args_count_no_receiver)) {
9577      return;
9578    }
9579  }
9580
Ben Murdochda12d292016-06-02 14:46:10 +01009581  TailCallMode syntactic_tail_call_mode = expr->tail_call_mode();
9582  TailCallMode tail_call_mode =
9583      function_state()->ComputeTailCallMode(syntactic_tail_call_mode);
9584
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00009585  PushArgumentsFromEnvironment(arguments_count);
9586  HInvokeFunction* call =
Ben Murdochda12d292016-06-02 14:46:10 +01009587      New<HInvokeFunction>(function, known_function, arguments_count,
9588                           syntactic_tail_call_mode, tail_call_mode);
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00009589  Drop(1);  // Function
9590  ast_context()->ReturnInstruction(call, expr->id());
9591}
9592
9593
// Recognizes monomorphic property calls to the Function.prototype.call /
// Function.prototype.apply builtins and lowers them via BuildFunctionCall /
// BuildFunctionApply. Returns true iff the call was handled; false means the
// caller should emit a normal call.
9594bool HOptimizedGraphBuilder::TryIndirectCall(Call* expr) {
9595  DCHECK(expr->expression()->IsProperty());
9596
9597  if (!expr->IsMonomorphic()) {
9598    return false;
9599  }
9600  Handle<Map> function_map = expr->GetReceiverTypes()->first();
9601  if (function_map->instance_type() != JS_FUNCTION_TYPE ||
9602      !expr->target()->shared()->HasBuiltinFunctionId()) {
9603    return false;
9604  }
9605
9606  switch (expr->target()->shared()->builtin_function_id()) {
9607    case kFunctionCall: {
      // f.call() with no arguments is not handled here.
9608      if (expr->arguments()->length() == 0) return false;
9609      BuildFunctionCall(expr);
9610      return true;
9611    }
9612    case kFunctionApply: {
9613      // For .apply, only the pattern f.apply(receiver, arguments)
9614      // is supported.
9615      if (current_info()->scope()->arguments() == NULL) return false;
9616
9617      if (!CanBeFunctionApplyArguments(expr)) return false;
9618
9619      BuildFunctionApply(expr);
9620      return true;
9621    }
9622    default: { return false; }
9623  }
9624  UNREACHABLE();
9625}
9626
9627
9628// f.apply(receiver, arguments): at the top level, materialize the current
// function's arguments object lazily via HArgumentsElements/Length and emit
// HApplyArguments; inside an inlined function the argument values are known
// statically, so they are pushed individually and dispatched through
// HandleIndirectCall.
9629void HOptimizedGraphBuilder::BuildFunctionApply(Call* expr) {
9630  ZoneList<Expression*>* args = expr->arguments();
9631  CHECK_ALIVE(VisitForValue(args->at(0)));
9632  HValue* receiver = Pop();  // receiver
9633  HValue* function = Pop();  // f
9634  Drop(1);  // apply
9635
9636  Handle<Map> function_map = expr->GetReceiverTypes()->first();
9637  HValue* checked_function = AddCheckMap(function, function_map);
9638
9639  if (function_state()->outer() == NULL) {
    // Not inlined: the actual arguments live in the caller's frame.
Ben Murdochda12d292016-06-02 14:46:10 +01009640    TailCallMode syntactic_tail_call_mode = expr->tail_call_mode();
9641    TailCallMode tail_call_mode =
9642        function_state()->ComputeTailCallMode(syntactic_tail_call_mode);
9643
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00009644    HInstruction* elements = Add<HArgumentsElements>(false);
9645    HInstruction* length = Add<HArgumentsLength>(elements);
9646    HValue* wrapped_receiver = BuildWrapReceiver(receiver, checked_function);
Ben Murdochda12d292016-06-02 14:46:10 +01009647    HInstruction* result = New<HApplyArguments>(
9648        function, wrapped_receiver, length, elements, tail_call_mode);
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00009649    ast_context()->ReturnInstruction(result, expr->id());
9650  } else {
9651    // We are inside inlined function and we know exactly what is inside
9652    // arguments object. But we need to be able to materialize at deopt.
9653    DCHECK_EQ(environment()->arguments_environment()->parameter_count(),
9654              function_state()->entry()->arguments_object()->arguments_count());
9655    HArgumentsObject* args = function_state()->entry()->arguments_object();
9656    const ZoneList<HValue*>* arguments_values = args->arguments_values();
9657    int arguments_count = arguments_values->length();
9658    Push(function);
9659    Push(BuildWrapReceiver(receiver, checked_function));
    // Skip index 0 (the receiver slot); push the real arguments.
9660    for (int i = 1; i < arguments_count; i++) {
9661      Push(arguments_values->at(i));
9662    }
9663    HandleIndirectCall(expr, function, arguments_count);
9664  }
9665}
9666
9667
9668// f.call(receiver, ...args): rewrites the expression stack so that the call
// looks like a direct invocation of f — the explicit receiver argument is
// wrapped and patched into the receiver slot, the 'call' property value is
// removed — then dispatches through HandleIndirectCall.
9669void HOptimizedGraphBuilder::BuildFunctionCall(Call* expr) {
9670  HValue* function = Top();  // f
9671  Handle<Map> function_map = expr->GetReceiverTypes()->first();
9672  HValue* checked_function = AddCheckMap(function, function_map);
9673
9674  // f and call are on the stack in the unoptimized code
9675  // during evaluation of the arguments.
9676  CHECK_ALIVE(VisitExpressions(expr->arguments()));
9677
9678  int args_length = expr->arguments()->length();
9679  int receiver_index = args_length - 1;
9680  // Patch the receiver.
9681  HValue* receiver = BuildWrapReceiver(
9682      environment()->ExpressionStackAt(receiver_index), checked_function);
9683  environment()->SetExpressionStackAt(receiver_index, receiver);
9684
9685  // Call must not be on the stack from now on.
9686  int call_index = args_length + 1;
9687  environment()->RemoveExpressionStackAt(call_index);
9688
9689  HandleIndirectCall(expr, function, args_length);
9690}
9691
9692
// Returns the implicit receiver for calling |target| without an explicit
// receiver: the target's global proxy for sloppy-mode non-native functions,
// undefined otherwise.
9693HValue* HOptimizedGraphBuilder::ImplicitReceiverFor(HValue* function,
9694                                                    Handle<JSFunction> target) {
9695  SharedFunctionInfo* shared = target->shared();
9696  if (is_sloppy(shared->language_mode()) && !shared->native()) {
9697    // Cannot embed a direct reference to the global proxy
9698    // as it is dropped on deserialization.
9699    CHECK(!isolate()->serializer_enabled());
9700    Handle<JSObject> global_proxy(target->context()->global_proxy());
9701    return Add<HConstant>(global_proxy);
9702  }
9703  return graph()->GetConstantUndefined();
9704}
9705
9706
// Emits a call to the Array constructor (both `Array(...)` and
// `new Array(...)` paths). Guards the callee with HCheckValue against the
// cached Array function; if the allocation-site feedback permits, the
// allocation is inlined via BuildInlinedCallArray, else an HCallNewArray is
// emitted.
9707void HOptimizedGraphBuilder::BuildArrayCall(Expression* expression,
9708                                            int arguments_count,
9709                                            HValue* function,
9710                                            Handle<AllocationSite> site) {
9711  Add<HCheckValue>(function, array_function());
9712
9713  if (IsCallArrayInlineable(arguments_count, site)) {
9714    BuildInlinedCallArray(expression, arguments_count, site);
9715    return;
9716  }
9717
9718  HInstruction* call = PreProcessCall(New<HCallNewArray>(
9719      function, arguments_count + 1, site->GetElementsKind(), site));
  // A plain call (not `new`) still has the function on the stack; drop it.
9720  if (expression->IsCall()) {
9721    Drop(1);
9722  }
9723  ast_context()->ReturnInstruction(call, expression->id());
9724}
9725
9726
// Builds the inlined fast path for Array.prototype.indexOf / lastIndexOf over
// a fast-elements receiver. The result (-1 if not found, else the index) is
// kept on the expression stack and popped at the end. |mode| selects scan
// direction: kFirstIndexOf scans 0..length with post-increment, kLastIndexOf
// scans length..0 with pre-decrement. For Smi/double elements a single
// numeric-compare loop suffices; for object elements the search element is
// dispatched at run time into string, number, and reference-identity loops to
// match strict-equality semantics.
9727HValue* HOptimizedGraphBuilder::BuildArrayIndexOf(HValue* receiver,
9728                                                  HValue* search_element,
9729                                                  ElementsKind kind,
9730                                                  ArrayIndexOfMode mode) {
9731  DCHECK(IsFastElementsKind(kind));
9732
9733  NoObservableSideEffectsScope no_effects(this);
9734
9735  HValue* elements = AddLoadElements(receiver);
9736  HValue* length = AddLoadArrayLength(receiver, kind);
9737
  // Loop bounds/direction depend on indexOf vs lastIndexOf.
9738  HValue* initial;
9739  HValue* terminating;
9740  Token::Value token;
9741  LoopBuilder::Direction direction;
9742  if (mode == kFirstIndexOf) {
9743    initial = graph()->GetConstant0();
9744    terminating = length;
9745    token = Token::LT;
9746    direction = LoopBuilder::kPostIncrement;
9747  } else {
9748    DCHECK_EQ(kLastIndexOf, mode);
9749    initial = length;
9750    terminating = graph()->GetConstant0();
9751    token = Token::GT;
9752    direction = LoopBuilder::kPreDecrement;
9753  }
9754
  // -1 ("not found") is the default result; a match overwrites it.
9755  Push(graph()->GetConstantMinus1());
9756  if (IsFastDoubleElementsKind(kind) || IsFastSmiElementsKind(kind)) {
9757    // Make sure that we can actually compare numbers correctly below, see
9758    // https://code.google.com/p/chromium/issues/detail?id=407946 for details.
9759    search_element = AddUncasted<HForceRepresentation>(
9760        search_element, IsFastSmiElementsKind(kind) ? Representation::Smi()
9761                                                    : Representation::Double());
9762
9763    LoopBuilder loop(this, context(), direction);
9764    {
9765      HValue* index = loop.BeginBody(initial, terminating, token);
9766      HValue* element = AddUncasted<HLoadKeyed>(
9767          elements, index, nullptr, nullptr, kind, ALLOW_RETURN_HOLE);
9768      IfBuilder if_issame(this);
9769      if_issame.If<HCompareNumericAndBranch>(element, search_element,
9770                                             Token::EQ_STRICT);
9771      if_issame.Then();
9772      {
9773        Drop(1);
9774        Push(index);
9775        loop.Break();
9776      }
9777      if_issame.End();
9778    }
9779    loop.EndBody();
9780  } else {
    // Object elements: dispatch on the dynamic type of the search element.
9781    IfBuilder if_isstring(this);
9782    if_isstring.If<HIsStringAndBranch>(search_element);
9783    if_isstring.Then();
9784    {
      // String search: compare element-by-element with string equality.
9785      LoopBuilder loop(this, context(), direction);
9786      {
9787        HValue* index = loop.BeginBody(initial, terminating, token);
9788        HValue* element = AddUncasted<HLoadKeyed>(
9789            elements, index, nullptr, nullptr, kind, ALLOW_RETURN_HOLE);
9790        IfBuilder if_issame(this);
9791        if_issame.If<HIsStringAndBranch>(element);
9792        if_issame.AndIf<HStringCompareAndBranch>(
9793            element, search_element, Token::EQ_STRICT);
9794        if_issame.Then();
9795        {
9796          Drop(1);
9797          Push(index);
9798          loop.Break();
9799        }
9800        if_issame.End();
9801      }
9802      loop.EndBody();
9803    }
9804    if_isstring.Else();
9805    {
9806      IfBuilder if_isnumber(this);
9807      if_isnumber.If<HIsSmiAndBranch>(search_element);
9808      if_isnumber.OrIf<HCompareMap>(
9809          search_element, isolate()->factory()->heap_number_map());
9810      if_isnumber.Then();
9811      {
        // Numeric search: compare as doubles against numeric elements only.
9812        HValue* search_number =
9813            AddUncasted<HForceRepresentation>(search_element,
9814                                              Representation::Double());
9815        LoopBuilder loop(this, context(), direction);
9816        {
9817          HValue* index = loop.BeginBody(initial, terminating, token);
9818          HValue* element = AddUncasted<HLoadKeyed>(
9819              elements, index, nullptr, nullptr, kind, ALLOW_RETURN_HOLE);
9820
9821          IfBuilder if_element_isnumber(this);
9822          if_element_isnumber.If<HIsSmiAndBranch>(element);
9823          if_element_isnumber.OrIf<HCompareMap>(
9824              element, isolate()->factory()->heap_number_map());
9825          if_element_isnumber.Then();
9826          {
9827            HValue* number =
9828                AddUncasted<HForceRepresentation>(element,
9829                                                  Representation::Double());
9830            IfBuilder if_issame(this);
9831            if_issame.If<HCompareNumericAndBranch>(
9832                number, search_number, Token::EQ_STRICT);
9833            if_issame.Then();
9834            {
9835              Drop(1);
9836              Push(index);
9837              loop.Break();
9838            }
9839            if_issame.End();
9840          }
9841          if_element_isnumber.End();
9842        }
9843        loop.EndBody();
9844      }
9845      if_isnumber.Else();
9846      {
        // Neither string nor number: reference identity is sufficient.
9847        LoopBuilder loop(this, context(), direction);
9848        {
9849          HValue* index = loop.BeginBody(initial, terminating, token);
9850          HValue* element = AddUncasted<HLoadKeyed>(
9851              elements, index, nullptr, nullptr, kind, ALLOW_RETURN_HOLE);
9852          IfBuilder if_issame(this);
9853          if_issame.If<HCompareObjectEqAndBranch>(
9854              element, search_element);
9855          if_issame.Then();
9856          {
9857            Drop(1);
9858            Push(index);
9859            loop.Break();
9860          }
9861          if_issame.End();
9862        }
9863        loop.EndBody();
9864      }
9865      if_isnumber.End();
9866    }
9867    if_isstring.End();
9868  }
9869
9870  return Pop();
9871}
9872
9873
// Handles a plain call whose target is the Array function, provided
// allocation-site feedback is available. Returns true iff the call was
// lowered via BuildArrayCall.
9874bool HOptimizedGraphBuilder::TryHandleArrayCall(Call* expr, HValue* function) {
9875  if (!array_function().is_identical_to(expr->target())) {
9876    return false;
9877  }
9878
9879  Handle<AllocationSite> site = expr->allocation_site();
9880  if (site.is_null()) return false;
9881
9882  BuildArrayCall(expr,
9883                 expr->arguments()->length(),
9884                 function,
9885                 site);
9886  return true;
9887}
9888
9889
// `new Array(...)` counterpart of TryHandleArrayCall: same guards (target is
// the Array function, allocation-site feedback present), same lowering via
// BuildArrayCall.
9890bool HOptimizedGraphBuilder::TryHandleArrayCallNew(CallNew* expr,
9891                                                   HValue* function) {
9892  if (!array_function().is_identical_to(expr->target())) {
9893    return false;
9894  }
9895
9896  Handle<AllocationSite> site = expr->allocation_site();
9897  if (site.is_null()) return false;
9898
9899  BuildArrayCall(expr, expr->arguments()->length(), function, site);
9900  return true;
9901}
9902
9903
// Returns true iff |expr| matches the only f.apply pattern we can inline:
// exactly two arguments where the second is a stack-allocated variable whose
// current value is the arguments object (HValue::kIsArguments).
9904bool HOptimizedGraphBuilder::CanBeFunctionApplyArguments(Call* expr) {
9905  ZoneList<Expression*>* args = expr->arguments();
9906  if (args->length() != 2) return false;
9907  VariableProxy* arg_two = args->at(1)->AsVariableProxy();
9908  if (arg_two == NULL || !arg_two->var()->IsStackAllocated()) return false;
9909  HValue* arg_two_value = LookupAndMakeLive(arg_two->var());
9910  if (!arg_two_value->CheckFlag(HValue::kIsArguments)) return false;
9911  return true;
9912}
9913
9914
// AST visitor for call expressions. Two top-level shapes are distinguished:
// property calls (o.f(...) / o[k](...)) and plain calls (f(...)). In each
// shape, progressively cheaper-to-better strategies are attempted —
// polymorphic dispatch, call/apply rewriting, builtin-method inlining, API
// call inlining, Array-constructor handling, generic inlining — before
// falling back to an out-of-line call instruction. Direct eval bails out.
9915void HOptimizedGraphBuilder::VisitCall(Call* expr) {
9916  DCHECK(!HasStackOverflow());
9917  DCHECK(current_block() != NULL);
9918  DCHECK(current_block()->HasPredecessor());
9919  if (!top_info()->is_tracking_positions()) SetSourcePosition(expr->position());
9920  Expression* callee = expr->expression();
9921  int argument_count = expr->arguments()->length() + 1;  // Plus receiver.
9922  HInstruction* call = NULL;
9923
Ben Murdochda12d292016-06-02 14:46:10 +01009924  TailCallMode syntactic_tail_call_mode = expr->tail_call_mode();
9925  TailCallMode tail_call_mode =
9926      function_state()->ComputeTailCallMode(syntactic_tail_call_mode);
9927
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00009928  Property* prop = callee->AsProperty();
9929  if (prop != NULL) {
    // Property call: o.f(...) or o[k](...).
9930    CHECK_ALIVE(VisitForValue(prop->obj()));
9931    HValue* receiver = Top();
9932
9933    SmallMapList* maps;
Ben Murdoch097c5b22016-05-18 11:27:45 +01009934    ComputeReceiverTypes(expr, receiver, &maps, this);
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00009935
9936    if (prop->key()->IsPropertyName() && maps->length() > 0) {
9937      Handle<String> name = prop->key()->AsLiteral()->AsPropertyName();
9938      PropertyAccessInfo info(this, LOAD, maps->first(), name);
9939      if (!info.CanAccessAsMonomorphic(maps)) {
9940        HandlePolymorphicCallNamed(expr, receiver, maps, name);
9941        return;
9942      }
9943    }
9944    HValue* key = NULL;
9945    if (!prop->key()->IsPropertyName()) {
9946      CHECK_ALIVE(VisitForValue(prop->key()));
9947      key = Pop();
9948    }
9949
9950    CHECK_ALIVE(PushLoad(prop, receiver, key));
9951    HValue* function = Pop();
9952
9953    if (function->IsConstant() &&
9954        HConstant::cast(function)->handle(isolate())->IsJSFunction()) {
      // The callee is a known constant JSFunction: try the inlining paths.
9955      // Push the function under the receiver.
9956      environment()->SetExpressionStackAt(0, function);
9957      Push(receiver);
9958
9959      Handle<JSFunction> known_function = Handle<JSFunction>::cast(
9960          HConstant::cast(function)->handle(isolate()));
9961      expr->set_target(known_function);
9962
9963      if (TryIndirectCall(expr)) return;
9964      CHECK_ALIVE(VisitExpressions(expr->arguments()));
9965
9966      Handle<Map> map = maps->length() == 1 ? maps->first() : Handle<Map>();
Ben Murdoch61f157c2016-09-16 13:49:30 +01009967      if (TryInlineBuiltinMethodCall(known_function, map, expr->id(),
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00009968                                     expr->arguments()->length())) {
9969        if (FLAG_trace_inlining) {
9970          PrintF("Inlining builtin ");
9971          known_function->ShortPrint();
9972          PrintF("\n");
9973        }
9974        return;
9975      }
9976      if (TryInlineApiMethodCall(expr, receiver, maps)) return;
9977
9978      // Wrap the receiver if necessary.
9979      if (NeedsWrapping(maps->first(), known_function)) {
9980        // Since HWrapReceiver currently cannot actually wrap numbers and
Ben Murdochda12d292016-06-02 14:46:10 +01009981        // strings, use the regular call builtin for method calls to wrap
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00009982        // the receiver.
9983        // TODO(verwaest): Support creation of value wrappers directly in
9984        // HWrapReceiver.
Ben Murdochda12d292016-06-02 14:46:10 +01009985        call = NewCallFunction(
9986            function, argument_count, syntactic_tail_call_mode,
9987            ConvertReceiverMode::kNotNullOrUndefined, tail_call_mode);
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00009988      } else if (TryInlineCall(expr)) {
9989        return;
9990      } else {
Ben Murdochda12d292016-06-02 14:46:10 +01009991        call =
9992            NewCallConstantFunction(known_function, argument_count,
9993                                    syntactic_tail_call_mode, tail_call_mode);
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00009994      }
9995
9996    } else {
      // Callee is not a known constant: emit a generic call.
9997      ArgumentsAllowedFlag arguments_flag = ARGUMENTS_NOT_ALLOWED;
9998      if (CanBeFunctionApplyArguments(expr) && expr->is_uninitialized()) {
9999        // We have to use EAGER deoptimization here because Deoptimizer::SOFT
10000        // gets ignored by the always-opt flag, which leads to incorrect code.
10001        Add<HDeoptimize>(
10002            Deoptimizer::kInsufficientTypeFeedbackForCallWithArguments,
10003            Deoptimizer::EAGER);
10004        arguments_flag = ARGUMENTS_FAKED;
10005      }
10006
10007      // Push the function under the receiver.
10008      environment()->SetExpressionStackAt(0, function);
10009      Push(receiver);
10010
10011      CHECK_ALIVE(VisitExpressions(expr->arguments(), arguments_flag));
Ben Murdochda12d292016-06-02 14:46:10 +010010012      call = NewCallFunction(function, argument_count, syntactic_tail_call_mode,
10013                             ConvertReceiverMode::kNotNullOrUndefined,
10014                             tail_call_mode);
Ben Murdoch4a90d5f2016-03-22 12:00:34 +000010015    }
10016    PushArgumentsFromEnvironment(argument_count);
10017
10018  } else {
    // Plain call: f(...).
10019    VariableProxy* proxy = expr->expression()->AsVariableProxy();
10020    if (proxy != NULL && proxy->var()->is_possibly_eval(isolate())) {
10021      return Bailout(kPossibleDirectCallToEval);
10022    }
10023
10024    // The function is on the stack in the unoptimized code during
10025    // evaluation of the arguments.
10026    CHECK_ALIVE(VisitForValue(expr->expression()));
10027    HValue* function = Top();
10028    if (function->IsConstant() &&
10029        HConstant::cast(function)->handle(isolate())->IsJSFunction()) {
10030      Handle<Object> constant = HConstant::cast(function)->handle(isolate());
10031      Handle<JSFunction> target = Handle<JSFunction>::cast(constant);
10032      expr->SetKnownGlobalTarget(target);
10033    }
10034
10035    // Placeholder for the receiver.
10036    Push(graph()->GetConstantUndefined());
10037    CHECK_ALIVE(VisitExpressions(expr->arguments()));
10038
10039    if (expr->IsMonomorphic() &&
10040        !IsClassConstructor(expr->target()->shared()->kind())) {
10041      Add<HCheckValue>(function, expr->target());
10042
10043      // Patch the global object on the stack by the expected receiver.
10044      HValue* receiver = ImplicitReceiverFor(function, expr->target());
10045      const int receiver_index = argument_count - 1;
10046      environment()->SetExpressionStackAt(receiver_index, receiver);
10047
10048      if (TryInlineBuiltinFunctionCall(expr)) {
10049        if (FLAG_trace_inlining) {
10050          PrintF("Inlining builtin ");
10051          expr->target()->ShortPrint();
10052          PrintF("\n");
10053        }
10054        return;
10055      }
10056      if (TryInlineApiFunctionCall(expr, receiver)) return;
10057      if (TryHandleArrayCall(expr, function)) return;
10058      if (TryInlineCall(expr)) return;
10059
10060      PushArgumentsFromEnvironment(argument_count);
Ben Murdochda12d292016-06-02 14:46:10 +010010061      call = NewCallConstantFunction(expr->target(), argument_count,
10062                                     syntactic_tail_call_mode, tail_call_mode);
Ben Murdoch4a90d5f2016-03-22 12:00:34 +000010063    } else {
10064      PushArgumentsFromEnvironment(argument_count);
Ben Murdoch4a90d5f2016-03-22 12:00:34 +000010065      if (expr->is_uninitialized() &&
10066          expr->IsUsingCallFeedbackICSlot(isolate())) {
10067        // We've never seen this call before, so let's have Crankshaft learn
10068        // through the type vector.
Ben Murdochda12d292016-06-02 14:46:10 +010010069        call = NewCallFunctionViaIC(function, argument_count,
10070                                    syntactic_tail_call_mode,
10071                                    ConvertReceiverMode::kNullOrUndefined,
10072                                    tail_call_mode, expr->CallFeedbackICSlot());
10073      } else {
10074        call = NewCallFunction(
10075            function, argument_count, syntactic_tail_call_mode,
10076            ConvertReceiverMode::kNullOrUndefined, tail_call_mode);
Ben Murdoch4a90d5f2016-03-22 12:00:34 +000010077      }
10078    }
10079  }
10080
10081  Drop(1);  // Drop the function.
10082  return ast_context()->ReturnInstruction(call, expr->id());
10083}
10084
10085
// Inlines an Array-constructor allocation (0 or 1 argument) driven by
// allocation-site feedback. Registers a stable-transition dependency on the
// site so the code deopts if the site's elements-kind feedback changes, then
// allocates via JSArrayBuilder with allocation-site tracking disabled.
10086void HOptimizedGraphBuilder::BuildInlinedCallArray(
10087    Expression* expression,
10088    int argument_count,
10089    Handle<AllocationSite> site) {
10090  DCHECK(!site.is_null());
10091  DCHECK(argument_count >= 0 && argument_count <= 1);
10092  NoObservableSideEffectsScope no_effects(this);
10093
10094  // We should at least have the constructor on the expression stack.
10095  HValue* constructor = environment()->ExpressionStackAt(argument_count);
10096
10097  // Register on the site for deoptimization if the transition feedback changes.
10098  top_info()->dependencies()->AssumeTransitionStable(site);
10099  ElementsKind kind = site->GetElementsKind();
10100  HInstruction* site_instruction = Add<HConstant>(site);
10101
10102  // In the single constant argument case, we may have to adjust elements kind
10103  // to avoid creating a packed non-empty array.
10104  if (argument_count == 1 && !IsHoleyElementsKind(kind)) {
10105    HValue* argument = environment()->Top();
10106    if (argument->IsConstant()) {
10107      HConstant* constant_argument = HConstant::cast(argument);
10108      DCHECK(constant_argument->HasSmiValue());
10109      int constant_array_size = constant_argument->Integer32Value();
10110      if (constant_array_size != 0) {
        // new Array(n) with n > 0 yields n holes, so the kind must be holey.
10111        kind = GetHoleyElementsKind(kind);
10112      }
10113    }
10114  }
10115
10116  // Build the array.
10117  JSArrayBuilder array_builder(this,
10118                               kind,
10119                               site_instruction,
10120                               constructor,
10121                               DISABLE_ALLOCATION_SITES);
10122  HValue* new_object = argument_count == 0
10123      ? array_builder.AllocateEmptyArray()
10124      : BuildAllocateArrayFromLength(&array_builder, Top());
10125
  // Drop arguments plus constructor (and the function for plain calls).
10126  int args_to_drop = argument_count + (expression->IsCall() ? 2 : 1);
10127  Drop(args_to_drop);
10128  ast_context()->ReturnValue(new_object);
10129}
10130
10131
10132// Checks whether allocation using the given constructor can be inlined.
10133static bool IsAllocationInlineable(Handle<JSFunction> constructor) {
10134 return constructor->has_initial_map() &&
Ben Murdoch097c5b22016-05-18 11:27:45 +010010135 !IsSubclassConstructor(constructor->shared()->kind()) &&
Ben Murdoch4a90d5f2016-03-22 12:00:34 +000010136 constructor->initial_map()->instance_type() == JS_OBJECT_TYPE &&
10137 constructor->initial_map()->instance_size() <
10138 HAllocate::kMaxInlineSize;
10139}
10140
10141
10142bool HOptimizedGraphBuilder::IsCallArrayInlineable(
10143 int argument_count,
10144 Handle<AllocationSite> site) {
10145 Handle<JSFunction> caller = current_info()->closure();
10146 Handle<JSFunction> target = array_function();
10147 // We should have the function plus array arguments on the environment stack.
10148 DCHECK(environment()->length() >= (argument_count + 1));
10149 DCHECK(!site.is_null());
10150
10151 bool inline_ok = false;
10152 if (site->CanInlineCall()) {
10153 // We also want to avoid inlining in certain 1 argument scenarios.
10154 if (argument_count == 1) {
10155 HValue* argument = Top();
10156 if (argument->IsConstant()) {
10157 // Do not inline if the constant length argument is not a smi or
10158 // outside the valid range for unrolled loop initialization.
10159 HConstant* constant_argument = HConstant::cast(argument);
10160 if (constant_argument->HasSmiValue()) {
10161 int value = constant_argument->Integer32Value();
10162 inline_ok = value >= 0 && value <= kElementLoopUnrollThreshold;
10163 if (!inline_ok) {
10164 TraceInline(target, caller,
10165 "Constant length outside of valid inlining range.");
10166 }
10167 }
10168 } else {
10169 TraceInline(target, caller,
10170 "Dont inline [new] Array(n) where n isn't constant.");
10171 }
10172 } else if (argument_count == 0) {
10173 inline_ok = true;
10174 } else {
10175 TraceInline(target, caller, "Too many arguments to inline.");
10176 }
10177 } else {
10178 TraceInline(target, caller, "AllocationSite requested no inlining.");
10179 }
10180
10181 if (inline_ok) {
10182 TraceInline(target, caller, NULL);
10183 }
10184 return inline_ok;
10185}
10186
10187
// Emits Hydrogen code for a 'new' expression. When the target constructor is
// monomorphic and simple enough, the receiver allocation (and, if possible,
// the constructor body itself) is inlined; otherwise a generic Construct stub
// call is emitted.
void HOptimizedGraphBuilder::VisitCallNew(CallNew* expr) {
  DCHECK(!HasStackOverflow());
  DCHECK(current_block() != NULL);
  DCHECK(current_block()->HasPredecessor());
  if (!top_info()->is_tracking_positions()) SetSourcePosition(expr->position());
  int argument_count = expr->arguments()->length() + 1;  // Plus constructor.
  Factory* factory = isolate()->factory();

  // The constructor function is on the stack in the unoptimized code
  // during evaluation of the arguments.
  CHECK_ALIVE(VisitForValue(expr->expression()));
  HValue* function = Top();
  CHECK_ALIVE(VisitExpressions(expr->arguments()));

  // If the callee is a compile-time constant JSFunction, record it so the
  // monomorphic fast path below can specialize on it.
  if (function->IsConstant() &&
      HConstant::cast(function)->handle(isolate())->IsJSFunction()) {
    Handle<Object> constant = HConstant::cast(function)->handle(isolate());
    expr->SetKnownGlobalTarget(Handle<JSFunction>::cast(constant));
  }

  if (FLAG_inline_construct &&
      expr->IsMonomorphic() &&
      IsAllocationInlineable(expr->target())) {
    Handle<JSFunction> constructor = expr->target();
    DCHECK(
        constructor->shared()->construct_stub() ==
        isolate()->builtins()->builtin(Builtins::kJSConstructStubGeneric) ||
        constructor->shared()->construct_stub() ==
        isolate()->builtins()->builtin(Builtins::kJSConstructStubApi));
    // Deoptimize if a different function shows up at runtime.
    HValue* check = Add<HCheckValue>(function, constructor);

    // Force completion of inobject slack tracking before generating
    // allocation code to finalize instance size.
    constructor->CompleteInobjectSlackTrackingIfActive();

    // Calculate instance size from initial map of constructor.
    DCHECK(constructor->has_initial_map());
    Handle<Map> initial_map(constructor->initial_map());
    int instance_size = initial_map->instance_size();

    // Allocate an instance of the implicit receiver object.
    HValue* size_in_bytes = Add<HConstant>(instance_size);
    HAllocationMode allocation_mode;
    HAllocate* receiver = BuildAllocate(
        size_in_bytes, HType::JSObject(), JS_OBJECT_TYPE, allocation_mode);
    receiver->set_known_initial_map(initial_map);

    // Initialize map and fields of the newly allocated object.
    { NoObservableSideEffectsScope no_effects(this);
      DCHECK(initial_map->instance_type() == JS_OBJECT_TYPE);
      Add<HStoreNamedField>(receiver,
          HObjectAccess::ForMapAndOffset(initial_map, JSObject::kMapOffset),
          Add<HConstant>(initial_map));
      HValue* empty_fixed_array = Add<HConstant>(factory->empty_fixed_array());
      Add<HStoreNamedField>(receiver,
          HObjectAccess::ForMapAndOffset(initial_map,
                                         JSObject::kPropertiesOffset),
          empty_fixed_array);
      Add<HStoreNamedField>(receiver,
          HObjectAccess::ForMapAndOffset(initial_map,
                                         JSObject::kElementsOffset),
          empty_fixed_array);
      BuildInitializeInobjectProperties(receiver, initial_map);
    }

    // Replace the constructor function with a newly allocated receiver using
    // the index of the receiver from the top of the expression stack.
    const int receiver_index = argument_count - 1;
    DCHECK(environment()->ExpressionStackAt(receiver_index) == function);
    environment()->SetExpressionStackAt(receiver_index, receiver);

    if (TryInlineConstruct(expr, receiver)) {
      // Inlining worked, add a dependency on the initial map to make sure that
      // this code is deoptimized whenever the initial map of the constructor
      // changes.
      top_info()->dependencies()->AssumeInitialMapCantChange(initial_map);
      return;
    }

    // TODO(mstarzinger): For now we remove the previous HAllocate and all
    // corresponding instructions and instead add HPushArguments for the
    // arguments in case inlining failed. What we actually should do is for
    // inlining to try to build a subgraph without mutating the parent graph.
    HInstruction* instr = current_block()->last();
    do {
      HInstruction* prev_instr = instr->previous();
      instr->DeleteAndReplaceWith(NULL);
      instr = prev_instr;
    } while (instr != check);
    // Restore the constructor on the stack for the generic call path below.
    environment()->SetExpressionStackAt(receiver_index, function);
  } else {
    // The constructor function is both an operand to the instruction and an
    // argument to the construct call.
    if (TryHandleArrayCallNew(expr, function)) return;
  }

  // Generic path: call the Construct stub.
  HValue* arity = Add<HConstant>(argument_count - 1);
  HValue* op_vals[] = {context(), function, function, arity};
  Callable callable = CodeFactory::Construct(isolate());
  HConstant* stub = Add<HConstant>(callable.code());
  PushArgumentsFromEnvironment(argument_count);
  HInstruction* construct = New<HCallWithDescriptor>(
      stub, argument_count, callable.descriptor(), ArrayVector(op_vals));
  return ast_context()->ReturnInstruction(construct, expr->id());
}
10293
10294
10295void HOptimizedGraphBuilder::BuildInitializeInobjectProperties(
10296 HValue* receiver, Handle<Map> initial_map) {
10297 if (initial_map->GetInObjectProperties() != 0) {
10298 HConstant* undefined = graph()->GetConstantUndefined();
10299 for (int i = 0; i < initial_map->GetInObjectProperties(); i++) {
10300 int property_offset = initial_map->GetInObjectPropertyOffset(i);
10301 Add<HStoreNamedField>(receiver, HObjectAccess::ForMapAndOffset(
10302 initial_map, property_offset),
10303 undefined);
10304 }
10305 }
10306}
10307
10308
// Allocates and fully initializes a JSArrayBuffer with no backing store
// (backing store pointer 0), marked external and neuterable. The byte length
// is the only caller-supplied field; everything else is canonical.
HValue* HGraphBuilder::BuildAllocateEmptyArrayBuffer(HValue* byte_length) {
  // We HForceRepresentation here to avoid allocations during an *-to-tagged
  // HChange that could cause GC while the array buffer object is not fully
  // initialized.
  HObjectAccess byte_length_access(HObjectAccess::ForJSArrayBufferByteLength());
  byte_length = AddUncasted<HForceRepresentation>(
      byte_length, byte_length_access.representation());
  HAllocate* result =
      BuildAllocate(Add<HConstant>(JSArrayBuffer::kSizeWithInternalFields),
                    HType::JSObject(), JS_ARRAY_BUFFER_TYPE, HAllocationMode());

  // Install the ArrayBuffer map from the current native context.
  HValue* native_context = BuildGetNativeContext();
  Add<HStoreNamedField>(
      result, HObjectAccess::ForMap(),
      Add<HLoadNamedField>(
          native_context, nullptr,
          HObjectAccess::ForContextSlot(Context::ARRAY_BUFFER_MAP_INDEX)));

  // Properties and elements both start out empty.
  HConstant* empty_fixed_array =
      Add<HConstant>(isolate()->factory()->empty_fixed_array());
  Add<HStoreNamedField>(
      result, HObjectAccess::ForJSArrayOffset(JSArray::kPropertiesOffset),
      empty_fixed_array);
  Add<HStoreNamedField>(
      result, HObjectAccess::ForJSArrayOffset(JSArray::kElementsOffset),
      empty_fixed_array);
  // No backing store: store a Smi zero as the backing-store pointer.
  Add<HStoreNamedField>(
      result, HObjectAccess::ForJSArrayBufferBackingStore().WithRepresentation(
                  Representation::Smi()),
      graph()->GetConstant0());
  Add<HStoreNamedField>(result, byte_length_access, byte_length);
  Add<HStoreNamedField>(result, HObjectAccess::ForJSArrayBufferBitFieldSlot(),
                        graph()->GetConstant0());
  Add<HStoreNamedField>(
      result, HObjectAccess::ForJSArrayBufferBitField(),
      Add<HConstant>((1 << JSArrayBuffer::IsExternal::kShift) |
                     (1 << JSArrayBuffer::IsNeuterable::kShift)));

  // Clear all embedder (internal) fields.
  for (int field = 0; field < v8::ArrayBuffer::kInternalFieldCount; ++field) {
    Add<HStoreNamedField>(
        result,
        HObjectAccess::ForObservableJSObjectOffset(
            JSArrayBuffer::kSize + field * kPointerSize, Representation::Smi()),
        graph()->GetConstant0());
  }

  return result;
}
10357
10358
10359template <class ViewClass>
10360void HGraphBuilder::BuildArrayBufferViewInitialization(
10361 HValue* obj,
10362 HValue* buffer,
10363 HValue* byte_offset,
10364 HValue* byte_length) {
10365
10366 for (int offset = ViewClass::kSize;
10367 offset < ViewClass::kSizeWithInternalFields;
10368 offset += kPointerSize) {
10369 Add<HStoreNamedField>(obj,
10370 HObjectAccess::ForObservableJSObjectOffset(offset),
10371 graph()->GetConstant0());
10372 }
10373
10374 Add<HStoreNamedField>(
10375 obj,
10376 HObjectAccess::ForJSArrayBufferViewByteOffset(),
10377 byte_offset);
10378 Add<HStoreNamedField>(
10379 obj,
10380 HObjectAccess::ForJSArrayBufferViewByteLength(),
10381 byte_length);
10382 Add<HStoreNamedField>(obj, HObjectAccess::ForJSArrayBufferViewBuffer(),
10383 buffer);
10384}
10385
10386
// Allocates the elements object for a typed array whose data lives in an
// externally allocated (off-heap) buffer: only the FixedTypedArrayBase header
// is allocated on-heap, and its external pointer is set to the backing store
// (plus byte offset).
HValue* HOptimizedGraphBuilder::BuildAllocateExternalElements(
    ExternalArrayType array_type,
    bool is_zero_byte_offset,
    HValue* buffer, HValue* byte_offset, HValue* length) {
  Handle<Map> external_array_map(
      isolate()->heap()->MapForFixedTypedArray(array_type));

  // The HForceRepresentation is to prevent possible deopt on int-smi
  // conversion after allocation but before the new object fields are set.
  length = AddUncasted<HForceRepresentation>(length, Representation::Smi());
  HValue* elements = Add<HAllocate>(
      Add<HConstant>(FixedTypedArrayBase::kHeaderSize), HType::HeapObject(),
      NOT_TENURED, external_array_map->instance_type(),
      graph()->GetConstant0());

  AddStoreMapConstant(elements, external_array_map);
  Add<HStoreNamedField>(elements,
      HObjectAccess::ForFixedArrayLength(), length);

  HValue* backing_store = Add<HLoadNamedField>(
      buffer, nullptr, HObjectAccess::ForJSArrayBufferBackingStore());

  // Compute the start of this view's data inside the backing store.
  HValue* typed_array_start;
  if (is_zero_byte_offset) {
    typed_array_start = backing_store;
  } else {
    HInstruction* external_pointer =
        AddUncasted<HAdd>(backing_store, byte_offset);
    // Arguments are checked prior to call to TypedArrayInitialize,
    // including byte_offset.
    external_pointer->ClearFlag(HValue::kCanOverflow);
    typed_array_start = external_pointer;
  }

  // Base pointer 0 marks the data as external (not on-heap).
  Add<HStoreNamedField>(elements,
                        HObjectAccess::ForFixedTypedArrayBaseBasePointer(),
                        graph()->GetConstant0());
  Add<HStoreNamedField>(elements,
                        HObjectAccess::ForFixedTypedArrayBaseExternalPointer(),
                        typed_array_start);

  return elements;
}
10430
10431
// Allocates an on-heap elements object for a typed array: header plus the
// data itself, properly aligned, with an optional zero-fill loop when
// |initialize| is set.
HValue* HOptimizedGraphBuilder::BuildAllocateFixedTypedArray(
    ExternalArrayType array_type, size_t element_size,
    ElementsKind fixed_elements_kind, HValue* byte_length, HValue* length,
    bool initialize) {
  STATIC_ASSERT(
      (FixedTypedArrayBase::kHeaderSize & kObjectAlignmentMask) == 0);
  HValue* total_size;

  // if fixed array's elements are not aligned to object's alignment,
  // we need to align the whole array to object alignment.
  if (element_size % kObjectAlignment != 0) {
    total_size = BuildObjectSizeAlignment(
        byte_length, FixedTypedArrayBase::kHeaderSize);
  } else {
    total_size = AddUncasted<HAdd>(byte_length,
        Add<HConstant>(FixedTypedArrayBase::kHeaderSize));
    total_size->ClearFlag(HValue::kCanOverflow);
  }

  // The HForceRepresentation is to prevent possible deopt on int-smi
  // conversion after allocation but before the new object fields are set.
  length = AddUncasted<HForceRepresentation>(length, Representation::Smi());
  Handle<Map> fixed_typed_array_map(
      isolate()->heap()->MapForFixedTypedArray(array_type));
  HAllocate* elements = Add<HAllocate>(
      total_size, HType::HeapObject(), NOT_TENURED,
      fixed_typed_array_map->instance_type(), graph()->GetConstant0());

#ifndef V8_HOST_ARCH_64_BIT
  // On 32-bit hosts, float64 data must be explicitly double-aligned.
  if (array_type == kExternalFloat64Array) {
    elements->MakeDoubleAligned();
  }
#endif

  AddStoreMapConstant(elements, fixed_typed_array_map);

  Add<HStoreNamedField>(elements,
      HObjectAccess::ForFixedArrayLength(),
      length);
  // Base pointer points at the object itself: the data is on-heap.
  Add<HStoreNamedField>(
      elements, HObjectAccess::ForFixedTypedArrayBaseBasePointer(), elements);

  Add<HStoreNamedField>(
      elements, HObjectAccess::ForFixedTypedArrayBaseExternalPointer(),
      Add<HConstant>(ExternalReference::fixed_typed_array_base_data_offset()));

  HValue* filler = Add<HConstant>(static_cast<int32_t>(0));

  if (initialize) {
    // Zero-fill the data area one element at a time.
    LoopBuilder builder(this, context(), LoopBuilder::kPostIncrement);

    HValue* backing_store = AddUncasted<HAdd>(
        Add<HConstant>(ExternalReference::fixed_typed_array_base_data_offset()),
        elements, AddOfExternalAndTagged);

    HValue* key = builder.BeginBody(
        Add<HConstant>(static_cast<int32_t>(0)),
        length, Token::LT);
    Add<HStoreKeyed>(backing_store, key, filler, elements, fixed_elements_kind);

    builder.EndBody();
  }
  return elements;
}
10496
10497
// Intrinsic lowering for %TypedArrayInitialize. Expects six arguments
// (object, array id, buffer-or-null, byte offset, byte length, initialize
// flag). The fast path requires a Smi byte offset; otherwise control falls
// through to the generic runtime call.
void HOptimizedGraphBuilder::GenerateTypedArrayInitialize(
    CallRuntime* expr) {
  ZoneList<Expression*>* arguments = expr->arguments();

  static const int kObjectArg = 0;
  static const int kArrayIdArg = 1;
  static const int kBufferArg = 2;
  static const int kByteOffsetArg = 3;
  static const int kByteLengthArg = 4;
  static const int kInitializeArg = 5;
  static const int kArgsLength = 6;
  DCHECK(arguments->length() == kArgsLength);


  CHECK_ALIVE(VisitForValue(arguments->at(kObjectArg)));
  HValue* obj = Pop();

  if (!arguments->at(kArrayIdArg)->IsLiteral()) {
    // This should never happen in real use, but can happen when fuzzing.
    // Just bail out.
    Bailout(kNeedSmiLiteral);
    return;
  }
  Handle<Object> value =
      static_cast<Literal*>(arguments->at(kArrayIdArg))->value();
  if (!value->IsSmi()) {
    // This should never happen in real use, but can happen when fuzzing.
    // Just bail out.
    Bailout(kNeedSmiLiteral);
    return;
  }
  int array_id = Smi::cast(*value)->value();

  // A null buffer literal means the data will live on-heap.
  HValue* buffer;
  if (!arguments->at(kBufferArg)->IsNullLiteral()) {
    CHECK_ALIVE(VisitForValue(arguments->at(kBufferArg)));
    buffer = Pop();
  } else {
    buffer = NULL;
  }

  HValue* byte_offset;
  bool is_zero_byte_offset;

  if (arguments->at(kByteOffsetArg)->IsLiteral()
      && Smi::FromInt(0) ==
      *static_cast<Literal*>(arguments->at(kByteOffsetArg))->value()) {
    byte_offset = Add<HConstant>(static_cast<int32_t>(0));
    is_zero_byte_offset = true;
  } else {
    CHECK_ALIVE(VisitForValue(arguments->at(kByteOffsetArg)));
    byte_offset = Pop();
    is_zero_byte_offset = false;
    DCHECK(buffer != NULL);
  }

  CHECK_ALIVE(VisitForValue(arguments->at(kByteLengthArg)));
  HValue* byte_length = Pop();

  CHECK(arguments->at(kInitializeArg)->IsLiteral());
  bool initialize = static_cast<Literal*>(arguments->at(kInitializeArg))
                        ->value()
                        ->BooleanValue();

  NoObservableSideEffectsScope scope(this);
  IfBuilder byte_offset_smi(this);

  // The fast path below is only valid for Smi byte offsets; guard on that
  // unless the offset is the literal zero.
  if (!is_zero_byte_offset) {
    byte_offset_smi.If<HIsSmiAndBranch>(byte_offset);
    byte_offset_smi.Then();
  }

  ExternalArrayType array_type =
      kExternalInt8Array;  // Bogus initialization.
  size_t element_size = 1;  // Bogus initialization.
  ElementsKind fixed_elements_kind =  // Bogus initialization.
      INT8_ELEMENTS;
  Runtime::ArrayIdToTypeAndSize(array_id,
      &array_type,
      &fixed_elements_kind,
      &element_size);


  { //  byte_offset is Smi.
    HValue* allocated_buffer = buffer;
    if (buffer == NULL) {
      allocated_buffer = BuildAllocateEmptyArrayBuffer(byte_length);
    }
    BuildArrayBufferViewInitialization<JSTypedArray>(obj, allocated_buffer,
                                                     byte_offset, byte_length);

    // length = byte_length / element_size.
    HInstruction* length = AddUncasted<HDiv>(byte_length,
        Add<HConstant>(static_cast<int32_t>(element_size)));

    Add<HStoreNamedField>(obj,
        HObjectAccess::ForJSTypedArrayLength(),
        length);

    // External backing store vs. on-heap data.
    HValue* elements;
    if (buffer != NULL) {
      elements = BuildAllocateExternalElements(
          array_type, is_zero_byte_offset, buffer, byte_offset, length);
    } else {
      DCHECK(is_zero_byte_offset);
      elements = BuildAllocateFixedTypedArray(array_type, element_size,
                                              fixed_elements_kind, byte_length,
                                              length, initialize);
    }
    Add<HStoreNamedField>(
        obj, HObjectAccess::ForElementsPointer(), elements);
  }

  if (!is_zero_byte_offset) {
    byte_offset_smi.Else();
    {  //  byte_offset is not Smi.
      // Slow path: re-push all six arguments and call the runtime.
      Push(obj);
      CHECK_ALIVE(VisitForValue(arguments->at(kArrayIdArg)));
      Push(buffer);
      Push(byte_offset);
      Push(byte_length);
      CHECK_ALIVE(VisitForValue(arguments->at(kInitializeArg)));
      PushArgumentsFromEnvironment(kArgsLength);
      Add<HCallRuntime>(expr->function(), kArgsLength);
    }
  }
  byte_offset_smi.End();
}
10626
10627
10628void HOptimizedGraphBuilder::GenerateMaxSmi(CallRuntime* expr) {
10629 DCHECK(expr->arguments()->length() == 0);
10630 HConstant* max_smi = New<HConstant>(static_cast<int32_t>(Smi::kMaxValue));
10631 return ast_context()->ReturnInstruction(max_smi, expr->id());
10632}
10633
10634
10635void HOptimizedGraphBuilder::GenerateTypedArrayMaxSizeInHeap(
10636 CallRuntime* expr) {
10637 DCHECK(expr->arguments()->length() == 0);
10638 HConstant* result = New<HConstant>(static_cast<int32_t>(
10639 FLAG_typed_array_max_size_in_heap));
10640 return ast_context()->ReturnInstruction(result, expr->id());
10641}
10642
10643
10644void HOptimizedGraphBuilder::GenerateArrayBufferGetByteLength(
10645 CallRuntime* expr) {
10646 DCHECK(expr->arguments()->length() == 1);
10647 CHECK_ALIVE(VisitForValue(expr->arguments()->at(0)));
10648 HValue* buffer = Pop();
10649 HInstruction* result = New<HLoadNamedField>(
10650 buffer, nullptr, HObjectAccess::ForJSArrayBufferByteLength());
10651 return ast_context()->ReturnInstruction(result, expr->id());
10652}
10653
10654
10655void HOptimizedGraphBuilder::GenerateArrayBufferViewGetByteLength(
10656 CallRuntime* expr) {
10657 NoObservableSideEffectsScope scope(this);
10658 DCHECK(expr->arguments()->length() == 1);
10659 CHECK_ALIVE(VisitForValue(expr->arguments()->at(0)));
10660 HValue* view = Pop();
10661
10662 return ast_context()->ReturnValue(BuildArrayBufferViewFieldAccessor(
10663 view, nullptr,
10664 FieldIndex::ForInObjectOffset(JSArrayBufferView::kByteLengthOffset)));
10665}
10666
10667
10668void HOptimizedGraphBuilder::GenerateArrayBufferViewGetByteOffset(
10669 CallRuntime* expr) {
10670 NoObservableSideEffectsScope scope(this);
10671 DCHECK(expr->arguments()->length() == 1);
10672 CHECK_ALIVE(VisitForValue(expr->arguments()->at(0)));
10673 HValue* view = Pop();
10674
10675 return ast_context()->ReturnValue(BuildArrayBufferViewFieldAccessor(
10676 view, nullptr,
10677 FieldIndex::ForInObjectOffset(JSArrayBufferView::kByteOffsetOffset)));
10678}
10679
10680
10681void HOptimizedGraphBuilder::GenerateTypedArrayGetLength(
10682 CallRuntime* expr) {
10683 NoObservableSideEffectsScope scope(this);
10684 DCHECK(expr->arguments()->length() == 1);
10685 CHECK_ALIVE(VisitForValue(expr->arguments()->at(0)));
10686 HValue* view = Pop();
10687
10688 return ast_context()->ReturnValue(BuildArrayBufferViewFieldAccessor(
10689 view, nullptr,
10690 FieldIndex::ForInObjectOffset(JSTypedArray::kLengthOffset)));
10691}
10692
10693
// Emits code for a runtime call. JS-runtime calls (%-prefixed calls into
// natives-defined JS functions) become direct calls to the known context
// function; recognized intrinsics dispatch to their Generate* lowering;
// everything else becomes a generic HCallRuntime.
void HOptimizedGraphBuilder::VisitCallRuntime(CallRuntime* expr) {
  DCHECK(!HasStackOverflow());
  DCHECK(current_block() != NULL);
  DCHECK(current_block()->HasPredecessor());
  if (expr->is_jsruntime()) {
    // Crankshaft always specializes to the native context, so we can just grab
    // the constant function from the current native context and embed that into
    // the code object.
    Handle<JSFunction> known_function(
        JSFunction::cast(
            current_info()->native_context()->get(expr->context_index())),
        isolate());

    // The callee and the receiver both have to be pushed onto the operand stack
    // before arguments are being evaluated.
    HConstant* function = Add<HConstant>(known_function);
    HValue* receiver = ImplicitReceiverFor(function, known_function);
    Push(function);
    Push(receiver);

    int argument_count = expr->arguments()->length() + 1;  // Count receiver.
    CHECK_ALIVE(VisitExpressions(expr->arguments()));
    PushArgumentsFromEnvironment(argument_count);
    HInstruction* call = NewCallConstantFunction(known_function, argument_count,
                                                 TailCallMode::kDisallow,
                                                 TailCallMode::kDisallow);
    Drop(1);  // Function
    return ast_context()->ReturnInstruction(call, expr->id());
  }

  const Runtime::Function* function = expr->function();
  DCHECK(function != NULL);
  switch (function->function_id) {
// Expand one case per intrinsic that has a dedicated Hydrogen lowering.
#define CALL_INTRINSIC_GENERATOR(Name) \
  case Runtime::kInline##Name: \
    return Generate##Name(expr);

    FOR_EACH_HYDROGEN_INTRINSIC(CALL_INTRINSIC_GENERATOR)
#undef CALL_INTRINSIC_GENERATOR
    default: {
      // No special lowering: emit a plain runtime call.
      int argument_count = expr->arguments()->length();
      CHECK_ALIVE(VisitExpressions(expr->arguments()));
      PushArgumentsFromEnvironment(argument_count);
      HCallRuntime* call = New<HCallRuntime>(function, argument_count);
      return ast_context()->ReturnInstruction(call, expr->id());
    }
  }
}
10742
10743
10744void HOptimizedGraphBuilder::VisitUnaryOperation(UnaryOperation* expr) {
10745 DCHECK(!HasStackOverflow());
10746 DCHECK(current_block() != NULL);
10747 DCHECK(current_block()->HasPredecessor());
10748 switch (expr->op()) {
10749 case Token::DELETE: return VisitDelete(expr);
10750 case Token::VOID: return VisitVoid(expr);
10751 case Token::TYPEOF: return VisitTypeof(expr);
10752 case Token::NOT: return VisitNot(expr);
10753 default: UNREACHABLE();
10754 }
10755}
10756
10757
// Emits code for 'delete expr'. Property deletions call into the runtime;
// variable deletions either bail out (globals) or fold to a constant
// (stack/context slots); anything else evaluates for effect and yields true.
void HOptimizedGraphBuilder::VisitDelete(UnaryOperation* expr) {
  Property* prop = expr->expression()->AsProperty();
  VariableProxy* proxy = expr->expression()->AsVariableProxy();
  if (prop != NULL) {
    // delete obj[key] / delete obj.key: defer to the runtime, choosing the
    // strict or sloppy variant based on the enclosing function's mode.
    CHECK_ALIVE(VisitForValue(prop->obj()));
    CHECK_ALIVE(VisitForValue(prop->key()));
    HValue* key = Pop();
    HValue* obj = Pop();
    Add<HPushArguments>(obj, key);
    HInstruction* instr = New<HCallRuntime>(
        Runtime::FunctionForId(is_strict(function_language_mode())
                                   ? Runtime::kDeleteProperty_Strict
                                   : Runtime::kDeleteProperty_Sloppy),
        2);
    return ast_context()->ReturnInstruction(instr, expr->id());
  } else if (proxy != NULL) {
    Variable* var = proxy->var();
    if (var->IsUnallocatedOrGlobalSlot()) {
      // Deleting a global requires full lookup semantics; punt to fullcodegen.
      Bailout(kDeleteWithGlobalVariable);
    } else if (var->IsStackAllocated() || var->IsContextSlot()) {
      // Result of deleting non-global variables is false. 'this' is not really
      // a variable, though we implement it as one. The subexpression does not
      // have side effects.
      HValue* value = var->HasThisName(isolate()) ? graph()->GetConstantTrue()
                                                  : graph()->GetConstantFalse();
      return ast_context()->ReturnValue(value);
    } else {
      Bailout(kDeleteWithNonGlobalVariable);
    }
  } else {
    // Result of deleting non-property, non-variable reference is true.
    // Evaluate the subexpression for side effects.
    CHECK_ALIVE(VisitForEffect(expr->expression()));
    return ast_context()->ReturnValue(graph()->GetConstantTrue());
  }
}
10794
10795
10796void HOptimizedGraphBuilder::VisitVoid(UnaryOperation* expr) {
10797 CHECK_ALIVE(VisitForEffect(expr->expression()));
10798 return ast_context()->ReturnValue(graph()->GetConstantUndefined());
10799}
10800
10801
10802void HOptimizedGraphBuilder::VisitTypeof(UnaryOperation* expr) {
10803 CHECK_ALIVE(VisitForTypeOf(expr->expression()));
10804 HValue* value = Pop();
10805 HInstruction* instr = New<HTypeof>(value);
10806 return ast_context()->ReturnInstruction(instr, expr->id());
10807}
10808
10809
// Emits code for '!expr'. In a test context the branch targets are simply
// swapped; in an effect context only the operand's side effects matter; in a
// value context the operand is branched on and true/false constants are
// materialized in swapped arms, then joined.
void HOptimizedGraphBuilder::VisitNot(UnaryOperation* expr) {
  if (ast_context()->IsTest()) {
    // Negation in a test context: flip the true/false successors.
    TestContext* context = TestContext::cast(ast_context());
    VisitForControl(expr->expression(),
                    context->if_false(),
                    context->if_true());
    return;
  }

  if (ast_context()->IsEffect()) {
    // The boolean result is unused; only evaluate for side effects.
    VisitForEffect(expr->expression());
    return;
  }

  DCHECK(ast_context()->IsValue());
  // Note the swap: the operand's false edge materializes true, and vice
  // versa, which implements the negation.
  HBasicBlock* materialize_false = graph()->CreateBasicBlock();
  HBasicBlock* materialize_true = graph()->CreateBasicBlock();
  CHECK_BAILOUT(VisitForControl(expr->expression(),
                                materialize_false,
                                materialize_true));

  if (materialize_false->HasPredecessor()) {
    materialize_false->SetJoinId(expr->MaterializeFalseId());
    set_current_block(materialize_false);
    Push(graph()->GetConstantFalse());
  } else {
    materialize_false = NULL;
  }

  if (materialize_true->HasPredecessor()) {
    materialize_true->SetJoinId(expr->MaterializeTrueId());
    set_current_block(materialize_true);
    Push(graph()->GetConstantTrue());
  } else {
    materialize_true = NULL;
  }

  HBasicBlock* join =
    CreateJoin(materialize_false, materialize_true, expr->id());
  set_current_block(join);
  if (join != NULL) return ast_context()->ReturnValue(Pop());
}
10852
10853
10854static Representation RepresentationFor(Type* type) {
10855 DisallowHeapAllocation no_allocation;
10856 if (type->Is(Type::None())) return Representation::None();
10857 if (type->Is(Type::SignedSmall())) return Representation::Smi();
10858 if (type->Is(Type::Signed32())) return Representation::Integer32();
10859 if (type->Is(Type::Number())) return Representation::Double();
10860 return Representation::Tagged();
10861}
10862
10863
// Builds the +1/-1 addition for a count operation (++/--) on the value at
// the top of the expression stack. When |returns_original_input| is set
// (postfix in a value context), an explicit ToNumber of the input is pushed
// so it can be returned after the increment.
HInstruction* HOptimizedGraphBuilder::BuildIncrement(
    bool returns_original_input,
    CountOperation* expr) {
  // The input to the count operation is on top of the expression stack.
  Representation rep = RepresentationFor(expr->type());
  if (rep.IsNone() || rep.IsTagged()) {
    rep = Representation::Smi();
  }

  if (returns_original_input) {
    // We need an explicit HValue representing ToNumber(input). The
    // actual HChange instruction we need is (sometimes) added in a later
    // phase, so it is not available now to be used as an input to HAdd and
    // as the return value.
    HInstruction* number_input = AddUncasted<HForceRepresentation>(Pop(), rep);
    if (!rep.IsDouble()) {
      number_input->SetFlag(HInstruction::kFlexibleRepresentation);
      number_input->SetFlag(HInstruction::kCannotBeTagged);
    }
    Push(number_input);
  }

  // The addition has no side effects, so we do not need
  // to simulate the expression stack after this instruction.
  // Any later failures deopt to the load of the input or earlier.
  HConstant* delta = (expr->op() == Token::INC)
      ? graph()->GetConstant1()
      : graph()->GetConstantMinus1();
  HInstruction* instr = AddUncasted<HAdd>(Top(), delta);
  if (instr->IsAdd()) {
    // Record what we have observed about the operands so representation
    // inference can pick the right arithmetic.
    HAdd* add = HAdd::cast(instr);
    add->set_observed_input_representation(1, rep);
    add->set_observed_input_representation(2, Representation::Smi());
  }
  instr->ClearAllSideEffects();
  instr->SetFlag(HInstruction::kCannotBeTagged);
  return instr;
}
10902
10903
10904void HOptimizedGraphBuilder::BuildStoreForEffect(
10905 Expression* expr, Property* prop, FeedbackVectorSlot slot, BailoutId ast_id,
10906 BailoutId return_id, HValue* object, HValue* key, HValue* value) {
10907 EffectContext for_effect(this);
10908 Push(object);
10909 if (key != NULL) Push(key);
10910 Push(value);
10911 BuildStore(expr, prop, slot, ast_id, return_id);
10912}
10913
10914
// Translates a count operation (prefix/postfix ++ and --) on either a
// variable or a property. Mirrors the full code generator's stack layout:
// for postfix operations in a non-effect context an extra stack slot holds
// ToNumber(input), which becomes the expression's result.
void HOptimizedGraphBuilder::VisitCountOperation(CountOperation* expr) {
  DCHECK(!HasStackOverflow());
  DCHECK(current_block() != NULL);
  DCHECK(current_block()->HasPredecessor());
  if (!top_info()->is_tracking_positions()) SetSourcePosition(expr->position());
  Expression* target = expr->expression();
  VariableProxy* proxy = target->AsVariableProxy();
  Property* prop = target->AsProperty();
  if (proxy == NULL && prop == NULL) {
    return Bailout(kInvalidLhsInCountOperation);
  }

  // Match the full code generator stack by simulating an extra stack
  // element for postfix operations in a non-effect context. The return
  // value is ToNumber(input).
  bool returns_original_input =
      expr->is_postfix() && !ast_context()->IsEffect();
  HValue* input = NULL;  // ToNumber(original_input).
  HValue* after = NULL;  // The result after incrementing or decrementing.

  if (proxy != NULL) {
    Variable* var = proxy->var();
    // Writes to legacy consts are unsupported; writes to ES6 consts throw.
    if (var->mode() == CONST_LEGACY) {
      return Bailout(kUnsupportedCountOperationWithConst);
    }
    if (var->mode() == CONST) {
      return Bailout(kNonInitializerAssignmentToConst);
    }
    // Argument of the count operation is a variable, not a property.
    DCHECK(prop == NULL);
    CHECK_ALIVE(VisitForValue(target));

    after = BuildIncrement(returns_original_input, expr);
    // In the postfix-value case BuildIncrement left ToNumber(input) on the
    // stack; keep it there (Top) and push the new value above it.
    input = returns_original_input ? Top() : Pop();
    Push(after);

    // Store the incremented value back, dispatching on where the variable
    // lives.
    switch (var->location()) {
      case VariableLocation::GLOBAL:
      case VariableLocation::UNALLOCATED:
        HandleGlobalVariableAssignment(var, after, expr->CountSlot(),
                                       expr->AssignmentId());
        break;

      case VariableLocation::PARAMETER:
      case VariableLocation::LOCAL:
        BindIfLive(var, after);
        break;

      case VariableLocation::CONTEXT: {
        // Bail out if we try to mutate a parameter value in a function
        // using the arguments object. We do not (yet) correctly handle the
        // arguments property of the function.
        if (current_info()->scope()->arguments() != NULL) {
          // Parameters will rewrite to context slots. We have no direct
          // way to detect that the variable is a parameter so we use a
          // linear search of the parameter list.
          int count = current_info()->scope()->num_parameters();
          for (int i = 0; i < count; ++i) {
            if (var == current_info()->scope()->parameter(i)) {
              return Bailout(kAssignmentToParameterInArgumentsObject);
            }
          }
        }

        HValue* context = BuildContextChainWalk(var);
        // Lexical variables need a hole check on store (TDZ semantics).
        HStoreContextSlot::Mode mode = IsLexicalVariableMode(var->mode())
            ? HStoreContextSlot::kCheckDeoptimize : HStoreContextSlot::kNoCheck;
        HStoreContextSlot* instr = Add<HStoreContextSlot>(context, var->index(),
                                                          mode, after);
        if (instr->HasObservableSideEffects()) {
          Add<HSimulate>(expr->AssignmentId(), REMOVABLE_SIMULATE);
        }
        break;
      }

      case VariableLocation::LOOKUP:
        return Bailout(kLookupVariableInCountOperation);
    }

    // Pop the simulated stack slots (result, and ToNumber(input) if pushed)
    // and return the appropriate value for prefix vs. postfix.
    Drop(returns_original_input ? 2 : 1);
    return ast_context()->ReturnValue(expr->is_postfix() ? input : after);
  }

  // Argument of the count operation is a property.
  DCHECK(prop != NULL);
  // Reserve the extra stack slot for the postfix result up front.
  if (returns_original_input) Push(graph()->GetConstantUndefined());

  CHECK_ALIVE(VisitForValue(prop->obj()));
  HValue* object = Top();

  HValue* key = NULL;
  if (!prop->key()->IsPropertyName() || prop->IsStringAccess()) {
    CHECK_ALIVE(VisitForValue(prop->key()));
    key = Top();
  }

  // Load the current property value onto the stack.
  CHECK_ALIVE(PushLoad(prop, object, key));

  after = BuildIncrement(returns_original_input, expr);

  if (returns_original_input) {
    input = Pop();
    // Drop object and key to push it again in the effect context below.
    Drop(key == NULL ? 1 : 2);
    environment()->SetExpressionStackAt(0, input);
    CHECK_ALIVE(BuildStoreForEffect(expr, prop, expr->CountSlot(), expr->id(),
                                    expr->AssignmentId(), object, key, after));
    return ast_context()->ReturnValue(Pop());
  }

  environment()->SetExpressionStackAt(0, after);
  return BuildStore(expr, prop, expr->CountSlot(), expr->id(),
                    expr->AssignmentId());
}
11029
11030
11031HInstruction* HOptimizedGraphBuilder::BuildStringCharCodeAt(
11032 HValue* string,
11033 HValue* index) {
11034 if (string->IsConstant() && index->IsConstant()) {
11035 HConstant* c_string = HConstant::cast(string);
11036 HConstant* c_index = HConstant::cast(index);
11037 if (c_string->HasStringValue() && c_index->HasNumberValue()) {
11038 int32_t i = c_index->NumberValueAsInteger32();
11039 Handle<String> s = c_string->StringValue();
11040 if (i < 0 || i >= s->length()) {
11041 return New<HConstant>(std::numeric_limits<double>::quiet_NaN());
11042 }
11043 return New<HConstant>(s->Get(i));
11044 }
11045 }
11046 string = BuildCheckString(string);
11047 index = Add<HBoundsCheck>(index, AddLoadStringLength(string));
11048 return New<HStringCharCodeAt>(string, index);
11049}
11050
11051
11052// Checks if the given shift amounts have following forms:
11053// (N1) and (N2) with N1 + N2 = 32; (sa) and (32 - sa).
11054static bool ShiftAmountsAllowReplaceByRotate(HValue* sa,
11055 HValue* const32_minus_sa) {
11056 if (sa->IsConstant() && const32_minus_sa->IsConstant()) {
11057 const HConstant* c1 = HConstant::cast(sa);
11058 const HConstant* c2 = HConstant::cast(const32_minus_sa);
11059 return c1->HasInteger32Value() && c2->HasInteger32Value() &&
11060 (c1->Integer32Value() + c2->Integer32Value() == 32);
11061 }
11062 if (!const32_minus_sa->IsSub()) return false;
11063 HSub* sub = HSub::cast(const32_minus_sa);
11064 return sub->left()->EqualsInteger32Constant(32) && sub->right() == sa;
11065}
11066
11067
11068// Checks if the left and the right are shift instructions with the oposite
11069// directions that can be replaced by one rotate right instruction or not.
11070// Returns the operand and the shift amount for the rotate instruction in the
11071// former case.
11072bool HGraphBuilder::MatchRotateRight(HValue* left,
11073 HValue* right,
11074 HValue** operand,
11075 HValue** shift_amount) {
11076 HShl* shl;
11077 HShr* shr;
11078 if (left->IsShl() && right->IsShr()) {
11079 shl = HShl::cast(left);
11080 shr = HShr::cast(right);
11081 } else if (left->IsShr() && right->IsShl()) {
11082 shl = HShl::cast(right);
11083 shr = HShr::cast(left);
11084 } else {
11085 return false;
11086 }
11087 if (shl->left() != shr->left()) return false;
11088
11089 if (!ShiftAmountsAllowReplaceByRotate(shl->right(), shr->right()) &&
11090 !ShiftAmountsAllowReplaceByRotate(shr->right(), shl->right())) {
11091 return false;
11092 }
11093 *operand = shr->left();
11094 *shift_amount = shr->right();
11095 return true;
11096}
11097
11098
11099bool CanBeZero(HValue* right) {
11100 if (right->IsConstant()) {
11101 HConstant* right_const = HConstant::cast(right);
11102 if (right_const->HasInteger32Value() &&
11103 (right_const->Integer32Value() & 0x1f) != 0) {
11104 return false;
11105 }
11106 }
11107 return true;
11108}
11109
11110
11111HValue* HGraphBuilder::EnforceNumberType(HValue* number,
11112 Type* expected) {
11113 if (expected->Is(Type::SignedSmall())) {
11114 return AddUncasted<HForceRepresentation>(number, Representation::Smi());
11115 }
11116 if (expected->Is(Type::Signed32())) {
11117 return AddUncasted<HForceRepresentation>(number,
11118 Representation::Integer32());
11119 }
11120 return number;
11121}
11122
11123
// Attempts to convert |value| to a number, refining *expected as a side
// effect. Constants are folded via CopyToTruncatedNumber; otherwise the
// value is returned unchanged and only the expected type is adjusted
// (the actual numeric conversion is inserted later by HChange).
HValue* HGraphBuilder::TruncateToNumber(HValue* value, Type** expected) {
  if (value->IsConstant()) {
    HConstant* constant = HConstant::cast(value);
    Maybe<HConstant*> number =
        constant->CopyToTruncatedNumber(isolate(), zone());
    if (number.IsJust()) {
      // Folded to a numeric constant; the expected type is now exactly
      // Number.
      *expected = Type::Number();
      return AddInstruction(number.FromJust());
    }
  }

  // We put temporary values on the stack, which don't correspond to anything
  // in baseline code. Since nothing is observable we avoid recording those
  // pushes with a NoObservableSideEffectsScope.
  NoObservableSideEffectsScope no_effects(this);

  Type* expected_type = *expected;

  // Separate the number type from the rest.
  Type* expected_obj =
      Type::Intersect(expected_type, Type::NonNumber(), zone());
  Type* expected_number =
      Type::Intersect(expected_type, Type::Number(), zone());

  // We expect to get a number.
  // (We need to check first, since Type::None->Is(Type::Any()) == true.)
  if (expected_obj->Is(Type::None())) {
    DCHECK(!expected_number->Is(Type::None()));
    return value;
  }

  if (expected_obj->Is(Type::Undefined())) {
    // This is already done by HChange.
    *expected = Type::Union(expected_number, Type::Number(), zone());
    return value;
  }

  // Anything else (non-number objects in the feedback): leave the value
  // and the expected type untouched.
  return value;
}
11163
11164
11165HValue* HOptimizedGraphBuilder::BuildBinaryOperation(
11166 BinaryOperation* expr,
11167 HValue* left,
11168 HValue* right,
11169 PushBeforeSimulateBehavior push_sim_result) {
Ben Murdochc5610432016-08-08 18:44:38 +010011170 Type* left_type = bounds_.get(expr->left()).lower;
11171 Type* right_type = bounds_.get(expr->right()).lower;
11172 Type* result_type = bounds_.get(expr).lower;
Ben Murdoch4a90d5f2016-03-22 12:00:34 +000011173 Maybe<int> fixed_right_arg = expr->fixed_right_arg();
11174 Handle<AllocationSite> allocation_site = expr->allocation_site();
11175
11176 HAllocationMode allocation_mode;
11177 if (FLAG_allocation_site_pretenuring && !allocation_site.is_null()) {
11178 allocation_mode = HAllocationMode(allocation_site);
11179 }
11180 HValue* result = HGraphBuilder::BuildBinaryOperation(
11181 expr->op(), left, right, left_type, right_type, result_type,
Ben Murdoch097c5b22016-05-18 11:27:45 +010011182 fixed_right_arg, allocation_mode, expr->id());
Ben Murdoch4a90d5f2016-03-22 12:00:34 +000011183 // Add a simulate after instructions with observable side effects, and
11184 // after phis, which are the result of BuildBinaryOperation when we
11185 // inlined some complex subgraph.
11186 if (result->HasObservableSideEffects() || result->IsPhi()) {
11187 if (push_sim_result == PUSH_BEFORE_SIMULATE) {
11188 Push(result);
11189 Add<HSimulate>(expr->id(), REMOVABLE_SIMULATE);
11190 Drop(1);
11191 } else {
11192 Add<HSimulate>(expr->id(), REMOVABLE_SIMULATE);
11193 }
11194 }
11195 return result;
11196}
11197
// Generic binary-operation builder shared by the optimizing compiler and
// code stubs. Selects between string addition, the +x/1*x shortcut,
// stub calls (for tagged non-primitive operands inside stubs), and inline
// arithmetic/bitwise instructions, based on |op| and the operand type
// feedback. Inserts soft deopts when type feedback is missing so that the
// function is re-optimized once feedback exists.
HValue* HGraphBuilder::BuildBinaryOperation(Token::Value op, HValue* left,
                                            HValue* right, Type* left_type,
                                            Type* right_type, Type* result_type,
                                            Maybe<int> fixed_right_arg,
                                            HAllocationMode allocation_mode,
                                            BailoutId opt_id) {
  bool maybe_string_add = false;
  if (op == Token::ADD) {
    // If we are adding constant string with something for which we don't have
    // a feedback yet, assume that it's also going to be a string and don't
    // generate deopt instructions.
    if (!left_type->IsInhabited() && right->IsConstant() &&
        HConstant::cast(right)->HasStringValue()) {
      left_type = Type::String();
    }

    if (!right_type->IsInhabited() && left->IsConstant() &&
        HConstant::cast(left)->HasStringValue()) {
      right_type = Type::String();
    }

    maybe_string_add = (left_type->Maybe(Type::String()) ||
                        left_type->Maybe(Type::Receiver()) ||
                        right_type->Maybe(Type::String()) ||
                        right_type->Maybe(Type::Receiver()));
  }

  Representation left_rep = RepresentationFor(left_type);
  Representation right_rep = RepresentationFor(right_type);

  // Missing feedback for either side: emit a soft deopt so the code is
  // regenerated once real feedback has been gathered, and fall back to the
  // fully general Any type.
  if (!left_type->IsInhabited()) {
    Add<HDeoptimize>(
        Deoptimizer::kInsufficientTypeFeedbackForLHSOfBinaryOperation,
        Deoptimizer::SOFT);
    left_type = Type::Any();
    left_rep = RepresentationFor(left_type);
    maybe_string_add = op == Token::ADD;
  }

  if (!right_type->IsInhabited()) {
    Add<HDeoptimize>(
        Deoptimizer::kInsufficientTypeFeedbackForRHSOfBinaryOperation,
        Deoptimizer::SOFT);
    right_type = Type::Any();
    right_rep = RepresentationFor(right_type);
    maybe_string_add = op == Token::ADD;
  }

  // Only coerce operands to numbers when string concatenation is ruled out.
  if (!maybe_string_add) {
    left = TruncateToNumber(left, &left_type);
    right = TruncateToNumber(right, &right_type);
  }

  // Special case for string addition here.
  if (op == Token::ADD &&
      (left_type->Is(Type::String()) || right_type->Is(Type::String()))) {
    // Validate type feedback for left argument.
    if (left_type->Is(Type::String())) {
      left = BuildCheckString(left);
    }

    // Validate type feedback for right argument.
    if (right_type->Is(Type::String())) {
      right = BuildCheckString(right);
    }

    // Convert left argument as necessary.
    if (left_type->Is(Type::Number())) {
      DCHECK(right_type->Is(Type::String()));
      left = BuildNumberToString(left, left_type);
    } else if (!left_type->Is(Type::String())) {
      DCHECK(right_type->Is(Type::String()));
      // Left side has unknown type: let the stub convert it.
      return AddUncasted<HStringAdd>(
          left, right, allocation_mode.GetPretenureMode(),
          STRING_ADD_CONVERT_LEFT, allocation_mode.feedback_site());
    }

    // Convert right argument as necessary.
    if (right_type->Is(Type::Number())) {
      DCHECK(left_type->Is(Type::String()));
      right = BuildNumberToString(right, right_type);
    } else if (!right_type->Is(Type::String())) {
      DCHECK(left_type->Is(Type::String()));
      // Right side has unknown type: let the stub convert it.
      return AddUncasted<HStringAdd>(
          left, right, allocation_mode.GetPretenureMode(),
          STRING_ADD_CONVERT_RIGHT, allocation_mode.feedback_site());
    }

    // Fast paths for empty constant strings.
    Handle<String> left_string =
        left->IsConstant() && HConstant::cast(left)->HasStringValue()
            ? HConstant::cast(left)->StringValue()
            : Handle<String>();
    Handle<String> right_string =
        right->IsConstant() && HConstant::cast(right)->HasStringValue()
            ? HConstant::cast(right)->StringValue()
            : Handle<String>();
    if (!left_string.is_null() && left_string->length() == 0) return right;
    if (!right_string.is_null() && right_string->length() == 0) return left;
    if (!left_string.is_null() && !right_string.is_null()) {
      return AddUncasted<HStringAdd>(
          left, right, allocation_mode.GetPretenureMode(),
          STRING_ADD_CHECK_NONE, allocation_mode.feedback_site());
    }

    // Register the dependent code with the allocation site.
    if (!allocation_mode.feedback_site().is_null()) {
      DCHECK(!graph()->info()->IsStub());
      Handle<AllocationSite> site(allocation_mode.feedback_site());
      top_info()->dependencies()->AssumeTenuringDecision(site);
    }

    // Inline the string addition into the stub when creating allocation
    // mementos to gather allocation site feedback, or if we can statically
    // infer that we're going to create a cons string.
    if ((graph()->info()->IsStub() &&
         allocation_mode.CreateAllocationMementos()) ||
        (left->IsConstant() &&
         HConstant::cast(left)->HasStringValue() &&
         HConstant::cast(left)->StringValue()->length() + 1 >=
           ConsString::kMinLength) ||
        (right->IsConstant() &&
         HConstant::cast(right)->HasStringValue() &&
         HConstant::cast(right)->StringValue()->length() + 1 >=
           ConsString::kMinLength)) {
      return BuildStringAdd(left, right, allocation_mode);
    }

    // Fallback to using the string add stub.
    return AddUncasted<HStringAdd>(
        left, right, allocation_mode.GetPretenureMode(), STRING_ADD_CHECK_NONE,
        allocation_mode.feedback_site());
  }

  // Special case for +x here.
  if (op == Token::MUL) {
    if (left->EqualsInteger32Constant(1)) {
      return BuildToNumber(right);
    }
    if (right->EqualsInteger32Constant(1)) {
      return BuildToNumber(left);
    }
  }

  // Inside stubs, pin operands to their feedback representation.
  if (graph()->info()->IsStub()) {
    left = EnforceNumberType(left, left_type);
    right = EnforceNumberType(right, right_type);
  }

  Representation result_rep = RepresentationFor(result_type);

  bool is_non_primitive = (left_rep.IsTagged() && !left_rep.IsSmi()) ||
                          (right_rep.IsTagged() && !right_rep.IsSmi());

  HInstruction* instr = NULL;
  // Only the stub is allowed to call into the runtime, since otherwise we would
  // inline several instructions (including the two pushes) for every tagged
  // operation in optimized code, which is more expensive, than a stub call.
  if (graph()->info()->IsStub() && is_non_primitive) {
    HValue* values[] = {context(), left, right};
// GET_STUB emits a call to the CodeFactory stub for the given operation,
// passing (context, left, right) via the stub's call descriptor.
#define GET_STUB(Name)                                                       \
  do {                                                                       \
    Callable callable = CodeFactory::Name(isolate());                        \
    HValue* stub = Add<HConstant>(callable.code());                          \
    instr = AddUncasted<HCallWithDescriptor>(stub, 0, callable.descriptor(), \
                                             ArrayVector(values));           \
  } while (false)

    switch (op) {
      default:
        UNREACHABLE();
      case Token::ADD:
        GET_STUB(Add);
        break;
      case Token::SUB:
        GET_STUB(Subtract);
        break;
      case Token::MUL:
        GET_STUB(Multiply);
        break;
      case Token::DIV:
        GET_STUB(Divide);
        break;
      case Token::MOD:
        GET_STUB(Modulus);
        break;
      case Token::BIT_OR:
        GET_STUB(BitwiseOr);
        break;
      case Token::BIT_AND:
        GET_STUB(BitwiseAnd);
        break;
      case Token::BIT_XOR:
        GET_STUB(BitwiseXor);
        break;
      case Token::SAR:
        GET_STUB(ShiftRight);
        break;
      case Token::SHR:
        GET_STUB(ShiftRightLogical);
        break;
      case Token::SHL:
        GET_STUB(ShiftLeft);
        break;
    }
#undef GET_STUB
  } else {
    switch (op) {
      case Token::ADD:
        instr = AddUncasted<HAdd>(left, right);
        break;
      case Token::SUB:
        instr = AddUncasted<HSub>(left, right);
        break;
      case Token::MUL:
        instr = AddUncasted<HMul>(left, right);
        break;
      case Token::MOD: {
        // With a known fixed RHS from feedback, guard on it so the modulus
        // can be strength-reduced; mismatches deopt.
        if (fixed_right_arg.IsJust() &&
            !right->EqualsInteger32Constant(fixed_right_arg.FromJust())) {
          HConstant* fixed_right =
              Add<HConstant>(static_cast<int>(fixed_right_arg.FromJust()));
          IfBuilder if_same(this);
          if_same.If<HCompareNumericAndBranch>(right, fixed_right, Token::EQ);
          if_same.Then();
          if_same.ElseDeopt(Deoptimizer::kUnexpectedRHSOfBinaryOperation);
          right = fixed_right;
        }
        instr = AddUncasted<HMod>(left, right);
        break;
      }
      case Token::DIV:
        instr = AddUncasted<HDiv>(left, right);
        break;
      case Token::BIT_XOR:
      case Token::BIT_AND:
        instr = AddUncasted<HBitwise>(op, left, right);
        break;
      case Token::BIT_OR: {
        // Recognize (x << a) | (x >>> (32 - a)) and emit a rotate instead.
        HValue *operand, *shift_amount;
        if (left_type->Is(Type::Signed32()) &&
            right_type->Is(Type::Signed32()) &&
            MatchRotateRight(left, right, &operand, &shift_amount)) {
          instr = AddUncasted<HRor>(operand, shift_amount);
        } else {
          instr = AddUncasted<HBitwise>(op, left, right);
        }
        break;
      }
      case Token::SAR:
        instr = AddUncasted<HSar>(left, right);
        break;
      case Token::SHR:
        instr = AddUncasted<HShr>(left, right);
        // x >>> 0 can produce values above kMaxInt; track as uint32.
        if (instr->IsShr() && CanBeZero(right)) {
          graph()->RecordUint32Instruction(instr);
        }
        break;
      case Token::SHL:
        instr = AddUncasted<HShl>(left, right);
        break;
      default:
        UNREACHABLE();
    }
  }

  if (instr->IsBinaryOperation()) {
    // Propagate observed representations so later inference phases can pick
    // unboxed representations.
    HBinaryOperation* binop = HBinaryOperation::cast(instr);
    binop->set_observed_input_representation(1, left_rep);
    binop->set_observed_input_representation(2, right_rep);
    binop->initialize_output_representation(result_rep);
    if (graph()->info()->IsStub()) {
      // Stub should not call into stub.
      instr->SetFlag(HValue::kCannotBeTagged);
      // And should truncate on HForceRepresentation already.
      if (left->IsForceRepresentation()) {
        left->CopyFlag(HValue::kTruncatingToSmi, instr);
        left->CopyFlag(HValue::kTruncatingToInt32, instr);
      }
      if (right->IsForceRepresentation()) {
        right->CopyFlag(HValue::kTruncatingToSmi, instr);
        right->CopyFlag(HValue::kTruncatingToInt32, instr);
      }
    }
  }
  return instr;
}
11485
11486
11487// Check for the form (%_ClassOf(foo) === 'BarClass').
11488static bool IsClassOfTest(CompareOperation* expr) {
11489 if (expr->op() != Token::EQ_STRICT) return false;
11490 CallRuntime* call = expr->left()->AsCallRuntime();
11491 if (call == NULL) return false;
11492 Literal* literal = expr->right()->AsLiteral();
11493 if (literal == NULL) return false;
11494 if (!literal->value()->IsString()) return false;
11495 if (!call->is_jsruntime() &&
11496 call->function()->function_id != Runtime::kInlineClassOf) {
11497 return false;
11498 }
11499 DCHECK(call->arguments()->length() == 1);
11500 return true;
11501}
11502
11503
11504void HOptimizedGraphBuilder::VisitBinaryOperation(BinaryOperation* expr) {
11505 DCHECK(!HasStackOverflow());
11506 DCHECK(current_block() != NULL);
11507 DCHECK(current_block()->HasPredecessor());
11508 switch (expr->op()) {
11509 case Token::COMMA:
11510 return VisitComma(expr);
11511 case Token::OR:
11512 case Token::AND:
11513 return VisitLogicalExpression(expr);
11514 default:
11515 return VisitArithmeticExpression(expr);
11516 }
11517}
11518
11519
11520void HOptimizedGraphBuilder::VisitComma(BinaryOperation* expr) {
11521 CHECK_ALIVE(VisitForEffect(expr->left()));
11522 // Visit the right subexpression in the same AST context as the entire
11523 // expression.
11524 Visit(expr->right());
11525}
11526
11527
// Translates the short-circuiting logical operators && and ||. The shape
// of the generated control flow depends on the surrounding AST context:
// test contexts branch directly to the context's targets, value contexts
// materialize the operand value, and effect contexts only thread control
// flow through (with extra empty blocks to keep the graph in edge-split
// form).
void HOptimizedGraphBuilder::VisitLogicalExpression(BinaryOperation* expr) {
  bool is_logical_and = expr->op() == Token::AND;
  if (ast_context()->IsTest()) {
    TestContext* context = TestContext::cast(ast_context());
    // Translate left subexpression.
    HBasicBlock* eval_right = graph()->CreateBasicBlock();
    if (is_logical_and) {
      // a && b: a false short-circuits to the false target.
      CHECK_BAILOUT(VisitForControl(expr->left(),
                                    eval_right,
                                    context->if_false()));
    } else {
      // a || b: a true short-circuits to the true target.
      CHECK_BAILOUT(VisitForControl(expr->left(),
                                    context->if_true(),
                                    eval_right));
    }

    // Translate right subexpression by visiting it in the same AST
    // context as the entire expression.
    CHECK(eval_right->HasPredecessor());
    eval_right->SetJoinId(expr->RightId());
    set_current_block(eval_right);
    Visit(expr->right());
  } else if (ast_context()->IsValue()) {
    CHECK_ALIVE(VisitForValue(expr->left()));
    DCHECK(current_block() != NULL);
    HValue* left_value = Top();

    // Short-circuit left values that always evaluate to the same boolean value.
    if (expr->left()->ToBooleanIsTrue() || expr->left()->ToBooleanIsFalse()) {
      // l (evals true) && r -> r
      // l (evals true) || r -> l
      // l (evals false) && r -> l
      // l (evals false) || r -> r
      if (is_logical_and == expr->left()->ToBooleanIsTrue()) {
        Drop(1);
        CHECK_ALIVE(VisitForValue(expr->right()));
      }
      return ast_context()->ReturnValue(Pop());
    }

    // We need an extra block to maintain edge-split form.
    HBasicBlock* empty_block = graph()->CreateBasicBlock();
    HBasicBlock* eval_right = graph()->CreateBasicBlock();
    ToBooleanICStub::Types expected(expr->left()->to_boolean_types());
    HBranch* test = is_logical_and
        ? New<HBranch>(left_value, expected, eval_right, empty_block)
        : New<HBranch>(left_value, expected, empty_block, eval_right);
    FinishCurrentBlock(test);

    set_current_block(eval_right);
    Drop(1);  // Value of the left subexpression.
    CHECK_BAILOUT(VisitForValue(expr->right()));

    // Join the short-circuit path (left value) with the evaluated-right
    // path; the result is whatever value is on top of the stack.
    HBasicBlock* join_block =
        CreateJoin(empty_block, current_block(), expr->id());
    set_current_block(join_block);
    return ast_context()->ReturnValue(Pop());

  } else {
    DCHECK(ast_context()->IsEffect());
    // In an effect context, we don't need the value of the left subexpression,
    // only its control flow and side effects. We need an extra block to
    // maintain edge-split form.
    HBasicBlock* empty_block = graph()->CreateBasicBlock();
    HBasicBlock* right_block = graph()->CreateBasicBlock();
    if (is_logical_and) {
      CHECK_BAILOUT(VisitForControl(expr->left(), right_block, empty_block));
    } else {
      CHECK_BAILOUT(VisitForControl(expr->left(), empty_block, right_block));
    }

    // TODO(kmillikin): Find a way to fix this. It's ugly that there are
    // actually two empty blocks (one here and one inserted by
    // TestContext::BuildBranch, and that they both have an HSimulate though the
    // second one is not a merge node, and that we really have no good AST ID to
    // put on that first HSimulate.

    // Technically, we should be able to handle the case when one side of
    // the test is not connected, but this can trip up liveness analysis
    // if we did not fully connect the test context based on some optimistic
    // assumption. If such an assumption was violated, we would end up with
    // an environment with optimized-out values. So we should always
    // conservatively connect the test context.

    CHECK(right_block->HasPredecessor());
    CHECK(empty_block->HasPredecessor());

    empty_block->SetJoinId(expr->id());

    right_block->SetJoinId(expr->RightId());
    set_current_block(right_block);
    CHECK_BAILOUT(VisitForEffect(expr->right()));
    right_block = current_block();

    HBasicBlock* join_block =
        CreateJoin(empty_block, right_block, expr->id());
    set_current_block(join_block);
    // We did not materialize any value in the predecessor environments,
    // so there is no need to handle it here.
  }
}
11629
11630
11631void HOptimizedGraphBuilder::VisitArithmeticExpression(BinaryOperation* expr) {
11632 CHECK_ALIVE(VisitForValue(expr->left()));
11633 CHECK_ALIVE(VisitForValue(expr->right()));
11634 SetSourcePosition(expr->position());
11635 HValue* right = Pop();
11636 HValue* left = Pop();
11637 HValue* result =
11638 BuildBinaryOperation(expr, left, right,
11639 ast_context()->IsEffect() ? NO_PUSH_BEFORE_SIMULATE
11640 : PUSH_BEFORE_SIMULATE);
11641 if (top_info()->is_tracking_positions() && result->IsBinaryOperation()) {
11642 HBinaryOperation::cast(result)->SetOperandPositions(
11643 zone(),
11644 ScriptPositionToSourcePosition(expr->left()->position()),
11645 ScriptPositionToSourcePosition(expr->right()->position()));
11646 }
11647 return ast_context()->ReturnValue(result);
11648}
11649
11650
11651void HOptimizedGraphBuilder::HandleLiteralCompareTypeof(CompareOperation* expr,
11652 Expression* sub_expr,
11653 Handle<String> check) {
11654 CHECK_ALIVE(VisitForTypeOf(sub_expr));
11655 SetSourcePosition(expr->position());
11656 HValue* value = Pop();
11657 HTypeofIsAndBranch* instr = New<HTypeofIsAndBranch>(value, check);
11658 return ast_context()->ReturnControl(instr, expr->id());
11659}
11660
Ben Murdoch61f157c2016-09-16 13:49:30 +010011661namespace {
Ben Murdoch4a90d5f2016-03-22 12:00:34 +000011662
Ben Murdoch61f157c2016-09-16 13:49:30 +010011663bool IsLiteralCompareStrict(Isolate* isolate, HValue* left, Token::Value op,
11664 HValue* right) {
Ben Murdoch4a90d5f2016-03-22 12:00:34 +000011665 return op == Token::EQ_STRICT &&
Ben Murdoch61f157c2016-09-16 13:49:30 +010011666 ((left->IsConstant() &&
11667 !HConstant::cast(left)->handle(isolate)->IsNumber() &&
11668 !HConstant::cast(left)->handle(isolate)->IsSimd128Value() &&
11669 !HConstant::cast(left)->handle(isolate)->IsString()) ||
11670 (right->IsConstant() &&
11671 !HConstant::cast(right)->handle(isolate)->IsNumber() &&
11672 !HConstant::cast(right)->handle(isolate)->IsSimd128Value() &&
11673 !HConstant::cast(right)->handle(isolate)->IsString()));
Ben Murdoch4a90d5f2016-03-22 12:00:34 +000011674}
11675
Ben Murdoch61f157c2016-09-16 13:49:30 +010011676} // namespace
Ben Murdoch4a90d5f2016-03-22 12:00:34 +000011677
// Builds graph nodes for a comparison expression. Tries a series of fast
// cases first (typeof/null/undefined literal compares, %_ClassOf tests,
// identity compares, instanceof with a known constructor, `in`), and only
// falls back to the generic BuildCompareInstruction path at the end.
void HOptimizedGraphBuilder::VisitCompareOperation(CompareOperation* expr) {
  DCHECK(!HasStackOverflow());
  DCHECK(current_block() != NULL);
  DCHECK(current_block()->HasPredecessor());

  if (!top_info()->is_tracking_positions()) SetSourcePosition(expr->position());

  // Check for a few fast cases. The AST visiting behavior must be in sync
  // with the full codegen: We don't push both left and right values onto
  // the expression stack when one side is a special-case literal.
  Expression* sub_expr = NULL;
  Handle<String> check;
  if (expr->IsLiteralCompareTypeof(&sub_expr, &check)) {
    return HandleLiteralCompareTypeof(expr, sub_expr, check);
  }
  if (expr->IsLiteralCompareUndefined(&sub_expr)) {
    return HandleLiteralCompareNil(expr, sub_expr, kUndefinedValue);
  }
  if (expr->IsLiteralCompareNull(&sub_expr)) {
    return HandleLiteralCompareNil(expr, sub_expr, kNullValue);
  }

  // Pattern `%_ClassOf(x) === "Literal"`: emit a dedicated branch.
  if (IsClassOfTest(expr)) {
    CallRuntime* call = expr->left()->AsCallRuntime();
    DCHECK(call->arguments()->length() == 1);
    CHECK_ALIVE(VisitForValue(call->arguments()->at(0)));
    HValue* value = Pop();
    Literal* literal = expr->right()->AsLiteral();
    Handle<String> rhs = Handle<String>::cast(literal->value());
    HClassOfTestAndBranch* instr = New<HClassOfTestAndBranch>(value, rhs);
    return ast_context()->ReturnControl(instr, expr->id());
  }

  // Snapshot the typing info before visiting the operands.
  Type* left_type = bounds_.get(expr->left()).lower;
  Type* right_type = bounds_.get(expr->right()).lower;
  Type* combined_type = expr->combined_type();

  CHECK_ALIVE(VisitForValue(expr->left()));
  CHECK_ALIVE(VisitForValue(expr->right()));

  HValue* right = Pop();
  HValue* left = Pop();
  Token::Value op = expr->op();

  // Strict equality against a constant that is neither number, SIMD value
  // nor string reduces to a reference comparison.
  if (IsLiteralCompareStrict(isolate(), left, op, right)) {
    HCompareObjectEqAndBranch* result =
        New<HCompareObjectEqAndBranch>(left, right);
    return ast_context()->ReturnControl(result, expr->id());
  }

  if (op == Token::INSTANCEOF) {
    // Check to see if the rhs of the instanceof is a known function.
    if (right->IsConstant() &&
        HConstant::cast(right)->handle(isolate())->IsJSFunction()) {
      Handle<JSFunction> function =
          Handle<JSFunction>::cast(HConstant::cast(right)->handle(isolate()));
      // Make sure the prototype of {function} is the %FunctionPrototype%, and
      // it already has a meaningful initial map (i.e. we constructed at least
      // one instance using the constructor {function}).
      // We can only use the fast case if @@hasInstance was not used so far.
      if (function->has_initial_map() &&
          function->map()->prototype() ==
              function->native_context()->closure() &&
          !function->map()->has_non_instance_prototype() &&
          isolate()->IsHasInstanceLookupChainIntact()) {
        // Register dependencies so this code deopts if the assumptions above
        // are invalidated later (initial map change, @@hasInstance install).
        Handle<Map> initial_map(function->initial_map(), isolate());
        top_info()->dependencies()->AssumeInitialMapCantChange(initial_map);
        top_info()->dependencies()->AssumePropertyCell(
            isolate()->factory()->has_instance_protector());
        HInstruction* prototype =
            Add<HConstant>(handle(initial_map->prototype(), isolate()));
        HHasInPrototypeChainAndBranch* result =
            New<HHasInPrototypeChainAndBranch>(left, prototype);
        return ast_context()->ReturnControl(result, expr->id());
      }
    }

    // Generic instanceof: call the InstanceOf stub.
    Callable callable = CodeFactory::InstanceOf(isolate());
    HValue* stub = Add<HConstant>(callable.code());
    HValue* values[] = {context(), left, right};
    HCallWithDescriptor* result = New<HCallWithDescriptor>(
        stub, 0, callable.descriptor(), ArrayVector(values));
    result->set_type(HType::Boolean());
    return ast_context()->ReturnInstruction(result, expr->id());

  } else if (op == Token::IN) {
    // `in` operator: call the HasProperty stub.
    Callable callable = CodeFactory::HasProperty(isolate());
    HValue* stub = Add<HConstant>(callable.code());
    HValue* values[] = {context(), left, right};
    HInstruction* result =
        New<HCallWithDescriptor>(stub, 0, callable.descriptor(),
                                 Vector<HValue*>(values, arraysize(values)));
    return ast_context()->ReturnInstruction(result, expr->id());
  }

  PushBeforeSimulateBehavior push_behavior =
      ast_context()->IsEffect() ? NO_PUSH_BEFORE_SIMULATE
                                : PUSH_BEFORE_SIMULATE;
  HControlInstruction* compare = BuildCompareInstruction(
      op, left, right, left_type, right_type, combined_type,
      ScriptPositionToSourcePosition(expr->left()->position()),
      ScriptPositionToSourcePosition(expr->right()->position()),
      push_behavior, expr->id());
  if (compare == NULL) return;  // Bailed out.
  return ast_context()->ReturnControl(compare, expr->id());
}
11784
11785
// Emits the control instruction for a comparison, specialized on the
// combined type feedback collected by the CompareIC. Returns NULL when the
// builder bails out (caller must check). Side-effecting generic compares
// are followed by a simulate; push_sim_result controls whether the result
// is kept live across that simulate.
HControlInstruction* HOptimizedGraphBuilder::BuildCompareInstruction(
    Token::Value op, HValue* left, HValue* right, Type* left_type,
    Type* right_type, Type* combined_type, SourcePosition left_position,
    SourcePosition right_position, PushBeforeSimulateBehavior push_sim_result,
    BailoutId bailout_id) {
  // Cases handled below depend on collected type feedback. They should
  // soft deoptimize when there is no type feedback.
  if (!combined_type->IsInhabited()) {
    Add<HDeoptimize>(
        Deoptimizer::kInsufficientTypeFeedbackForCombinedTypeOfBinaryOperation,
        Deoptimizer::SOFT);
    combined_type = left_type = right_type = Type::Any();
  }

  Representation left_rep = RepresentationFor(left_type);
  Representation right_rep = RepresentationFor(right_type);
  Representation combined_rep = RepresentationFor(combined_type);

  if (combined_type->Is(Type::Receiver())) {
    if (Token::IsEqualityOp(op)) {
      // HCompareObjectEqAndBranch can only deal with object, so
      // exclude numbers.
      if ((left->IsConstant() &&
           HConstant::cast(left)->HasNumberValue()) ||
          (right->IsConstant() &&
           HConstant::cast(right)->HasNumberValue())) {
        Add<HDeoptimize>(Deoptimizer::kTypeMismatchBetweenFeedbackAndConstant,
                         Deoptimizer::SOFT);
        // The caller expects a branch instruction, so make it happy.
        return New<HBranch>(graph()->GetConstantTrue());
      }
      // Can we get away with map check and not instance type check?
      // Check the operand defined in the earlier block, so the check
      // dominates both uses.
      HValue* operand_to_check =
          left->block()->block_id() < right->block()->block_id() ? left : right;
      if (combined_type->IsClass()) {
        Handle<Map> map = combined_type->AsClass()->Map();
        AddCheckMap(operand_to_check, map);
        HCompareObjectEqAndBranch* result =
            New<HCompareObjectEqAndBranch>(left, right);
        if (top_info()->is_tracking_positions()) {
          result->set_operand_position(zone(), 0, left_position);
          result->set_operand_position(zone(), 1, right_position);
        }
        return result;
      } else {
        // No single map known: fall back to an instance type check.
        BuildCheckHeapObject(operand_to_check);
        Add<HCheckInstanceType>(operand_to_check,
                                HCheckInstanceType::IS_JS_RECEIVER);
        HCompareObjectEqAndBranch* result =
            New<HCompareObjectEqAndBranch>(left, right);
        return result;
      }
    } else {
      if (combined_type->IsClass()) {
        // TODO(bmeurer): This is an optimized version of an x < y, x > y,
        // x <= y or x >= y, where both x and y are spec objects with the
        // same map. The CompareIC collects this map for us. So if we know
        // that there's no @@toPrimitive on the map (including the prototype
        // chain), and both valueOf and toString are the default initial
        // implementations (on the %ObjectPrototype%), then we can reduce
        // the comparison to map checks on x and y, because the comparison
        // will turn into a comparison of "[object CLASS]" to itself (the
        // default outcome of toString, since valueOf returns a spec object).
        // This is pretty much adhoc, so in TurboFan we could do a lot better
        // and inline the interesting parts of ToPrimitive (actually we could
        // even do that in Crankshaft but we don't want to waste too much
        // time on this now).
        DCHECK(Token::IsOrderedRelationalCompareOp(op));
        Handle<Map> map = combined_type->AsClass()->Map();
        PropertyAccessInfo value_of(this, LOAD, map,
                                    isolate()->factory()->valueOf_string());
        PropertyAccessInfo to_primitive(
            this, LOAD, map, isolate()->factory()->to_primitive_symbol());
        PropertyAccessInfo to_string(this, LOAD, map,
                                     isolate()->factory()->toString_string());
        PropertyAccessInfo to_string_tag(
            this, LOAD, map, isolate()->factory()->to_string_tag_symbol());
        if (to_primitive.CanAccessMonomorphic() && !to_primitive.IsFound() &&
            to_string_tag.CanAccessMonomorphic() &&
            (!to_string_tag.IsFound() || to_string_tag.IsData() ||
             to_string_tag.IsDataConstant()) &&
            value_of.CanAccessMonomorphic() && value_of.IsDataConstant() &&
            value_of.constant().is_identical_to(isolate()->object_value_of()) &&
            to_string.CanAccessMonomorphic() && to_string.IsDataConstant() &&
            to_string.constant().is_identical_to(
                isolate()->object_to_string())) {
          // We depend on the prototype chain to stay the same, because we
          // also need to deoptimize when someone installs @@toPrimitive
          // or @@toStringTag somewhere in the prototype chain.
          BuildCheckPrototypeMaps(handle(JSObject::cast(map->prototype())),
                                  Handle<JSObject>::null());
          AddCheckMap(left, map);
          AddCheckMap(right, map);
          // The caller expects a branch instruction, so make it happy.
          return New<HBranch>(
              graph()->GetConstantBool(op == Token::LTE || op == Token::GTE));
        }
      }
      Bailout(kUnsupportedNonPrimitiveCompare);
      return NULL;
    }
  } else if (combined_type->Is(Type::InternalizedString()) &&
             Token::IsEqualityOp(op)) {
    // If we have a constant argument, it should be consistent with the type
    // feedback (otherwise we fail assertions in HCompareObjectEqAndBranch).
    if ((left->IsConstant() &&
         !HConstant::cast(left)->HasInternalizedStringValue()) ||
        (right->IsConstant() &&
         !HConstant::cast(right)->HasInternalizedStringValue())) {
      Add<HDeoptimize>(Deoptimizer::kTypeMismatchBetweenFeedbackAndConstant,
                       Deoptimizer::SOFT);
      // The caller expects a branch instruction, so make it happy.
      return New<HBranch>(graph()->GetConstantTrue());
    }
    // Internalized strings are unique, so equality is reference equality.
    BuildCheckHeapObject(left);
    Add<HCheckInstanceType>(left, HCheckInstanceType::IS_INTERNALIZED_STRING);
    BuildCheckHeapObject(right);
    Add<HCheckInstanceType>(right, HCheckInstanceType::IS_INTERNALIZED_STRING);
    HCompareObjectEqAndBranch* result =
        New<HCompareObjectEqAndBranch>(left, right);
    return result;
  } else if (combined_type->Is(Type::String())) {
    BuildCheckHeapObject(left);
    Add<HCheckInstanceType>(left, HCheckInstanceType::IS_STRING);
    BuildCheckHeapObject(right);
    Add<HCheckInstanceType>(right, HCheckInstanceType::IS_STRING);
    HStringCompareAndBranch* result =
        New<HStringCompareAndBranch>(left, right, op);
    return result;
  } else if (combined_type->Is(Type::Boolean())) {
    AddCheckMap(left, isolate()->factory()->boolean_map());
    AddCheckMap(right, isolate()->factory()->boolean_map());
    if (Token::IsEqualityOp(op)) {
      HCompareObjectEqAndBranch* result =
          New<HCompareObjectEqAndBranch>(left, right);
      return result;
    }
    // Relational compare on booleans: load the numeric oddball values and
    // compare those.
    left = Add<HLoadNamedField>(
        left, nullptr,
        HObjectAccess::ForOddballToNumber(Representation::Smi()));
    right = Add<HLoadNamedField>(
        right, nullptr,
        HObjectAccess::ForOddballToNumber(Representation::Smi()));
    HCompareNumericAndBranch* result =
        New<HCompareNumericAndBranch>(left, right, op);
    return result;
  } else {
    // Abstract equality against an undetectable oddball constant reduces to
    // an undetectable check on the other operand.
    if (op == Token::EQ) {
      if (left->IsConstant() &&
          HConstant::cast(left)->GetInstanceType() == ODDBALL_TYPE &&
          HConstant::cast(left)->IsUndetectable()) {
        return New<HIsUndetectableAndBranch>(right);
      }

      if (right->IsConstant() &&
          HConstant::cast(right)->GetInstanceType() == ODDBALL_TYPE &&
          HConstant::cast(right)->IsUndetectable()) {
        return New<HIsUndetectableAndBranch>(left);
      }
    }

    if (combined_rep.IsTagged() || combined_rep.IsNone()) {
      // Generic compare with observable side effects; needs a simulate.
      HCompareGeneric* result = Add<HCompareGeneric>(left, right, op);
      result->set_observed_input_representation(1, left_rep);
      result->set_observed_input_representation(2, right_rep);
      if (result->HasObservableSideEffects()) {
        if (push_sim_result == PUSH_BEFORE_SIMULATE) {
          Push(result);
          AddSimulate(bailout_id, REMOVABLE_SIMULATE);
          Drop(1);
        } else {
          AddSimulate(bailout_id, REMOVABLE_SIMULATE);
        }
      }
      // TODO(jkummerow): Can we make this more efficient?
      HBranch* branch = New<HBranch>(result);
      return branch;
    } else {
      HCompareNumericAndBranch* result =
          New<HCompareNumericAndBranch>(left, right, op);
      result->set_observed_input_representation(left_rep, right_rep);
      if (top_info()->is_tracking_positions()) {
        result->SetOperandPositions(zone(), left_position, right_position);
      }
      return result;
    }
  }
}
11974
11975
// Fast path for comparisons against the null/undefined literal. For strict
// equality this is a reference compare against the nil constant; for
// abstract equality (== null / == undefined) it is an undetectable check,
// which also matches the "other" nil and undetectable objects.
void HOptimizedGraphBuilder::HandleLiteralCompareNil(CompareOperation* expr,
                                                     Expression* sub_expr,
                                                     NilValue nil) {
  DCHECK(!HasStackOverflow());
  DCHECK(current_block() != NULL);
  DCHECK(current_block()->HasPredecessor());
  DCHECK(expr->op() == Token::EQ || expr->op() == Token::EQ_STRICT);
  if (!top_info()->is_tracking_positions()) SetSourcePosition(expr->position());
  CHECK_ALIVE(VisitForValue(sub_expr));
  HValue* value = Pop();
  HControlInstruction* instr;
  if (expr->op() == Token::EQ_STRICT) {
    HConstant* nil_constant = nil == kNullValue
        ? graph()->GetConstantNull()
        : graph()->GetConstantUndefined();
    instr = New<HCompareObjectEqAndBranch>(value, nil_constant);
  } else {
    DCHECK_EQ(Token::EQ, expr->op());
    instr = New<HIsUndetectableAndBranch>(value);
  }
  return ast_context()->ReturnControl(instr, expr->id());
}
11998
11999
// Spread nodes are never expected to reach this builder; abort if one does.
void HOptimizedGraphBuilder::VisitSpread(Spread* expr) { UNREACHABLE(); }
12001
12002
// EmptyParentheses nodes are never expected to reach this builder.
void HOptimizedGraphBuilder::VisitEmptyParentheses(EmptyParentheses* expr) {
  UNREACHABLE();
}
12006
12007
// Convenience wrapper: builds the this-function value and inserts it into
// the current block.
HValue* HOptimizedGraphBuilder::AddThisFunction() {
  return AddInstruction(BuildThisFunction());
}
12011
12012
12013HInstruction* HOptimizedGraphBuilder::BuildThisFunction() {
12014 // If we share optimized code between different closures, the
12015 // this-function is not a constant, except inside an inlined body.
12016 if (function_state()->outer() != NULL) {
12017 return New<HConstant>(
12018 function_state()->compilation_info()->closure());
12019 } else {
12020 return New<HThisFunction>();
12021 }
12022}
12023
12024
// Emits inline code that materializes a copy of |boilerplate_object| (an
// object or array literal), recursing into nested object-valued properties
// and elements via BuildEmitInObjectProperties/BuildEmitElements. The
// allocation-site context drives pretenuring decisions and registers the
// dependencies that deoptimize this code if those decisions change.
HInstruction* HOptimizedGraphBuilder::BuildFastLiteral(
    Handle<JSObject> boilerplate_object,
    AllocationSiteUsageContext* site_context) {
  NoObservableSideEffectsScope no_effects(this);
  Handle<Map> initial_map(boilerplate_object->map());
  InstanceType instance_type = initial_map->instance_type();
  DCHECK(instance_type == JS_ARRAY_TYPE || instance_type == JS_OBJECT_TYPE);

  HType type = instance_type == JS_ARRAY_TYPE
      ? HType::JSArray() : HType::JSObject();
  HValue* object_size_constant = Add<HConstant>(initial_map->instance_size());

  PretenureFlag pretenure_flag = NOT_TENURED;
  Handle<AllocationSite> top_site(*site_context->top(), isolate());
  if (FLAG_allocation_site_pretenuring) {
    pretenure_flag = top_site->GetPretenureMode();
  }

  Handle<AllocationSite> current_site(*site_context->current(), isolate());
  if (*top_site == *current_site) {
    // We install a dependency for pretenuring only on the outermost literal.
    top_info()->dependencies()->AssumeTenuringDecision(top_site);
  }
  top_info()->dependencies()->AssumeTransitionStable(current_site);

  HInstruction* object =
      Add<HAllocate>(object_size_constant, type, pretenure_flag, instance_type,
                     graph()->GetConstant0(), top_site);

  // If allocation folding reaches Page::kMaxRegularHeapObjectSize the
  // elements array may not get folded into the object. Hence, we set the
  // elements pointer to empty fixed array and let store elimination remove
  // this store in the folding case.
  HConstant* empty_fixed_array = Add<HConstant>(
      isolate()->factory()->empty_fixed_array());
  Add<HStoreNamedField>(object, HObjectAccess::ForElementsPointer(),
                        empty_fixed_array);

  BuildEmitObjectHeader(boilerplate_object, object);

  // Similarly to the elements pointer, there is no guarantee that all
  // property allocations can get folded, so pre-initialize all in-object
  // properties to a safe value.
  BuildInitializeInobjectProperties(object, initial_map);

  // COW element stores are shared with the boilerplate, so they need no
  // inline copy (elements_size == 0 selects the constant-store path below).
  Handle<FixedArrayBase> elements(boilerplate_object->elements());
  int elements_size = (elements->length() > 0 &&
      elements->map() != isolate()->heap()->fixed_cow_array_map()) ?
          elements->Size() : 0;

  if (pretenure_flag == TENURED &&
      elements->map() == isolate()->heap()->fixed_cow_array_map() &&
      isolate()->heap()->InNewSpace(*elements)) {
    // If we would like to pretenure a fixed cow array, we must ensure that the
    // array is already in old space, otherwise we'll create too many old-to-
    // new-space pointers (overflowing the store buffer).
    elements = Handle<FixedArrayBase>(
        isolate()->factory()->CopyAndTenureFixedCOWArray(
            Handle<FixedArray>::cast(elements)));
    boilerplate_object->set_elements(*elements);
  }

  HInstruction* object_elements = NULL;
  if (elements_size > 0) {
    // Allocate a fresh backing store and copy the boilerplate elements.
    HValue* object_elements_size = Add<HConstant>(elements_size);
    InstanceType instance_type = boilerplate_object->HasFastDoubleElements()
        ? FIXED_DOUBLE_ARRAY_TYPE : FIXED_ARRAY_TYPE;
    object_elements = Add<HAllocate>(object_elements_size, HType::HeapObject(),
                                     pretenure_flag, instance_type,
                                     graph()->GetConstant0(), top_site);
    BuildEmitElements(boilerplate_object, elements, object_elements,
                      site_context);
    Add<HStoreNamedField>(object, HObjectAccess::ForElementsPointer(),
                          object_elements);
  } else {
    // Empty or COW elements: share the boilerplate's backing store.
    Handle<Object> elements_field =
        Handle<Object>(boilerplate_object->elements(), isolate());
    HInstruction* object_elements_cow = Add<HConstant>(elements_field);
    Add<HStoreNamedField>(object, HObjectAccess::ForElementsPointer(),
                          object_elements_cow);
  }

  // Copy in-object properties.
  if (initial_map->NumberOfFields() != 0 ||
      initial_map->unused_property_fields() > 0) {
    BuildEmitInObjectProperties(boilerplate_object, object, site_context,
                                pretenure_flag);
  }
  return object;
}
12115
12116
// Writes the header fields of the freshly allocated literal copy |object|:
// the map (taken from the boilerplate), the properties pointer, and, for
// arrays, the length. The elements pointer is handled by the caller.
void HOptimizedGraphBuilder::BuildEmitObjectHeader(
    Handle<JSObject> boilerplate_object,
    HInstruction* object) {
  // Boilerplates have no out-of-object properties (checked below), so the
  // properties field is always the canonical empty fixed array.
  DCHECK(boilerplate_object->properties()->length() == 0);

  Handle<Map> boilerplate_object_map(boilerplate_object->map());
  AddStoreMapConstant(object, boilerplate_object_map);

  Handle<Object> properties_field =
      Handle<Object>(boilerplate_object->properties(), isolate());
  DCHECK(*properties_field == isolate()->heap()->empty_fixed_array());
  HInstruction* properties = Add<HConstant>(properties_field);
  HObjectAccess access = HObjectAccess::ForPropertiesPointer();
  Add<HStoreNamedField>(object, access, properties);

  if (boilerplate_object->IsJSArray()) {
    // Arrays additionally carry a (Smi) length field.
    Handle<JSArray> boilerplate_array =
        Handle<JSArray>::cast(boilerplate_object);
    Handle<Object> length_field =
        Handle<Object>(boilerplate_array->length(), isolate());
    HInstruction* length = Add<HConstant>(length_field);

    DCHECK(boilerplate_array->length()->IsSmi());
    Add<HStoreNamedField>(object, HObjectAccess::ForArrayLength(
        boilerplate_array->GetElementsKind()), length);
  }
}
12144
12145
// Copies the in-object data properties of the boilerplate into the literal
// copy |object|. Nested JSObject values recurse through BuildFastLiteral;
// double values get a fresh mutable HeapNumber box; remaining in-object
// slots are filled with the one-pointer filler map.
void HOptimizedGraphBuilder::BuildEmitInObjectProperties(
    Handle<JSObject> boilerplate_object,
    HInstruction* object,
    AllocationSiteUsageContext* site_context,
    PretenureFlag pretenure_flag) {
  Handle<Map> boilerplate_map(boilerplate_object->map());
  Handle<DescriptorArray> descriptors(boilerplate_map->instance_descriptors());
  int limit = boilerplate_map->NumberOfOwnDescriptors();

  int copied_fields = 0;
  for (int i = 0; i < limit; i++) {
    PropertyDetails details = descriptors->GetDetails(i);
    // Only plain data fields are copied; other property kinds are skipped.
    if (details.type() != DATA) continue;
    copied_fields++;
    FieldIndex field_index = FieldIndex::ForDescriptor(*boilerplate_map, i);


    int property_offset = field_index.offset();
    Handle<Name> name(descriptors->GetKey(i));

    // The access for the store depends on the type of the boilerplate.
    HObjectAccess access = boilerplate_object->IsJSArray() ?
        HObjectAccess::ForJSArrayOffset(property_offset) :
        HObjectAccess::ForMapAndOffset(boilerplate_map, property_offset);

    if (boilerplate_object->IsUnboxedDoubleField(field_index)) {
      // Unboxed double fields are stored directly as raw doubles.
      CHECK(!boilerplate_object->IsJSArray());
      double value = boilerplate_object->RawFastDoublePropertyAt(field_index);
      access = access.WithRepresentation(Representation::Double());
      Add<HStoreNamedField>(object, access, Add<HConstant>(value));
      continue;
    }
    Handle<Object> value(boilerplate_object->RawFastPropertyAt(field_index),
                         isolate());

    if (value->IsJSObject()) {
      // Nested literal: materialize a copy recursively, tracking the nested
      // allocation site.
      Handle<JSObject> value_object = Handle<JSObject>::cast(value);
      Handle<AllocationSite> current_site = site_context->EnterNewScope();
      HInstruction* result =
          BuildFastLiteral(value_object, site_context);
      site_context->ExitScope(current_site, value_object);
      Add<HStoreNamedField>(object, access, result);
    } else {
      Representation representation = details.representation();
      HInstruction* value_instruction;

      if (representation.IsDouble()) {
        // Allocate a HeapNumber box and store the value into it.
        HValue* heap_number_constant = Add<HConstant>(HeapNumber::kSize);
        HInstruction* double_box = Add<HAllocate>(
            heap_number_constant, HType::HeapObject(), pretenure_flag,
            MUTABLE_HEAP_NUMBER_TYPE, graph()->GetConstant0());
        AddStoreMapConstant(double_box,
            isolate()->factory()->mutable_heap_number_map());
        // Unwrap the mutable heap number from the boilerplate.
        HValue* double_value =
            Add<HConstant>(Handle<HeapNumber>::cast(value)->value());
        Add<HStoreNamedField>(
            double_box, HObjectAccess::ForHeapNumberValue(), double_value);
        value_instruction = double_box;
      } else if (representation.IsSmi()) {
        // Uninitialized fields are represented as Smi zero in the copy.
        value_instruction = value->IsUninitialized(isolate())
                                ? graph()->GetConstant0()
                                : Add<HConstant>(value);
        // Ensure that value is stored as smi.
        access = access.WithRepresentation(representation);
      } else {
        value_instruction = Add<HConstant>(value);
      }

      Add<HStoreNamedField>(object, access, value_instruction);
    }
  }

  // Fill the remaining (unused) in-object slots with the filler map so the
  // object is fully initialized for the GC.
  int inobject_properties = boilerplate_object->map()->GetInObjectProperties();
  HInstruction* value_instruction =
      Add<HConstant>(isolate()->factory()->one_pointer_filler_map());
  for (int i = copied_fields; i < inobject_properties; i++) {
    DCHECK(boilerplate_object->IsJSObject());
    int property_offset = boilerplate_object->GetInObjectPropertyOffset(i);
    HObjectAccess access =
        HObjectAccess::ForMapAndOffset(boilerplate_map, property_offset);
    Add<HStoreNamedField>(object, access, value_instruction);
  }
}
12231
12232
// Initializes the header of the freshly allocated elements store and copies
// the boilerplate elements into it, dispatching on the backing-store type.
void HOptimizedGraphBuilder::BuildEmitElements(
    Handle<JSObject> boilerplate_object,
    Handle<FixedArrayBase> elements,
    HValue* object_elements,
    AllocationSiteUsageContext* site_context) {
  ElementsKind kind = boilerplate_object->map()->elements_kind();
  int elements_length = elements->length();
  HValue* object_elements_length = Add<HConstant>(elements_length);
  BuildInitializeElementsHeader(object_elements, kind, object_elements_length);

  // Copy elements backing store content.
  if (elements->IsFixedDoubleArray()) {
    BuildEmitFixedDoubleArray(elements, kind, object_elements);
  } else if (elements->IsFixedArray()) {
    // Only fixed arrays may contain nested literals; pass the site context.
    BuildEmitFixedArray(elements, kind, object_elements,
                        site_context);
  } else {
    UNREACHABLE();
  }
}
12253
12254
12255void HOptimizedGraphBuilder::BuildEmitFixedDoubleArray(
12256 Handle<FixedArrayBase> elements,
12257 ElementsKind kind,
12258 HValue* object_elements) {
12259 HInstruction* boilerplate_elements = Add<HConstant>(elements);
12260 int elements_length = elements->length();
12261 for (int i = 0; i < elements_length; i++) {
12262 HValue* key_constant = Add<HConstant>(i);
12263 HInstruction* value_instruction =
12264 Add<HLoadKeyed>(boilerplate_elements, key_constant, nullptr, nullptr,
12265 kind, ALLOW_RETURN_HOLE);
12266 HInstruction* store = Add<HStoreKeyed>(object_elements, key_constant,
12267 value_instruction, nullptr, kind);
12268 store->SetFlag(HValue::kAllowUndefinedAsNaN);
12269 }
12270}
12271
12272
// Copies every element of a tagged boilerplate backing store into the
// freshly allocated |object_elements| array. JSObject elements (nested
// literals) are materialized recursively via BuildFastLiteral.
void HOptimizedGraphBuilder::BuildEmitFixedArray(
    Handle<FixedArrayBase> elements,
    ElementsKind kind,
    HValue* object_elements,
    AllocationSiteUsageContext* site_context) {
  HInstruction* boilerplate_elements = Add<HConstant>(elements);
  int elements_length = elements->length();
  Handle<FixedArray> fast_elements = Handle<FixedArray>::cast(elements);
  for (int i = 0; i < elements_length; i++) {
    Handle<Object> value(fast_elements->get(i), isolate());
    HValue* key_constant = Add<HConstant>(i);
    if (value->IsJSObject()) {
      // Nested literal: copy it recursively under its own allocation site.
      Handle<JSObject> value_object = Handle<JSObject>::cast(value);
      Handle<AllocationSite> current_site = site_context->EnterNewScope();
      HInstruction* result =
          BuildFastLiteral(value_object, site_context);
      site_context->ExitScope(current_site, value_object);
      Add<HStoreKeyed>(object_elements, key_constant, result, nullptr, kind);
    } else {
      // Copy holey Smi arrays as FAST_HOLEY_ELEMENTS so the hole survives
      // the load/store round trip.
      ElementsKind copy_kind =
          kind == FAST_HOLEY_SMI_ELEMENTS ? FAST_HOLEY_ELEMENTS : kind;
      HInstruction* value_instruction =
          Add<HLoadKeyed>(boilerplate_elements, key_constant, nullptr, nullptr,
                          copy_kind, ALLOW_RETURN_HOLE);
      Add<HStoreKeyed>(object_elements, key_constant, value_instruction,
                       nullptr, copy_kind);
    }
  }
}
12302
12303
// Builds the value for a ThisFunction AST node and returns it to the
// surrounding expression context.
void HOptimizedGraphBuilder::VisitThisFunction(ThisFunction* expr) {
  DCHECK(!HasStackOverflow());
  DCHECK(current_block() != NULL);
  DCHECK(current_block()->HasPredecessor());
  HInstruction* instr = BuildThisFunction();
  return ast_context()->ReturnInstruction(instr, expr->id());
}
12311
12312
// super.property references are not supported by this builder; bail out of
// optimized compilation.
void HOptimizedGraphBuilder::VisitSuperPropertyReference(
    SuperPropertyReference* expr) {
  DCHECK(!HasStackOverflow());
  DCHECK(current_block() != NULL);
  DCHECK(current_block()->HasPredecessor());
  return Bailout(kSuperReference);
}
12320
12321
// super(...) call references are not supported by this builder; bail out of
// optimized compilation.
void HOptimizedGraphBuilder::VisitSuperCallReference(SuperCallReference* expr) {
  DCHECK(!HasStackOverflow());
  DCHECK(current_block() != NULL);
  DCHECK(current_block()->HasPredecessor());
  return Bailout(kSuperReference);
}
12328
12329
// Visits all declarations of a scope. Global declarations are accumulated
// into globals_ by the individual visitors (as name/value pairs) and then
// declared in one batch via HDeclareGlobals.
void HOptimizedGraphBuilder::VisitDeclarations(
    ZoneList<Declaration*>* declarations) {
  DCHECK(globals_.is_empty());
  AstVisitor::VisitDeclarations(declarations);
  if (!globals_.is_empty()) {
    Handle<FixedArray> array =
        isolate()->factory()->NewFixedArray(globals_.length(), TENURED);
    for (int i = 0; i < globals_.length(); ++i) array->set(i, *globals_.at(i));
    int flags = current_info()->GetDeclareGlobalsFlags();
    Add<HDeclareGlobals>(array, flags);
    // Reset the accumulator for the next scope.
    globals_.Rewind(0);
  }
}
12343
12344
// Handles a variable declaration. Globals are queued for batched declaration
// (see VisitDeclarations); let/const bindings in locals or context slots are
// hole-initialized so premature access can be detected; dynamic-lookup slots
// force a bailout.
void HOptimizedGraphBuilder::VisitVariableDeclaration(
    VariableDeclaration* declaration) {
  VariableProxy* proxy = declaration->proxy();
  VariableMode mode = declaration->mode();
  Variable* variable = proxy->var();
  bool hole_init = mode == LET || mode == CONST;
  switch (variable->location()) {
    case VariableLocation::GLOBAL:
    case VariableLocation::UNALLOCATED:
      DCHECK(!variable->binding_needs_init());
      // Queue a name/initial-value pair; undefined is the initial value.
      globals_.Add(variable->name(), zone());
      globals_.Add(isolate()->factory()->undefined_value(), zone());
      return;
    case VariableLocation::PARAMETER:
    case VariableLocation::LOCAL:
      if (hole_init) {
        HValue* value = graph()->GetConstantHole();
        environment()->Bind(variable, value);
      }
      break;
    case VariableLocation::CONTEXT:
      if (hole_init) {
        // Context stores may have observable side effects and thus need a
        // simulate for deoptimization.
        HValue* value = graph()->GetConstantHole();
        HValue* context = environment()->context();
        HStoreContextSlot* store = Add<HStoreContextSlot>(
            context, variable->index(), HStoreContextSlot::kNoCheck, value);
        if (store->HasObservableSideEffects()) {
          Add<HSimulate>(proxy->id(), REMOVABLE_SIMULATE);
        }
      }
      break;
    case VariableLocation::LOOKUP:
      return Bailout(kUnsupportedLookupSlotInDeclaration);
  }
}
12380
12381
// Declares a function binding.  Globals queue the (name, SharedFunctionInfo)
// pair for VisitDeclarations; locals and context slots evaluate the function
// expression eagerly and bind/store the resulting closure value.
void HOptimizedGraphBuilder::VisitFunctionDeclaration(
    FunctionDeclaration* declaration) {
  VariableProxy* proxy = declaration->proxy();
  Variable* variable = proxy->var();
  switch (variable->location()) {
    case VariableLocation::GLOBAL:
    case VariableLocation::UNALLOCATED: {
      globals_.Add(variable->name(), zone());
      Handle<SharedFunctionInfo> function = Compiler::GetSharedFunctionInfo(
          declaration->fun(), current_info()->script(), top_info());
      // Check for stack-overflow exception.
      if (function.is_null()) return SetStackOverflow();
      globals_.Add(function, zone());
      return;
    }
    case VariableLocation::PARAMETER:
    case VariableLocation::LOCAL: {
      CHECK_ALIVE(VisitForValue(declaration->fun()));
      HValue* value = Pop();
      BindIfLive(variable, value);
      break;
    }
    case VariableLocation::CONTEXT: {
      CHECK_ALIVE(VisitForValue(declaration->fun()));
      HValue* value = Pop();
      HValue* context = environment()->context();
      HStoreContextSlot* store = Add<HStoreContextSlot>(
          context, variable->index(), HStoreContextSlot::kNoCheck, value);
      if (store->HasObservableSideEffects()) {
        Add<HSimulate>(proxy->id(), REMOVABLE_SIMULATE);
      }
      break;
    }
    case VariableLocation::LOOKUP:
      return Bailout(kUnsupportedLookupSlotInDeclaration);
  }
}
12419
12420
// Import declarations are never expected to reach this graph builder.
void HOptimizedGraphBuilder::VisitImportDeclaration(
    ImportDeclaration* declaration) {
  UNREACHABLE();
}
12425
12426
// Export declarations are never expected to reach this graph builder.
void HOptimizedGraphBuilder::VisitExportDeclaration(
    ExportDeclaration* declaration) {
  UNREACHABLE();
}
12431
12432
// A RewritableExpression merely wraps an expression the parser may have
// rewritten; compile the (possibly rewritten) inner expression.
void HOptimizedGraphBuilder::VisitRewritableExpression(
    RewritableExpression* node) {
  CHECK_ALIVE(Visit(node->expression()));
}
12437
12438
12439// Generators for inline runtime functions.
12440// Support for types.
12441void HOptimizedGraphBuilder::GenerateIsSmi(CallRuntime* call) {
12442 DCHECK(call->arguments()->length() == 1);
12443 CHECK_ALIVE(VisitForValue(call->arguments()->at(0)));
12444 HValue* value = Pop();
12445 HIsSmiAndBranch* result = New<HIsSmiAndBranch>(value);
12446 return ast_context()->ReturnControl(result, call->id());
12447}
12448
12449
12450void HOptimizedGraphBuilder::GenerateIsJSReceiver(CallRuntime* call) {
12451 DCHECK(call->arguments()->length() == 1);
12452 CHECK_ALIVE(VisitForValue(call->arguments()->at(0)));
12453 HValue* value = Pop();
12454 HHasInstanceTypeAndBranch* result =
12455 New<HHasInstanceTypeAndBranch>(value,
12456 FIRST_JS_RECEIVER_TYPE,
12457 LAST_JS_RECEIVER_TYPE);
12458 return ast_context()->ReturnControl(result, call->id());
12459}
12460
12461
Ben Murdoch4a90d5f2016-03-22 12:00:34 +000012462void HOptimizedGraphBuilder::GenerateHasCachedArrayIndex(CallRuntime* call) {
12463 DCHECK(call->arguments()->length() == 1);
12464 CHECK_ALIVE(VisitForValue(call->arguments()->at(0)));
12465 HValue* value = Pop();
12466 HHasCachedArrayIndexAndBranch* result =
12467 New<HHasCachedArrayIndexAndBranch>(value);
12468 return ast_context()->ReturnControl(result, call->id());
12469}
12470
12471
12472void HOptimizedGraphBuilder::GenerateIsArray(CallRuntime* call) {
12473 DCHECK(call->arguments()->length() == 1);
12474 CHECK_ALIVE(VisitForValue(call->arguments()->at(0)));
12475 HValue* value = Pop();
12476 HHasInstanceTypeAndBranch* result =
12477 New<HHasInstanceTypeAndBranch>(value, JS_ARRAY_TYPE);
12478 return ast_context()->ReturnControl(result, call->id());
12479}
12480
12481
12482void HOptimizedGraphBuilder::GenerateIsTypedArray(CallRuntime* call) {
12483 DCHECK(call->arguments()->length() == 1);
12484 CHECK_ALIVE(VisitForValue(call->arguments()->at(0)));
12485 HValue* value = Pop();
12486 HHasInstanceTypeAndBranch* result =
12487 New<HHasInstanceTypeAndBranch>(value, JS_TYPED_ARRAY_TYPE);
12488 return ast_context()->ReturnControl(result, call->id());
12489}
12490
12491
12492void HOptimizedGraphBuilder::GenerateIsRegExp(CallRuntime* call) {
12493 DCHECK(call->arguments()->length() == 1);
12494 CHECK_ALIVE(VisitForValue(call->arguments()->at(0)));
12495 HValue* value = Pop();
12496 HHasInstanceTypeAndBranch* result =
12497 New<HHasInstanceTypeAndBranch>(value, JS_REGEXP_TYPE);
12498 return ast_context()->ReturnControl(result, call->id());
12499}
12500
12501
// %_ToInteger(x): Smis are already integers and pass through; anything else
// is handed to the ToInteger stub.
void HOptimizedGraphBuilder::GenerateToInteger(CallRuntime* call) {
  DCHECK_EQ(1, call->arguments()->length());
  CHECK_ALIVE(VisitForValue(call->arguments()->at(0)));
  HValue* input = Pop();
  if (input->type().IsSmi()) {
    return ast_context()->ReturnValue(input);
  } else {
    Callable callable = CodeFactory::ToInteger(isolate());
    HValue* stub = Add<HConstant>(callable.code());
    HValue* values[] = {context(), input};
    HInstruction* result = New<HCallWithDescriptor>(
        stub, 0, callable.descriptor(), ArrayVector(values));
    return ast_context()->ReturnInstruction(result, call->id());
  }
}
12517
12518
// %_ToName(x): numbers are converted via the number-to-string cache, strings
// pass through; everything else goes to the ToName stub.
void HOptimizedGraphBuilder::GenerateToName(CallRuntime* call) {
  DCHECK_EQ(1, call->arguments()->length());
  CHECK_ALIVE(VisitForValue(call->arguments()->at(0)));
  HValue* input = Pop();
  if (input->type().IsSmi()) {
    HValue* result = BuildNumberToString(input, Type::SignedSmall());
    return ast_context()->ReturnValue(result);
  } else if (input->type().IsTaggedNumber()) {
    HValue* result = BuildNumberToString(input, Type::Number());
    return ast_context()->ReturnValue(result);
  } else if (input->type().IsString()) {
    // Strings are already valid names.
    return ast_context()->ReturnValue(input);
  } else {
    Callable callable = CodeFactory::ToName(isolate());
    HValue* stub = Add<HConstant>(callable.code());
    HValue* values[] = {context(), input};
    HInstruction* result = New<HCallWithDescriptor>(
        stub, 0, callable.descriptor(), ArrayVector(values));
    return ast_context()->ReturnInstruction(result, call->id());
  }
}
12540
12541
Ben Murdoch4a90d5f2016-03-22 12:00:34 +000012542void HOptimizedGraphBuilder::GenerateToObject(CallRuntime* call) {
12543 DCHECK_EQ(1, call->arguments()->length());
12544 CHECK_ALIVE(VisitForValue(call->arguments()->at(0)));
12545 HValue* value = Pop();
12546 HValue* result = BuildToObject(value);
12547 return ast_context()->ReturnValue(result);
12548}
12549
12550
// %_ToString(x): strings pass through unchanged; everything else is handed
// to the ToString stub.
void HOptimizedGraphBuilder::GenerateToString(CallRuntime* call) {
  DCHECK_EQ(1, call->arguments()->length());
  CHECK_ALIVE(VisitForValue(call->arguments()->at(0)));
  HValue* input = Pop();
  if (input->type().IsString()) {
    return ast_context()->ReturnValue(input);
  } else {
    Callable callable = CodeFactory::ToString(isolate());
    HValue* stub = Add<HConstant>(callable.code());
    HValue* values[] = {context(), input};
    HInstruction* result = New<HCallWithDescriptor>(
        stub, 0, callable.descriptor(), ArrayVector(values));
    return ast_context()->ReturnInstruction(result, call->id());
  }
}
12566
12567
// %_ToLength(x): always calls the ToLength stub (no inline fast path).
void HOptimizedGraphBuilder::GenerateToLength(CallRuntime* call) {
  DCHECK_EQ(1, call->arguments()->length());
  CHECK_ALIVE(VisitForValue(call->arguments()->at(0)));
  Callable callable = CodeFactory::ToLength(isolate());
  HValue* input = Pop();
  HValue* stub = Add<HConstant>(callable.code());
  HValue* values[] = {context(), input};
  HInstruction* result = New<HCallWithDescriptor>(
      stub, 0, callable.descriptor(), ArrayVector(values));
  return ast_context()->ReturnInstruction(result, call->id());
}
12579
12580
12581void HOptimizedGraphBuilder::GenerateToNumber(CallRuntime* call) {
12582 DCHECK_EQ(1, call->arguments()->length());
12583 CHECK_ALIVE(VisitForValue(call->arguments()->at(0)));
12584 Callable callable = CodeFactory::ToNumber(isolate());
12585 HValue* input = Pop();
Ben Murdoch097c5b22016-05-18 11:27:45 +010012586 HValue* result = BuildToNumber(input);
12587 if (result->HasObservableSideEffects()) {
12588 if (!ast_context()->IsEffect()) Push(result);
12589 Add<HSimulate>(call->id(), REMOVABLE_SIMULATE);
12590 if (!ast_context()->IsEffect()) result = Pop();
Ben Murdoch4a90d5f2016-03-22 12:00:34 +000012591 }
Ben Murdoch097c5b22016-05-18 11:27:45 +010012592 return ast_context()->ReturnValue(result);
Ben Murdoch4a90d5f2016-03-22 12:00:34 +000012593}
12594
12595
// %_IsJSProxy(x): continuation telling whether x is a heap object whose
// instance type is JS_PROXY_TYPE.
void HOptimizedGraphBuilder::GenerateIsJSProxy(CallRuntime* call) {
  DCHECK(call->arguments()->length() == 1);
  CHECK_ALIVE(VisitForValue(call->arguments()->at(0)));
  HValue* value = Pop();
  HIfContinuation continuation;
  IfBuilder if_proxy(this);

  // Smis are never proxies; the Smi check also acts as the dependency that
  // guards the map load below.
  HValue* smicheck = if_proxy.IfNot<HIsSmiAndBranch>(value);
  if_proxy.And();
  HValue* map = Add<HLoadNamedField>(value, smicheck, HObjectAccess::ForMap());
  HValue* instance_type =
      Add<HLoadNamedField>(map, nullptr, HObjectAccess::ForMapInstanceType());
  if_proxy.If<HCompareNumericAndBranch>(
      instance_type, Add<HConstant>(JS_PROXY_TYPE), Token::EQ);

  if_proxy.CaptureContinuation(&continuation);
  return ast_context()->ReturnContinuation(&continuation, call->id());
}
12614
12615
// %_HasFastPackedElements(x): continuation telling whether x is a heap
// object whose elements kind is one of the packed fast kinds (Smi, tagged
// or double).
void HOptimizedGraphBuilder::GenerateHasFastPackedElements(CallRuntime* call) {
  DCHECK(call->arguments()->length() == 1);
  CHECK_ALIVE(VisitForValue(call->arguments()->at(0)));
  HValue* object = Pop();
  HIfContinuation continuation(graph()->CreateBasicBlock(),
                               graph()->CreateBasicBlock());
  IfBuilder if_not_smi(this);
  if_not_smi.IfNot<HIsSmiAndBranch>(object);
  if_not_smi.Then();
  {
    NoObservableSideEffectsScope no_effects(this);

    // Compare the elements kind against the three packed fast kinds.
    IfBuilder if_fast_packed(this);
    HValue* elements_kind = BuildGetElementsKind(object);
    if_fast_packed.If<HCompareNumericAndBranch>(
        elements_kind, Add<HConstant>(FAST_SMI_ELEMENTS), Token::EQ);
    if_fast_packed.Or();
    if_fast_packed.If<HCompareNumericAndBranch>(
        elements_kind, Add<HConstant>(FAST_ELEMENTS), Token::EQ);
    if_fast_packed.Or();
    if_fast_packed.If<HCompareNumericAndBranch>(
        elements_kind, Add<HConstant>(FAST_DOUBLE_ELEMENTS), Token::EQ);
    if_fast_packed.JoinContinuation(&continuation);
  }
  if_not_smi.JoinContinuation(&continuation);
  return ast_context()->ReturnContinuation(&continuation, call->id());
}
12643
12644
// %_ValueOf(x): unwraps a JSValue wrapper to its primitive payload; values
// that are not JSValue wrappers are returned unchanged.
void HOptimizedGraphBuilder::GenerateValueOf(CallRuntime* call) {
  DCHECK(call->arguments()->length() == 1);
  CHECK_ALIVE(VisitForValue(call->arguments()->at(0)));
  HValue* object = Pop();

  IfBuilder if_objectisvalue(this);
  HValue* objectisvalue = if_objectisvalue.If<HHasInstanceTypeAndBranch>(
      object, JS_VALUE_TYPE);
  if_objectisvalue.Then();
  {
    // Return the actual value.
    Push(Add<HLoadNamedField>(
            object, objectisvalue,
            HObjectAccess::ForObservableJSObjectOffset(
                JSValue::kValueOffset)));
    Add<HSimulate>(call->id(), FIXED_SIMULATE);
  }
  if_objectisvalue.Else();
  {
    // If the object is not a value return the object.
    Push(object);
    Add<HSimulate>(call->id(), FIXED_SIMULATE);
  }
  if_objectisvalue.End();
  return ast_context()->ReturnValue(Pop());
}
12671
12672
Ben Murdoch4a90d5f2016-03-22 12:00:34 +000012673// Fast support for charCodeAt(n).
12674void HOptimizedGraphBuilder::GenerateStringCharCodeAt(CallRuntime* call) {
12675 DCHECK(call->arguments()->length() == 2);
12676 CHECK_ALIVE(VisitForValue(call->arguments()->at(0)));
12677 CHECK_ALIVE(VisitForValue(call->arguments()->at(1)));
12678 HValue* index = Pop();
12679 HValue* string = Pop();
12680 HInstruction* result = BuildStringCharCodeAt(string, index);
12681 return ast_context()->ReturnInstruction(result, call->id());
12682}
12683
12684
12685// Fast support for string.charAt(n) and string[n].
12686void HOptimizedGraphBuilder::GenerateStringCharFromCode(CallRuntime* call) {
12687 DCHECK(call->arguments()->length() == 1);
12688 CHECK_ALIVE(VisitForValue(call->arguments()->at(0)));
12689 HValue* char_code = Pop();
12690 HInstruction* result = NewUncasted<HStringCharFromCode>(char_code);
12691 return ast_context()->ReturnInstruction(result, call->id());
12692}
12693
12694
// Fast support for SubString: calls the SubString stub.  All three
// arguments travel as stack arguments; only the context is passed in
// registers.
void HOptimizedGraphBuilder::GenerateSubString(CallRuntime* call) {
  DCHECK_EQ(3, call->arguments()->length());
  CHECK_ALIVE(VisitExpressions(call->arguments()));
  PushArgumentsFromEnvironment(call->arguments()->length());
  Callable callable = CodeFactory::SubString(isolate());
  HValue* stub = Add<HConstant>(callable.code());
  HValue* values[] = {context()};
  HInstruction* result =
      New<HCallWithDescriptor>(stub, call->arguments()->length(),
                               callable.descriptor(), ArrayVector(values));
  // The stub always produces a string.
  result->set_type(HType::String());
  return ast_context()->ReturnInstruction(result, call->id());
}
12709
// Support for direct creation of new objects.
void HOptimizedGraphBuilder::GenerateNewObject(CallRuntime* call) {
  DCHECK_EQ(2, call->arguments()->length());
  CHECK_ALIVE(VisitExpressions(call->arguments()));
  FastNewObjectStub stub(isolate());
  FastNewObjectDescriptor descriptor(isolate());
  // Braced-init-list elements are evaluated left to right, so the first
  // Pop() yields the argument that was pushed last.
  HValue* values[] = {context(), Pop(), Pop()};
  HConstant* stub_value = Add<HConstant>(stub.GetCode());
  HInstruction* result =
      New<HCallWithDescriptor>(stub_value, 0, descriptor, ArrayVector(values));
  return ast_context()->ReturnInstruction(result, call->id());
}
Ben Murdoch4a90d5f2016-03-22 12:00:34 +000012722
// Support for direct calls from JavaScript to native RegExp code.
// All four arguments are passed on the stack to the RegExpExec stub.
void HOptimizedGraphBuilder::GenerateRegExpExec(CallRuntime* call) {
  DCHECK_EQ(4, call->arguments()->length());
  CHECK_ALIVE(VisitExpressions(call->arguments()));
  PushArgumentsFromEnvironment(call->arguments()->length());
  Callable callable = CodeFactory::RegExpExec(isolate());
  HValue* stub = Add<HConstant>(callable.code());
  HValue* values[] = {context()};
  HInstruction* result =
      New<HCallWithDescriptor>(stub, call->arguments()->length(),
                               callable.descriptor(), ArrayVector(values));
  return ast_context()->ReturnInstruction(result, call->id());
}
12736
12737
12738void HOptimizedGraphBuilder::GenerateRegExpFlags(CallRuntime* call) {
12739 DCHECK_EQ(1, call->arguments()->length());
12740 CHECK_ALIVE(VisitExpressions(call->arguments()));
12741 HValue* regexp = Pop();
12742 HInstruction* result =
12743 New<HLoadNamedField>(regexp, nullptr, HObjectAccess::ForJSRegExpFlags());
12744 return ast_context()->ReturnInstruction(result, call->id());
12745}
12746
12747
12748void HOptimizedGraphBuilder::GenerateRegExpSource(CallRuntime* call) {
12749 DCHECK_EQ(1, call->arguments()->length());
12750 CHECK_ALIVE(VisitExpressions(call->arguments()));
12751 HValue* regexp = Pop();
12752 HInstruction* result =
12753 New<HLoadNamedField>(regexp, nullptr, HObjectAccess::ForJSRegExpSource());
12754 return ast_context()->ReturnInstruction(result, call->id());
12755}
12756
12757
12758void HOptimizedGraphBuilder::GenerateDoubleLo(CallRuntime* call) {
12759 DCHECK_EQ(1, call->arguments()->length());
12760 CHECK_ALIVE(VisitForValue(call->arguments()->at(0)));
12761 HValue* value = Pop();
12762 HInstruction* result = NewUncasted<HDoubleBits>(value, HDoubleBits::LOW);
12763 return ast_context()->ReturnInstruction(result, call->id());
12764}
12765
12766
12767void HOptimizedGraphBuilder::GenerateDoubleHi(CallRuntime* call) {
12768 DCHECK_EQ(1, call->arguments()->length());
12769 CHECK_ALIVE(VisitForValue(call->arguments()->at(0)));
12770 HValue* value = Pop();
12771 HInstruction* result = NewUncasted<HDoubleBits>(value, HDoubleBits::HIGH);
12772 return ast_context()->ReturnInstruction(result, call->id());
12773}
12774
12775
// Construct a RegExp exec result with two in-object properties.
void HOptimizedGraphBuilder::GenerateRegExpConstructResult(CallRuntime* call) {
  DCHECK_EQ(3, call->arguments()->length());
  CHECK_ALIVE(VisitForValue(call->arguments()->at(0)));
  CHECK_ALIVE(VisitForValue(call->arguments()->at(1)));
  CHECK_ALIVE(VisitForValue(call->arguments()->at(2)));
  // Pops are in reverse order of evaluation: (length, index, input).
  HValue* input = Pop();
  HValue* index = Pop();
  HValue* length = Pop();
  HValue* result = BuildRegExpConstructResult(length, index, input);
  return ast_context()->ReturnValue(result);
}
12788
12789
12790// Fast support for number to string.
12791void HOptimizedGraphBuilder::GenerateNumberToString(CallRuntime* call) {
12792 DCHECK_EQ(1, call->arguments()->length());
12793 CHECK_ALIVE(VisitForValue(call->arguments()->at(0)));
12794 HValue* number = Pop();
Ben Murdoch097c5b22016-05-18 11:27:45 +010012795 HValue* result = BuildNumberToString(number, Type::Any());
Ben Murdoch4a90d5f2016-03-22 12:00:34 +000012796 return ast_context()->ReturnValue(result);
12797}
12798
12799
// Fast support for calls: invokes the Call builtin trampoline.  All
// arguments except the call target travel on the stack; the trampoline
// receives the argument count excluding target and receiver.
void HOptimizedGraphBuilder::GenerateCall(CallRuntime* call) {
  DCHECK_LE(2, call->arguments()->length());
  CHECK_ALIVE(VisitExpressions(call->arguments()));
  CallTrampolineDescriptor descriptor(isolate());
  // Move everything but the target (the first argument, still on the
  // environment stack) into the argument area.
  PushArgumentsFromEnvironment(call->arguments()->length() - 1);
  HValue* trampoline = Add<HConstant>(isolate()->builtins()->Call());
  HValue* target = Pop();
  HValue* values[] = {context(), target,
                      Add<HConstant>(call->arguments()->length() - 2)};
  HInstruction* result =
      New<HCallWithDescriptor>(trampoline, call->arguments()->length() - 1,
                               descriptor, ArrayVector(values));
  return ast_context()->ReturnInstruction(result, call->id());
}
12815
12816
12817// Fast call to math functions.
12818void HOptimizedGraphBuilder::GenerateMathPow(CallRuntime* call) {
12819 DCHECK_EQ(2, call->arguments()->length());
12820 CHECK_ALIVE(VisitForValue(call->arguments()->at(0)));
12821 CHECK_ALIVE(VisitForValue(call->arguments()->at(1)));
12822 HValue* right = Pop();
12823 HValue* left = Pop();
12824 HInstruction* result = NewUncasted<HPower>(left, right);
12825 return ast_context()->ReturnInstruction(result, call->id());
12826}
12827
12828
Ben Murdoch4a90d5f2016-03-22 12:00:34 +000012829void HOptimizedGraphBuilder::GenerateFixedArrayGet(CallRuntime* call) {
12830 DCHECK(call->arguments()->length() == 2);
12831 CHECK_ALIVE(VisitForValue(call->arguments()->at(0)));
12832 CHECK_ALIVE(VisitForValue(call->arguments()->at(1)));
12833 HValue* index = Pop();
12834 HValue* object = Pop();
12835 HInstruction* result = New<HLoadKeyed>(
12836 object, index, nullptr, nullptr, FAST_HOLEY_ELEMENTS, ALLOW_RETURN_HOLE);
12837 return ast_context()->ReturnInstruction(result, call->id());
12838}
12839
12840
12841void HOptimizedGraphBuilder::GenerateFixedArraySet(CallRuntime* call) {
12842 DCHECK(call->arguments()->length() == 3);
12843 CHECK_ALIVE(VisitForValue(call->arguments()->at(0)));
12844 CHECK_ALIVE(VisitForValue(call->arguments()->at(1)));
12845 CHECK_ALIVE(VisitForValue(call->arguments()->at(2)));
12846 HValue* value = Pop();
12847 HValue* index = Pop();
12848 HValue* object = Pop();
12849 NoObservableSideEffectsScope no_effects(this);
12850 Add<HStoreKeyed>(object, index, value, nullptr, FAST_HOLEY_ELEMENTS);
12851 return ast_context()->ReturnValue(graph()->GetConstantUndefined());
12852}
12853
12854
// %_TheHole(): returns the hole sentinel constant.
void HOptimizedGraphBuilder::GenerateTheHole(CallRuntime* call) {
  DCHECK(call->arguments()->length() == 0);
  return ast_context()->ReturnValue(graph()->GetConstantHole());
}
12859
12860
12861void HOptimizedGraphBuilder::GenerateCreateIterResultObject(CallRuntime* call) {
12862 DCHECK_EQ(2, call->arguments()->length());
12863 CHECK_ALIVE(VisitForValue(call->arguments()->at(0)));
12864 CHECK_ALIVE(VisitForValue(call->arguments()->at(1)));
12865 HValue* done = Pop();
12866 HValue* value = Pop();
12867 HValue* result = BuildCreateIterResultObject(value, done);
12868 return ast_context()->ReturnValue(result);
12869}
12870
12871
12872void HOptimizedGraphBuilder::GenerateJSCollectionGetTable(CallRuntime* call) {
12873 DCHECK(call->arguments()->length() == 1);
12874 CHECK_ALIVE(VisitForValue(call->arguments()->at(0)));
12875 HValue* receiver = Pop();
12876 HInstruction* result = New<HLoadNamedField>(
12877 receiver, nullptr, HObjectAccess::ForJSCollectionTable());
12878 return ast_context()->ReturnInstruction(result, call->id());
12879}
12880
12881
12882void HOptimizedGraphBuilder::GenerateStringGetRawHashField(CallRuntime* call) {
12883 DCHECK(call->arguments()->length() == 1);
12884 CHECK_ALIVE(VisitForValue(call->arguments()->at(0)));
12885 HValue* object = Pop();
12886 HInstruction* result = New<HLoadNamedField>(
12887 object, nullptr, HObjectAccess::ForStringHashField());
12888 return ast_context()->ReturnInstruction(result, call->id());
12889}
12890
12891
// Allocates and initializes an empty OrderedHashSet/OrderedHashMap backing
// store of minimal capacity, laid out as a FixedArray.  Layout constants
// (capacity, load factor, entry size, header offsets) come from the
// CollectionType template parameter.
template <typename CollectionType>
HValue* HOptimizedGraphBuilder::BuildAllocateOrderedHashTable() {
  static const int kCapacity = CollectionType::kMinCapacity;
  static const int kBucketCount = kCapacity / CollectionType::kLoadFactor;
  static const int kFixedArrayLength = CollectionType::kHashTableStartIndex +
                                       kBucketCount +
                                       (kCapacity * CollectionType::kEntrySize);
  static const int kSizeInBytes =
      FixedArray::kHeaderSize + (kFixedArrayLength * kPointerSize);

  // Allocate the table and add the proper map.
  HValue* table =
      Add<HAllocate>(Add<HConstant>(kSizeInBytes), HType::HeapObject(),
                     NOT_TENURED, FIXED_ARRAY_TYPE, graph()->GetConstant0());
  AddStoreMapConstant(table, isolate()->factory()->ordered_hash_table_map());

  // Initialize the FixedArray...
  HValue* length = Add<HConstant>(kFixedArrayLength);
  Add<HStoreNamedField>(table, HObjectAccess::ForFixedArrayLength(), length);

  // ...and the OrderedHashTable fields.
  Add<HStoreNamedField>(
      table,
      HObjectAccess::ForOrderedHashTableNumberOfBuckets<CollectionType>(),
      Add<HConstant>(kBucketCount));
  Add<HStoreNamedField>(
      table,
      HObjectAccess::ForOrderedHashTableNumberOfElements<CollectionType>(),
      graph()->GetConstant0());
  Add<HStoreNamedField>(
      table, HObjectAccess::ForOrderedHashTableNumberOfDeletedElements<
                 CollectionType>(),
      graph()->GetConstant0());

  // Fill the buckets with kNotFound.
  HValue* not_found = Add<HConstant>(CollectionType::kNotFound);
  for (int i = 0; i < kBucketCount; ++i) {
    Add<HStoreNamedField>(
        table, HObjectAccess::ForOrderedHashTableBucket<CollectionType>(i),
        not_found);
  }

  // Fill the data table with undefined.
  HValue* undefined = graph()->GetConstantUndefined();
  for (int i = 0; i < (kCapacity * CollectionType::kEntrySize); ++i) {
    Add<HStoreNamedField>(table,
                          HObjectAccess::ForOrderedHashTableDataTableIndex<
                              CollectionType, kBucketCount>(i),
                          undefined);
  }

  return table;
}
12945
12946
12947void HOptimizedGraphBuilder::GenerateSetInitialize(CallRuntime* call) {
12948 DCHECK(call->arguments()->length() == 1);
12949 CHECK_ALIVE(VisitForValue(call->arguments()->at(0)));
12950 HValue* receiver = Pop();
12951
12952 NoObservableSideEffectsScope no_effects(this);
12953 HValue* table = BuildAllocateOrderedHashTable<OrderedHashSet>();
12954 Add<HStoreNamedField>(receiver, HObjectAccess::ForJSCollectionTable(), table);
12955 return ast_context()->ReturnValue(receiver);
12956}
12957
12958
12959void HOptimizedGraphBuilder::GenerateMapInitialize(CallRuntime* call) {
12960 DCHECK(call->arguments()->length() == 1);
12961 CHECK_ALIVE(VisitForValue(call->arguments()->at(0)));
12962 HValue* receiver = Pop();
12963
12964 NoObservableSideEffectsScope no_effects(this);
12965 HValue* table = BuildAllocateOrderedHashTable<OrderedHashMap>();
12966 Add<HStoreNamedField>(receiver, HObjectAccess::ForJSCollectionTable(), table);
12967 return ast_context()->ReturnValue(receiver);
12968}
12969
12970
// Clears a JSSet/JSMap by installing a fresh empty table.  The old table is
// linked to its replacement via the next-table field and its
// deleted-elements field is set to the cleared-table sentinel (presumably so
// holders of stale table references can detect the clear — confirm against
// the OrderedHashTable definition).
template <typename CollectionType>
void HOptimizedGraphBuilder::BuildOrderedHashTableClear(HValue* receiver) {
  HValue* old_table = Add<HLoadNamedField>(
      receiver, nullptr, HObjectAccess::ForJSCollectionTable());
  HValue* new_table = BuildAllocateOrderedHashTable<CollectionType>();
  Add<HStoreNamedField>(
      old_table, HObjectAccess::ForOrderedHashTableNextTable<CollectionType>(),
      new_table);
  Add<HStoreNamedField>(
      old_table, HObjectAccess::ForOrderedHashTableNumberOfDeletedElements<
                     CollectionType>(),
      Add<HConstant>(CollectionType::kClearedTableSentinel));
  Add<HStoreNamedField>(receiver, HObjectAccess::ForJSCollectionTable(),
                        new_table);
}
12986
12987
12988void HOptimizedGraphBuilder::GenerateSetClear(CallRuntime* call) {
12989 DCHECK(call->arguments()->length() == 1);
12990 CHECK_ALIVE(VisitForValue(call->arguments()->at(0)));
12991 HValue* receiver = Pop();
12992
12993 NoObservableSideEffectsScope no_effects(this);
12994 BuildOrderedHashTableClear<OrderedHashSet>(receiver);
12995 return ast_context()->ReturnValue(graph()->GetConstantUndefined());
12996}
12997
12998
12999void HOptimizedGraphBuilder::GenerateMapClear(CallRuntime* call) {
13000 DCHECK(call->arguments()->length() == 1);
13001 CHECK_ALIVE(VisitForValue(call->arguments()->at(0)));
13002 HValue* receiver = Pop();
13003
13004 NoObservableSideEffectsScope no_effects(this);
13005 BuildOrderedHashTableClear<OrderedHashMap>(receiver);
13006 return ast_context()->ReturnValue(graph()->GetConstantUndefined());
13007}
13008
13009
13010void HOptimizedGraphBuilder::GenerateGetCachedArrayIndex(CallRuntime* call) {
13011 DCHECK(call->arguments()->length() == 1);
13012 CHECK_ALIVE(VisitForValue(call->arguments()->at(0)));
13013 HValue* value = Pop();
13014 HGetCachedArrayIndex* result = New<HGetCachedArrayIndex>(value);
13015 return ast_context()->ReturnInstruction(result, call->id());
13016}
13017
13018
// %_DebugBreakInOptimizedCode(): unconditionally emits a debug break and
// evaluates to 0.
void HOptimizedGraphBuilder::GenerateDebugBreakInOptimizedCode(
    CallRuntime* call) {
  Add<HDebugBreak>();
  return ast_context()->ReturnValue(graph()->GetConstant0());
}
13024
13025
// %_DebugIsActive(): reads the isolate's debug-is-active flag (one unsigned
// byte) through an external reference.
void HOptimizedGraphBuilder::GenerateDebugIsActive(CallRuntime* call) {
  DCHECK(call->arguments()->length() == 0);
  HValue* ref =
      Add<HConstant>(ExternalReference::debug_is_active_address(isolate()));
  HValue* value =
      Add<HLoadNamedField>(ref, nullptr, HObjectAccess::ForExternalUInteger8());
  return ast_context()->ReturnValue(value);
}
13034
Ben Murdoch4a90d5f2016-03-22 12:00:34 +000013035#undef CHECK_BAILOUT
13036#undef CHECK_ALIVE
13037
13038
// Environment for an optimized JS_FUNCTION frame: one special slot (context)
// plus the receiver, the parameters and the stack-allocated locals of the
// declaration scope.
HEnvironment::HEnvironment(HEnvironment* outer,
                           Scope* scope,
                           Handle<JSFunction> closure,
                           Zone* zone)
    : closure_(closure),
      values_(0, zone),
      frame_type_(JS_FUNCTION),
      parameter_count_(0),
      specials_count_(1),
      local_count_(0),
      outer_(outer),
      entry_(NULL),
      pop_count_(0),
      push_count_(0),
      ast_id_(BailoutId::None()),
      zone_(zone) {
  Scope* declaration_scope = scope->DeclarationScope();
  // +1 accounts for the receiver.
  Initialize(declaration_scope->num_parameters() + 1,
             declaration_scope->num_stack_slots(), 0);
}
13059
13060
// Environment for a STUB frame: parameters plus one special slot, no locals
// and no outer environment.
HEnvironment::HEnvironment(Zone* zone, int parameter_count)
    : values_(0, zone),
      frame_type_(STUB),
      parameter_count_(parameter_count),
      specials_count_(1),
      local_count_(0),
      outer_(NULL),
      entry_(NULL),
      pop_count_(0),
      push_count_(0),
      ast_id_(BailoutId::None()),
      zone_(zone) {
  Initialize(parameter_count, 0, 0);
}
13075
13076
// Copying constructor: all interesting state, including the counts set to
// placeholder values here, is copied from |other| by Initialize(other).
HEnvironment::HEnvironment(const HEnvironment* other, Zone* zone)
    : values_(0, zone),
      frame_type_(JS_FUNCTION),
      parameter_count_(0),
      specials_count_(0),
      local_count_(0),
      outer_(NULL),
      entry_(NULL),
      pop_count_(0),
      push_count_(0),
      ast_id_(other->ast_id()),
      zone_(zone) {
  Initialize(other);
}
13091
13092
// Environment for an auxiliary frame of the given |frame_type|, holding
// |arguments| parameter slots and no specials or locals.
HEnvironment::HEnvironment(HEnvironment* outer,
                           Handle<JSFunction> closure,
                           FrameType frame_type,
                           int arguments,
                           Zone* zone)
    : closure_(closure),
      values_(arguments, zone),
      frame_type_(frame_type),
      parameter_count_(arguments),
      specials_count_(0),
      local_count_(0),
      outer_(outer),
      entry_(NULL),
      pop_count_(0),
      push_count_(0),
      ast_id_(BailoutId::None()),
      zone_(zone) {
}
13111
13112
// Sets the counts and fills the value list with NULL placeholders for
// parameters, specials, locals and the initial expression-stack height.
void HEnvironment::Initialize(int parameter_count,
                              int local_count,
                              int stack_height) {
  parameter_count_ = parameter_count;
  local_count_ = local_count;

  // Avoid reallocating the temporaries' backing store on the first Push.
  int total = parameter_count + specials_count_ + local_count + stack_height;
  values_.Initialize(total + 4, zone());
  for (int i = 0; i < total; ++i) values_.Add(NULL, zone());
}
13124
13125
// Copies all state from |other|.  The outer environment chain is
// deep-copied so mutations here never leak into the source environment.
void HEnvironment::Initialize(const HEnvironment* other) {
  closure_ = other->closure();
  values_.AddAll(other->values_, zone());
  assigned_variables_.Union(other->assigned_variables_, zone());
  frame_type_ = other->frame_type_;
  parameter_count_ = other->parameter_count_;
  local_count_ = other->local_count_;
  if (other->outer_ != NULL) outer_ = other->outer_->Copy();  // Deep copy.
  entry_ = other->entry_;
  pop_count_ = other->pop_count_;
  push_count_ = other->push_count_;
  specials_count_ = other->specials_count_;
  ast_id_ = other->ast_id_;
}
13140
13141
// Merges |other|'s values into this environment at a join point owned by
// |block|.  For each slot: if |block| already created a phi for it, the
// incoming value becomes one more phi input; otherwise, if the two edges
// disagree, a fresh phi is created seeded with the current value for every
// previously-merged predecessor plus the new incoming value.
void HEnvironment::AddIncomingEdge(HBasicBlock* block, HEnvironment* other) {
  DCHECK(!block->IsLoopHeader());
  DCHECK(values_.length() == other->values_.length());

  int length = values_.length();
  for (int i = 0; i < length; ++i) {
    HValue* value = values_[i];
    if (value != NULL && value->IsPhi() && value->block() == block) {
      // There is already a phi for the i'th value.
      HPhi* phi = HPhi::cast(value);
      // Assert index is correct and that we haven't missed an incoming edge.
      DCHECK(phi->merged_index() == i || !phi->HasMergedIndex());
      DCHECK(phi->OperandCount() == block->predecessors()->length());
      phi->AddInput(other->values_[i]);
    } else if (values_[i] != other->values_[i]) {
      // There is a fresh value on the incoming edge, a phi is needed.
      DCHECK(values_[i] != NULL && other->values_[i] != NULL);
      HPhi* phi = block->AddNewPhi(i);
      HValue* old_value = values_[i];
      // Give the phi one |old_value| input per predecessor merged so far;
      // presumably the new edge is not yet in predecessors() at this point
      // (the phi then matches the assertion above on later merges) --
      // confirm against HBasicBlock's edge bookkeeping.
      for (int j = 0; j < block->predecessors()->length(); j++) {
        phi->AddInput(old_value);
      }
      phi->AddInput(other->values_[i]);
      this->values_[i] = phi;
    }
  }
}
13169
13170
13171void HEnvironment::Bind(int index, HValue* value) {
13172 DCHECK(value != NULL);
13173 assigned_variables_.Add(index, zone());
13174 values_[index] = value;
13175}
13176
13177
13178bool HEnvironment::HasExpressionAt(int index) const {
13179 return index >= parameter_count_ + specials_count_ + local_count_;
13180}
13181
13182
13183bool HEnvironment::ExpressionStackIsEmpty() const {
13184 DCHECK(length() >= first_expression_index());
13185 return length() == first_expression_index();
13186}
13187
13188
13189void HEnvironment::SetExpressionStackAt(int index_from_top, HValue* value) {
13190 int count = index_from_top + 1;
13191 int index = values_.length() - count;
13192 DCHECK(HasExpressionAt(index));
13193 // The push count must include at least the element in question or else
13194 // the new value will not be included in this environment's history.
13195 if (push_count_ < count) {
13196 // This is the same effect as popping then re-pushing 'count' elements.
13197 pop_count_ += (count - push_count_);
13198 push_count_ = count;
13199 }
13200 values_[index] = value;
13201}
13202
13203
13204HValue* HEnvironment::RemoveExpressionStackAt(int index_from_top) {
13205 int count = index_from_top + 1;
13206 int index = values_.length() - count;
13207 DCHECK(HasExpressionAt(index));
13208 // Simulate popping 'count' elements and then
13209 // pushing 'count - 1' elements back.
13210 pop_count_ += Max(count - push_count_, 0);
13211 push_count_ = Max(push_count_ - count, 0) + (count - 1);
13212 return values_.Remove(index);
13213}
13214
13215
13216void HEnvironment::Drop(int count) {
13217 for (int i = 0; i < count; ++i) {
13218 Pop();
13219 }
13220}
13221
13222
13223void HEnvironment::Print() const {
13224 OFStream os(stdout);
13225 os << *this << "\n";
13226}
13227
13228
// Returns a zone-allocated clone of this environment (deep-copies the
// outer chain; see the copy constructor / Initialize).
HEnvironment* HEnvironment::Copy() const {
  return new(zone()) HEnvironment(this, zone());
}
13232
13233
// Returns a clone whose pop/push history has been reset, for contexts where
// the simulated stack effects of the original must not carry over.
HEnvironment* HEnvironment::CopyWithoutHistory() const {
  HEnvironment* result = Copy();
  result->ClearHistory();
  return result;
}
13239
13240
13241HEnvironment* HEnvironment::CopyAsLoopHeader(HBasicBlock* loop_header) const {
13242 HEnvironment* new_env = Copy();
13243 for (int i = 0; i < values_.length(); ++i) {
13244 HPhi* phi = loop_header->AddNewPhi(i);
13245 phi->AddInput(values_[i]);
13246 new_env->values_[i] = phi;
13247 }
13248 new_env->ClearHistory();
13249 return new_env;
13250}
13251
13252
13253HEnvironment* HEnvironment::CreateStubEnvironment(HEnvironment* outer,
13254 Handle<JSFunction> target,
13255 FrameType frame_type,
13256 int arguments) const {
13257 HEnvironment* new_env =
13258 new(zone()) HEnvironment(outer, target, frame_type,
13259 arguments + 1, zone());
13260 for (int i = 0; i <= arguments; ++i) { // Include receiver.
13261 new_env->Push(ExpressionStackAt(arguments - i));
13262 }
13263 new_env->ClearHistory();
13264 return new_env;
13265}
13266
// Flags a JS_FUNCTION environment as the caller of an inlined tail call
// (see CopyForInlining); reversed by ClearTailCallerMark.
void HEnvironment::MarkAsTailCaller() {
  DCHECK_EQ(JS_FUNCTION, frame_type());
  frame_type_ = TAIL_CALLER_FUNCTION;
}
13271
// Undoes MarkAsTailCaller, restoring the ordinary JS_FUNCTION frame type.
void HEnvironment::ClearTailCallerMark() {
  DCHECK_EQ(TAIL_CALLER_FUNCTION, frame_type());
  frame_type_ = JS_FUNCTION;
}
Ben Murdoch4a90d5f2016-03-22 12:00:34 +000013276
// Builds the environment for inlining a call to |target| with |arguments|
// actual arguments.  The result is a fresh inner environment for the
// inlined function, chained (possibly via artificial stub / arguments
// adaptor frames) to a copy of this environment with the call's operands
// dropped.  |undefined| fills parameter slots beyond the actual arguments
// and all locals; |inlining_kind| selects which extra stub frame (if any)
// must be interposed for deopt to reconstruct the right stack.
HEnvironment* HEnvironment::CopyForInlining(
    Handle<JSFunction> target, int arguments, FunctionLiteral* function,
    HConstant* undefined, InliningKind inlining_kind,
    TailCallMode syntactic_tail_call_mode) const {
  DCHECK_EQ(JS_FUNCTION, frame_type());

  // Outer environment is a copy of this one without the arguments.
  int arity = function->scope()->num_parameters();

  HEnvironment* outer = Copy();
  outer->Drop(arguments + 1);  // Including receiver.
  outer->ClearHistory();

  // Syntactic tail calls are only inlined as normal returns; mark the
  // caller frame so deopt knows it will be replaced by the callee.
  if (syntactic_tail_call_mode == TailCallMode::kAllow) {
    DCHECK_EQ(NORMAL_RETURN, inlining_kind);
    outer->MarkAsTailCaller();
  }

  if (inlining_kind == CONSTRUCT_CALL_RETURN) {
    // Create artificial constructor stub environment.  The receiver should
    // actually be the constructor function, but we pass the newly allocated
    // object instead, DoComputeConstructStubFrame() relies on that.
    outer = CreateStubEnvironment(outer, target, JS_CONSTRUCT, arguments);
  } else if (inlining_kind == GETTER_CALL_RETURN) {
    // We need an additional StackFrame::INTERNAL frame for restoring the
    // correct context.
    outer = CreateStubEnvironment(outer, target, JS_GETTER, arguments);
  } else if (inlining_kind == SETTER_CALL_RETURN) {
    // We need an additional StackFrame::INTERNAL frame for temporarily saving
    // the argument of the setter, see StoreStubCompiler::CompileStoreViaSetter.
    outer = CreateStubEnvironment(outer, target, JS_SETTER, arguments);
  }

  if (arity != arguments) {
    // Create artificial arguments adaptation environment.
    outer = CreateStubEnvironment(outer, target, ARGUMENTS_ADAPTOR, arguments);
  }

  HEnvironment* inner =
      new(zone()) HEnvironment(outer, function->scope(), target, zone());
  // Get the argument values from the original environment.
  for (int i = 0; i <= arity; ++i) {  // Include receiver.
    HValue* push = (i <= arguments) ?
        ExpressionStackAt(arguments - i) : undefined;
    inner->SetValueAt(i, push);
  }
  // Slot arity + 1 holds the context; everything after it (the locals) is
  // initialized to undefined.
  inner->SetValueAt(arity + 1, context());
  for (int i = arity + 2; i < inner->length(); ++i) {
    inner->SetValueAt(i, undefined);
  }

  inner->set_ast_id(BailoutId::FunctionEntry());
  return inner;
}
13331
13332
13333std::ostream& operator<<(std::ostream& os, const HEnvironment& env) {
13334 for (int i = 0; i < env.length(); i++) {
13335 if (i == 0) os << "parameters\n";
13336 if (i == env.parameter_count()) os << "specials\n";
13337 if (i == env.parameter_count() + env.specials_count()) os << "locals\n";
13338 if (i == env.parameter_count() + env.specials_count() + env.local_count()) {
13339 os << "expressions\n";
13340 }
13341 HValue* val = env.values()->at(i);
13342 os << i << ": ";
13343 if (val != NULL) {
13344 os << val;
13345 } else {
13346 os << "NULL";
13347 }
13348 os << "\n";
13349 }
13350 return os << "\n";
13351}
13352
13353
13354void HTracer::TraceCompilation(CompilationInfo* info) {
13355 Tag tag(this, "compilation");
Ben Murdochc5610432016-08-08 18:44:38 +010013356 std::string name;
Ben Murdoch61f157c2016-09-16 13:49:30 +010013357 if (info->parse_info()) {
13358 Object* source_name = info->script()->name();
13359 if (source_name->IsString()) {
13360 String* str = String::cast(source_name);
13361 if (str->length() > 0) {
13362 name.append(str->ToCString().get());
13363 name.append(":");
13364 }
Ben Murdochc5610432016-08-08 18:44:38 +010013365 }
13366 }
13367 base::SmartArrayPointer<char> method_name = info->GetDebugName();
13368 name.append(method_name.get());
Ben Murdoch4a90d5f2016-03-22 12:00:34 +000013369 if (info->IsOptimizing()) {
Ben Murdochc5610432016-08-08 18:44:38 +010013370 PrintStringProperty("name", name.c_str());
Ben Murdoch4a90d5f2016-03-22 12:00:34 +000013371 PrintIndent();
Ben Murdochc5610432016-08-08 18:44:38 +010013372 trace_.Add("method \"%s:%d\"\n", method_name.get(),
13373 info->optimization_id());
Ben Murdoch4a90d5f2016-03-22 12:00:34 +000013374 } else {
Ben Murdochc5610432016-08-08 18:44:38 +010013375 PrintStringProperty("name", name.c_str());
Ben Murdoch4a90d5f2016-03-22 12:00:34 +000013376 PrintStringProperty("method", "stub");
13377 }
13378 PrintLongProperty("date",
13379 static_cast<int64_t>(base::OS::TimeCurrentMillis()));
13380}
13381
13382
// Traces a lithium chunk (hydrogen graph plus LIR).  Handle dereferencing
// is explicitly allowed here because tracing only happens on the main
// thread, as asserted by the concurrent-recompilation check.
void HTracer::TraceLithium(const char* name, LChunk* chunk) {
  DCHECK(!chunk->isolate()->concurrent_recompilation_enabled());
  AllowHandleDereference allow_deref;
  AllowDeferredHandleDereference allow_deferred_deref;
  Trace(name, chunk->graph(), chunk);
}
13389
13390
// Traces a hydrogen graph without LIR (chunk == NULL suppresses the LIR
// sections in Trace).  Same handle-dereference caveats as TraceLithium.
void HTracer::TraceHydrogen(const char* name, HGraph* graph) {
  DCHECK(!graph->isolate()->concurrent_recompilation_enabled());
  AllowHandleDereference allow_deref;
  AllowDeferredHandleDereference allow_deferred_deref;
  Trace(name, graph, NULL);
}
13397
13398
// Emits one "cfg" section of the trace for |graph| (and, when |chunk| is
// non-NULL, its LIR).  The output format appears to follow the c1visualizer
// convention (block properties, states, HIR/LIR sections with " <|@"
// terminators) -- keep every literal byte-for-byte so external viewers can
// still parse the file.
void HTracer::Trace(const char* name, HGraph* graph, LChunk* chunk) {
  Tag tag(this, "cfg");
  PrintStringProperty("name", name);
  const ZoneList<HBasicBlock*>* blocks = graph->blocks();
  for (int i = 0; i < blocks->length(); i++) {
    HBasicBlock* current = blocks->at(i);
    Tag block_tag(this, "block");
    PrintBlockProperty("name", current->block_id());
    // Bytecode indices are not tracked for hydrogen; emit -1 placeholders.
    PrintIntProperty("from_bci", -1);
    PrintIntProperty("to_bci", -1);

    // Predecessor block list (empty property when there are none).
    if (!current->predecessors()->is_empty()) {
      PrintIndent();
      trace_.Add("predecessors");
      for (int j = 0; j < current->predecessors()->length(); ++j) {
        trace_.Add(" \"B%d\"", current->predecessors()->at(j)->block_id());
      }
      trace_.Add("\n");
    } else {
      PrintEmptyProperty("predecessors");
    }

    // Successors are taken from the block's end (control) instruction.
    if (current->end()->SuccessorCount() == 0) {
      PrintEmptyProperty("successors");
    } else {
      PrintIndent();
      trace_.Add("successors");
      for (HSuccessorIterator it(current->end()); !it.Done(); it.Advance()) {
        trace_.Add(" \"B%d\"", it.Current()->block_id());
      }
      trace_.Add("\n");
    }

    // No exception handlers are tracked; always empty.
    PrintEmptyProperty("xhandlers");

    // Block flags: loop-successor dominator, dead, OSR entry.
    {
      PrintIndent();
      trace_.Add("flags");
      if (current->IsLoopSuccessorDominator()) {
        trace_.Add(" \"dom-loop-succ\"");
      }
      if (current->IsUnreachable()) {
        trace_.Add(" \"dead\"");
      }
      if (current->is_osr_entry()) {
        trace_.Add(" \"osr\"");
      }
      trace_.Add("\n");
    }

    if (current->dominator() != NULL) {
      PrintBlockProperty("dominator", current->dominator()->block_id());
    }

    PrintIntProperty("loop_depth", current->LoopNestingDepth());

    // When LIR is available, report the lifetime positions spanned by this
    // block's instructions.
    if (chunk != NULL) {
      int first_index = current->first_instruction_index();
      int last_index = current->last_instruction_index();
      PrintIntProperty(
          "first_lir_id",
          LifetimePosition::FromInstructionIndex(first_index).Value());
      PrintIntProperty(
          "last_lir_id",
          LifetimePosition::FromInstructionIndex(last_index).Value());
    }

    // "states"/"locals" section: one line per phi in this block.
    {
      Tag states_tag(this, "states");
      Tag locals_tag(this, "locals");
      int total = current->phis()->length();
      PrintIntProperty("size", current->phis()->length());
      PrintStringProperty("method", "None");
      for (int j = 0; j < total; ++j) {
        HPhi* phi = current->phis()->at(j);
        PrintIndent();
        std::ostringstream os;
        os << phi->merged_index() << " " << NameOf(phi) << " " << *phi << "\n";
        trace_.Add(os.str().c_str());
      }
    }

    // HIR section: "<bci> <uses> <name> <instr> [pos:...] <|@" per line
    // (the leading 0 stands in for a bytecode index).
    {
      Tag HIR_tag(this, "HIR");
      for (HInstructionIterator it(current); !it.Done(); it.Advance()) {
        HInstruction* instruction = it.Current();
        int uses = instruction->UseCount();
        PrintIndent();
        std::ostringstream os;
        os << "0 " << uses << " " << NameOf(instruction) << " " << *instruction;
        if (graph->info()->is_tracking_positions() &&
            instruction->has_position() && instruction->position().raw() != 0) {
          const SourcePosition pos = instruction->position();
          os << " pos:";
          if (pos.inlining_id() != 0) os << pos.inlining_id() << "_";
          os << pos.position();
        }
        os << " <|@\n";
        trace_.Add(os.str().c_str());
      }
    }


    // LIR section: lifetime position, the instruction, and its originating
    // hydrogen value.  NULL entries (gaps) are skipped.
    if (chunk != NULL) {
      Tag LIR_tag(this, "LIR");
      int first_index = current->first_instruction_index();
      int last_index = current->last_instruction_index();
      if (first_index != -1 && last_index != -1) {
        const ZoneList<LInstruction*>* instructions = chunk->instructions();
        for (int i = first_index; i <= last_index; ++i) {
          LInstruction* linstr = instructions->at(i);
          if (linstr != NULL) {
            PrintIndent();
            trace_.Add("%d ",
                       LifetimePosition::FromInstructionIndex(i).Value());
            linstr->PrintTo(&trace_);
            std::ostringstream os;
            os << " [hir:" << NameOf(linstr->hydrogen_value()) << "] <|@\n";
            trace_.Add(os.str().c_str());
          }
        }
      }
    }
  }
}
13524
13525
13526void HTracer::TraceLiveRanges(const char* name, LAllocator* allocator) {
13527 Tag tag(this, "intervals");
13528 PrintStringProperty("name", name);
13529
13530 const Vector<LiveRange*>* fixed_d = allocator->fixed_double_live_ranges();
13531 for (int i = 0; i < fixed_d->length(); ++i) {
13532 TraceLiveRange(fixed_d->at(i), "fixed", allocator->zone());
13533 }
13534
13535 const Vector<LiveRange*>* fixed = allocator->fixed_live_ranges();
13536 for (int i = 0; i < fixed->length(); ++i) {
13537 TraceLiveRange(fixed->at(i), "fixed", allocator->zone());
13538 }
13539
13540 const ZoneList<LiveRange*>* live_ranges = allocator->live_ranges();
13541 for (int i = 0; i < live_ranges->length(); ++i) {
13542 TraceLiveRange(live_ranges->at(i), "object", allocator->zone());
13543 }
13544}
13545
13546
// Emits a single live-range line: id, type, assigned register or spill
// slot, parent/hint ids, the use intervals, and register-beneficial use
// positions.  NULL or empty ranges are skipped entirely.
void HTracer::TraceLiveRange(LiveRange* range, const char* type,
                             Zone* zone) {
  if (range != NULL && !range->IsEmpty()) {
    PrintIndent();
    trace_.Add("%d %s", range->id(), type);
    if (range->HasRegisterAssigned()) {
      // Report the assigned register by its architecture-specific name.
      LOperand* op = range->CreateAssignedOperand(zone);
      int assigned_reg = op->index();
      if (op->IsDoubleRegister()) {
        trace_.Add(" \"%s\"",
                   GetRegConfig()->GetDoubleRegisterName(assigned_reg));
      } else {
        DCHECK(op->IsRegister());
        trace_.Add(" \"%s\"",
                   GetRegConfig()->GetGeneralRegisterName(assigned_reg));
      }
    } else if (range->IsSpilled()) {
      // Spill slots live on the top-level range of a split family.
      LOperand* op = range->TopLevel()->GetSpillOperand();
      if (op->IsDoubleStackSlot()) {
        trace_.Add(" \"double_stack:%d\"", op->index());
      } else {
        DCHECK(op->IsStackSlot());
        trace_.Add(" \"stack:%d\"", op->index());
      }
    }
    // A child range reports its parent's id; a top-level range reports its
    // own id in the parent column.
    int parent_index = -1;
    if (range->IsChild()) {
      parent_index = range->parent()->id();
    } else {
      parent_index = range->id();
    }
    // The hint is the virtual register of the first unallocated use, -1 if
    // there is none.
    LOperand* op = range->FirstHint();
    int hint_index = -1;
    if (op != NULL && op->IsUnallocated()) {
      hint_index = LUnallocated::cast(op)->virtual_register();
    }
    trace_.Add(" %d %d", parent_index, hint_index);
    // Half-open intervals "[start, end[" covered by this range.
    UseInterval* cur_interval = range->first_interval();
    while (cur_interval != NULL && range->Covers(cur_interval->start())) {
      trace_.Add(" [%d, %d[",
                 cur_interval->start().Value(),
                 cur_interval->end().Value());
      cur_interval = cur_interval->next();
    }

    // Use positions where a register is beneficial (or all of them when
    // --trace-all-uses is set), each marked "M".
    UsePosition* current_pos = range->first_pos();
    while (current_pos != NULL) {
      if (current_pos->RegisterIsBeneficial() || FLAG_trace_all_uses) {
        trace_.Add(" %d M", current_pos->pos().Value());
      }
      current_pos = current_pos->next();
    }

    trace_.Add(" \"\"\n");
  }
}
13603
13604
// Appends the buffered trace text to filename_ (without truncating the
// file) and clears the in-memory buffer.
void HTracer::FlushToFile() {
  AppendChars(filename_.start(), trace_.ToCString().get(), trace_.length(),
              false);
  trace_.Reset();
}
13610
13611
// Accumulates the source size of the function being compiled, used later by
// Print() for the per-kB-of-source averages.  No-op without shared info.
void HStatistics::Initialize(CompilationInfo* info) {
  if (!info->has_shared_info()) return;
  source_size_ += info->shared_info()->SourceSize();
}
13616
13617
// Prints the accumulated hydrogen timing table: one row per recorded phase
// (time and generated-code size, each with its percentage), followed by the
// three top-level stages, totals, a comparison against full codegen, and
// per-kB-of-source averages.
void HStatistics::Print() {
  PrintF(
      "\n"
      "----------------------------------------"
      "----------------------------------------\n"
      "--- Hydrogen timing results:\n"
      "----------------------------------------"
      "----------------------------------------\n");
  // Phase percentages are relative to the sum over all recorded phases.
  base::TimeDelta sum;
  for (int i = 0; i < times_.length(); ++i) {
    sum += times_[i];
  }

  for (int i = 0; i < names_.length(); ++i) {
    PrintF("%33s", names_[i]);
    double ms = times_[i].InMillisecondsF();
    double percent = times_[i].PercentOf(sum);
    PrintF(" %8.3f ms / %4.1f %% ", ms, percent);

    size_t size = sizes_[i];
    double size_percent = static_cast<double>(size) * 100 / total_size_;
    PrintF(" %9zu bytes / %4.1f %%\n", size, size_percent);
  }

  PrintF(
      "----------------------------------------"
      "----------------------------------------\n");
  // Stage percentages use their own total (create + optimize + generate),
  // which need not equal |sum| above.
  base::TimeDelta total = create_graph_ + optimize_graph_ + generate_code_;
  PrintF("%33s %8.3f ms / %4.1f %% \n", "Create graph",
         create_graph_.InMillisecondsF(), create_graph_.PercentOf(total));
  PrintF("%33s %8.3f ms / %4.1f %% \n", "Optimize graph",
         optimize_graph_.InMillisecondsF(), optimize_graph_.PercentOf(total));
  PrintF("%33s %8.3f ms / %4.1f %% \n", "Generate and install code",
         generate_code_.InMillisecondsF(), generate_code_.PercentOf(total));
  PrintF(
      "----------------------------------------"
      "----------------------------------------\n");
  PrintF("%33s %8.3f ms %9zu bytes\n", "Total",
         total.InMillisecondsF(), total_size_);
  PrintF("%33s (%.1f times slower than full code gen)\n", "",
         total.TimesOf(full_code_gen_));

  // Normalize by source size (in kB), guarding against division by zero
  // when no source was recorded.
  double source_size_in_kb = static_cast<double>(source_size_) / 1024;
  double normalized_time = source_size_in_kb > 0
      ? total.InMillisecondsF() / source_size_in_kb
      : 0;
  double normalized_size_in_kb =
      source_size_in_kb > 0
          ? static_cast<double>(total_size_) / 1024 / source_size_in_kb
          : 0;
  PrintF("%33s %8.3f ms %7.3f kB allocated\n",
         "Average per kB source", normalized_time, normalized_size_in_kb);
}
13671
13672
13673void HStatistics::SaveTiming(const char* name, base::TimeDelta time,
13674 size_t size) {
13675 total_size_ += size;
13676 for (int i = 0; i < names_.length(); ++i) {
13677 if (strcmp(names_[i], name) == 0) {
13678 times_[i] += time;
13679 sizes_[i] += size;
13680 return;
13681 }
13682 }
13683 names_.Add(name);
13684 times_.Add(time);
13685 sizes_.Add(size);
13686}
13687
13688
// On phase exit: optionally dump the graph to the hydrogen trace, and in
// debug builds run a (non-full) graph verification pass.
HPhase::~HPhase() {
  if (ShouldProduceTraceOutput()) {
    isolate()->GetHTracer()->TraceHydrogen(name(), graph_);
  }

#ifdef DEBUG
  graph_->Verify(false);  // No full verify.
#endif
}
13698
13699} // namespace internal
13700} // namespace v8