blob: 9c5fa15358e03797a1a2b8757fa2385b066de3c8 [file] [log] [blame]
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00001// Copyright 2013 the V8 project authors. All rights reserved.
2// Use of this source code is governed by a BSD-style license that can be
3// found in the LICENSE file.
4
5#include "src/crankshaft/hydrogen.h"
6
7#include <sstream>
8
9#include "src/allocation-site-scopes.h"
10#include "src/ast/ast-numbering.h"
11#include "src/ast/scopeinfo.h"
12#include "src/code-factory.h"
13#include "src/crankshaft/hydrogen-bce.h"
Ben Murdoch4a90d5f2016-03-22 12:00:34 +000014#include "src/crankshaft/hydrogen-canonicalize.h"
15#include "src/crankshaft/hydrogen-check-elimination.h"
16#include "src/crankshaft/hydrogen-dce.h"
17#include "src/crankshaft/hydrogen-dehoist.h"
18#include "src/crankshaft/hydrogen-environment-liveness.h"
19#include "src/crankshaft/hydrogen-escape-analysis.h"
20#include "src/crankshaft/hydrogen-gvn.h"
21#include "src/crankshaft/hydrogen-infer-representation.h"
22#include "src/crankshaft/hydrogen-infer-types.h"
23#include "src/crankshaft/hydrogen-load-elimination.h"
24#include "src/crankshaft/hydrogen-mark-deoptimize.h"
25#include "src/crankshaft/hydrogen-mark-unreachable.h"
26#include "src/crankshaft/hydrogen-osr.h"
27#include "src/crankshaft/hydrogen-range-analysis.h"
28#include "src/crankshaft/hydrogen-redundant-phi.h"
29#include "src/crankshaft/hydrogen-removable-simulates.h"
30#include "src/crankshaft/hydrogen-representation-changes.h"
31#include "src/crankshaft/hydrogen-sce.h"
32#include "src/crankshaft/hydrogen-store-elimination.h"
33#include "src/crankshaft/hydrogen-uint32-analysis.h"
34#include "src/crankshaft/lithium-allocator.h"
35#include "src/crankshaft/typing.h"
Ben Murdoch097c5b22016-05-18 11:27:45 +010036#include "src/field-type.h"
Ben Murdoch4a90d5f2016-03-22 12:00:34 +000037#include "src/full-codegen/full-codegen.h"
38#include "src/ic/call-optimization.h"
39#include "src/ic/ic.h"
40// GetRootConstructor
41#include "src/ic/ic-inl.h"
42#include "src/isolate-inl.h"
43#include "src/parsing/parser.h"
44#include "src/runtime/runtime.h"
45
46#if V8_TARGET_ARCH_IA32
47#include "src/crankshaft/ia32/lithium-codegen-ia32.h" // NOLINT
48#elif V8_TARGET_ARCH_X64
49#include "src/crankshaft/x64/lithium-codegen-x64.h" // NOLINT
50#elif V8_TARGET_ARCH_ARM64
51#include "src/crankshaft/arm64/lithium-codegen-arm64.h" // NOLINT
52#elif V8_TARGET_ARCH_ARM
53#include "src/crankshaft/arm/lithium-codegen-arm.h" // NOLINT
54#elif V8_TARGET_ARCH_PPC
55#include "src/crankshaft/ppc/lithium-codegen-ppc.h" // NOLINT
56#elif V8_TARGET_ARCH_MIPS
57#include "src/crankshaft/mips/lithium-codegen-mips.h" // NOLINT
58#elif V8_TARGET_ARCH_MIPS64
59#include "src/crankshaft/mips64/lithium-codegen-mips64.h" // NOLINT
Ben Murdochda12d292016-06-02 14:46:10 +010060#elif V8_TARGET_ARCH_S390
61#include "src/crankshaft/s390/lithium-codegen-s390.h" // NOLINT
Ben Murdoch4a90d5f2016-03-22 12:00:34 +000062#elif V8_TARGET_ARCH_X87
63#include "src/crankshaft/x87/lithium-codegen-x87.h" // NOLINT
64#else
65#error Unsupported target architecture.
66#endif
67
68namespace v8 {
69namespace internal {
70
Ben Murdochc5610432016-08-08 18:44:38 +010071class HOptimizedGraphBuilderWithPositions : public HOptimizedGraphBuilder {
72 public:
73 explicit HOptimizedGraphBuilderWithPositions(CompilationInfo* info)
74 : HOptimizedGraphBuilder(info) {}
75
76#define DEF_VISIT(type) \
77 void Visit##type(type* node) override { \
78 SourcePosition old_position = SourcePosition::Unknown(); \
79 if (node->position() != RelocInfo::kNoPosition) { \
80 old_position = source_position(); \
81 SetSourcePosition(node->position()); \
82 } \
83 HOptimizedGraphBuilder::Visit##type(node); \
84 if (!old_position.IsUnknown()) { \
85 set_source_position(old_position); \
86 } \
87 }
88 EXPRESSION_NODE_LIST(DEF_VISIT)
89#undef DEF_VISIT
90
91#define DEF_VISIT(type) \
92 void Visit##type(type* node) override { \
93 SourcePosition old_position = SourcePosition::Unknown(); \
94 if (node->position() != RelocInfo::kNoPosition) { \
95 old_position = source_position(); \
96 SetSourcePosition(node->position()); \
97 } \
98 HOptimizedGraphBuilder::Visit##type(node); \
99 if (!old_position.IsUnknown()) { \
100 set_source_position(old_position); \
101 } \
102 }
103 STATEMENT_NODE_LIST(DEF_VISIT)
104#undef DEF_VISIT
105
106#define DEF_VISIT(type) \
107 void Visit##type(type* node) override { \
108 HOptimizedGraphBuilder::Visit##type(node); \
109 }
110 DECLARATION_NODE_LIST(DEF_VISIT)
111#undef DEF_VISIT
112};
113
// First phase of a Crankshaft compilation job: runs all eligibility checks
// (deopt support, flags/filters, parameter counts, generators, parser-side
// disables), type-checks the function, and builds the Hydrogen graph.
// Returns SUCCEEDED with graph_ set, or FAILED / an abort/retry status.
HCompilationJob::Status HCompilationJob::CreateGraphImpl() {
  bool dont_crankshaft = info()->shared_info()->dont_crankshaft();

  // Optimization requires a version of fullcode with deoptimization support.
  // Recompile the unoptimized version of the code if the current version
  // doesn't have deoptimization support already.
  // Otherwise, if we are gathering compilation time and space statistics
  // for hydrogen, gather baseline statistics for a fullcode compilation.
  bool should_recompile = !info()->shared_info()->has_deoptimization_support();
  if (should_recompile || FLAG_hydrogen_stats) {
    base::ElapsedTimer timer;
    if (FLAG_hydrogen_stats) {
      timer.Start();
    }
    if (!Compiler::EnsureDeoptimizationSupport(info())) {
      return FAILED;
    }
    if (FLAG_hydrogen_stats) {
      isolate()->GetHStatistics()->IncrementFullCodeGen(timer.Elapsed());
    }
  }
  DCHECK(info()->shared_info()->has_deoptimization_support());
  DCHECK(!info()->shared_info()->never_compiled());

  if (!isolate()->use_crankshaft() || dont_crankshaft) {
    // Crankshaft is entirely disabled.
    return FAILED;
  }

  // Check the whitelist for Crankshaft.
  if (!info()->shared_info()->PassesFilter(FLAG_hydrogen_filter)) {
    return AbortOptimization(kHydrogenFilter);
  }

  Scope* scope = info()->scope();
  if (LUnallocated::TooManyParameters(scope->num_parameters())) {
    // Crankshaft would require too many Lithium operands.
    return AbortOptimization(kTooManyParameters);
  }

  if (info()->is_osr() &&
      LUnallocated::TooManyParametersOrStackSlots(scope->num_parameters(),
                                                  scope->num_stack_slots())) {
    // Crankshaft would require too many Lithium operands.
    return AbortOptimization(kTooManyParametersLocals);
  }

  if (IsGeneratorFunction(info()->shared_info()->kind())) {
    // Crankshaft does not support generators.
    return AbortOptimization(kGenerator);
  }

  if (FLAG_trace_hydrogen) {
    isolate()->GetHTracer()->TraceCompilation(info());
  }

  // Optimization could have been disabled by the parser. Note that this check
  // is only needed because the Hydrogen graph builder is missing some bailouts.
  if (info()->shared_info()->optimization_disabled()) {
    return AbortOptimization(
        info()->shared_info()->disable_optimization_reason());
  }

  // The position-tracking builder is only used when positions are needed
  // (profiling/tracing); it is strictly more expensive.
  HOptimizedGraphBuilder* graph_builder =
      (info()->is_tracking_positions() || FLAG_trace_ic)
          ? new (info()->zone()) HOptimizedGraphBuilderWithPositions(info())
          : new (info()->zone()) HOptimizedGraphBuilder(info());

  // Type-check the function.
  AstTyper(info()->isolate(), info()->zone(), info()->closure(),
           info()->scope(), info()->osr_ast_id(), info()->literal(),
           graph_builder->bounds())
      .Run();

  graph_ = graph_builder->CreateGraph();

  if (isolate()->has_pending_exception()) {
    return FAILED;
  }

  if (graph_ == NULL) return FAILED;

  if (info()->dependencies()->HasAborted()) {
    // Dependency has changed during graph creation. Let's try again later.
    return RetryOptimization(kBailedOutDueToDependencyChange);
  }

  return SUCCEEDED;
}
203
204HCompilationJob::Status HCompilationJob::OptimizeGraphImpl() {
205 DCHECK(graph_ != NULL);
206 BailoutReason bailout_reason = kNoReason;
207
208 if (graph_->Optimize(&bailout_reason)) {
209 chunk_ = LChunk::NewChunk(graph_);
210 if (chunk_ != NULL) return SUCCEEDED;
211 } else if (bailout_reason != kNoReason) {
212 info()->AbortOptimization(bailout_reason);
213 }
214
215 return FAILED;
216}
217
// Final phase: runs Lithium code generation on chunk_. On success the code is
// registered with the compilation info and with the native context's list of
// optimized code objects.
HCompilationJob::Status HCompilationJob::GenerateCodeImpl() {
  DCHECK(chunk_ != NULL);
  DCHECK(graph_ != NULL);
  {
    // Deferred handles reference objects that were accessible during
    // graph creation. To make sure that we don't encounter inconsistencies
    // between graph creation and code generation, we disallow accessing
    // objects through deferred handles during the latter, with exceptions.
    DisallowDeferredHandleDereference no_deferred_handle_deref;
    Handle<Code> optimized_code = chunk_->Codegen();
    if (optimized_code.is_null()) {
      // Distinguish a plain codegen failure (no reason recorded yet) from an
      // abort that already set a bailout reason.
      if (info()->bailout_reason() == kNoReason) {
        return AbortOptimization(kCodeGenerationFailed);
      }
      return FAILED;
    }
    RegisterWeakObjectsInOptimizedCode(optimized_code);
    info()->SetCode(optimized_code);
  }
  // Add to the weak list of optimized code objects.
  info()->context()->native_context()->AddOptimizedCode(*info()->code());
  return SUCCEEDED;
}
241
// Creates an empty basic block owned by |graph| with a freshly allocated
// block id. All structural links (instructions, environment, dominator and
// loop info) start out unset; index fields use -1 until assigned.
HBasicBlock::HBasicBlock(HGraph* graph)
    : block_id_(graph->GetNextBlockID()),
      graph_(graph),
      phis_(4, graph->zone()),
      first_(NULL),
      last_(NULL),
      end_(NULL),
      loop_information_(NULL),
      predecessors_(2, graph->zone()),
      dominator_(NULL),
      dominated_blocks_(4, graph->zone()),
      last_environment_(NULL),
      argument_count_(-1),
      first_instruction_index_(-1),
      last_instruction_index_(-1),
      deleted_phis_(4, graph->zone()),
      parent_loop_header_(NULL),
      inlined_entry_block_(NULL),
      is_inline_return_target_(false),
      is_reachable_(true),
      dominates_loop_successors_(false),
      is_osr_entry_(false),
      is_ordered_(false) { }
265
266
267Isolate* HBasicBlock::isolate() const {
268 return graph_->isolate();
269}
270
271
272void HBasicBlock::MarkUnreachable() {
273 is_reachable_ = false;
274}
275
276
277void HBasicBlock::AttachLoopInformation() {
278 DCHECK(!IsLoopHeader());
279 loop_information_ = new(zone()) HLoopInformation(this, zone());
280}
281
282
283void HBasicBlock::DetachLoopInformation() {
284 DCHECK(IsLoopHeader());
285 loop_information_ = NULL;
286}
287
288
289void HBasicBlock::AddPhi(HPhi* phi) {
290 DCHECK(!IsStartBlock());
291 phis_.Add(phi, zone());
292 phi->SetBlock(this);
293}
294
295
296void HBasicBlock::RemovePhi(HPhi* phi) {
297 DCHECK(phi->block() == this);
298 DCHECK(phis_.Contains(phi));
299 phi->Kill();
300 phis_.RemoveElement(phi);
301 phi->SetBlock(NULL);
302}
303
304
// Appends |instr| at the end of this (still unfinished) block, lazily
// materializing the HBlockEntry marker the first time an instruction is
// added. |position|, when known, is attached to the instruction and entry.
void HBasicBlock::AddInstruction(HInstruction* instr, SourcePosition position) {
  // NOTE(review): the first DCHECK is subsumed by DCHECK(!IsFinished()) below.
  DCHECK(!IsStartBlock() || !IsFinished());
  DCHECK(!instr->IsLinked());
  DCHECK(!IsFinished());

  if (!position.IsUnknown()) {
    instr->set_position(position);
  }
  if (first_ == NULL) {
    // First instruction ever added: create the block-entry marker.
    DCHECK(last_environment() != NULL);
    DCHECK(!last_environment()->ast_id().IsNone());
    HBlockEntry* entry = new(zone()) HBlockEntry();
    entry->InitializeAsFirst(this);
    if (!position.IsUnknown()) {
      entry->set_position(position);
    } else {
      // Position-less instructions are only tolerated here for abnormal
      // exits (or when positions aren't tracked at all).
      DCHECK(!FLAG_hydrogen_track_positions ||
             !graph()->info()->IsOptimizing() || instr->IsAbnormalExit());
    }
    first_ = last_ = entry;
  }
  instr->InsertAfter(last_);
}
328
329
330HPhi* HBasicBlock::AddNewPhi(int merged_index) {
331 if (graph()->IsInsideNoSideEffectsScope()) {
332 merged_index = HPhi::kInvalidMergedIndex;
333 }
334 HPhi* phi = new(zone()) HPhi(merged_index, zone());
335 AddPhi(phi);
336 return phi;
337}
338
339
// Builds an HSimulate capturing the environment's pending changes (pushed
// values and assigned variables) as a deopt point for |ast_id|, then clears
// the environment's change history.
HSimulate* HBasicBlock::CreateSimulate(BailoutId ast_id,
                                       RemovableSimulate removable) {
  DCHECK(HasEnvironment());
  HEnvironment* environment = last_environment();
  // |ast_id| must be a legal bailout point for this closure (or a stub/none).
  DCHECK(ast_id.IsNone() ||
         ast_id == BailoutId::StubEntry() ||
         environment->closure()->shared()->VerifyBailoutId(ast_id));

  int push_count = environment->push_count();
  int pop_count = environment->pop_count();

  HSimulate* instr =
      new(zone()) HSimulate(ast_id, pop_count, zone(), removable);
#ifdef DEBUG
  instr->set_closure(environment->closure());
#endif
  // Order of pushed values: newest (top of stack) first. This allows
  // HSimulate::MergeWith() to easily append additional pushed values
  // that are older (from further down the stack).
  for (int i = 0; i < push_count; ++i) {
    instr->AddPushedValue(environment->ExpressionStackAt(i));
  }
  // Record every variable slot assigned since the last simulate.
  for (GrowableBitVector::Iterator it(environment->assigned_variables(),
                                      zone());
       !it.Done();
       it.Advance()) {
    int index = it.Current();
    instr->AddAssignedValue(index, environment->Lookup(index));
  }
  environment->ClearHistory();
  return instr;
}
372
373
374void HBasicBlock::Finish(HControlInstruction* end, SourcePosition position) {
375 DCHECK(!IsFinished());
376 AddInstruction(end, position);
377 end_ = end;
378 for (HSuccessorIterator it(end); !it.Done(); it.Advance()) {
379 it.Current()->RegisterPredecessor(this);
380 }
381}
382
383
384void HBasicBlock::Goto(HBasicBlock* block, SourcePosition position,
385 FunctionState* state, bool add_simulate) {
386 bool drop_extra = state != NULL &&
387 state->inlining_kind() == NORMAL_RETURN;
388
389 if (block->IsInlineReturnTarget()) {
390 HEnvironment* env = last_environment();
391 int argument_count = env->arguments_environment()->parameter_count();
392 AddInstruction(new(zone())
393 HLeaveInlined(state->entry(), argument_count),
394 position);
395 UpdateEnvironment(last_environment()->DiscardInlined(drop_extra));
396 }
397
398 if (add_simulate) AddNewSimulate(BailoutId::None(), position);
399 HGoto* instr = new(zone()) HGoto(block);
400 Finish(instr, position);
401}
402
403
404void HBasicBlock::AddLeaveInlined(HValue* return_value, FunctionState* state,
405 SourcePosition position) {
406 HBasicBlock* target = state->function_return();
407 bool drop_extra = state->inlining_kind() == NORMAL_RETURN;
408
409 DCHECK(target->IsInlineReturnTarget());
410 DCHECK(return_value != NULL);
411 HEnvironment* env = last_environment();
412 int argument_count = env->arguments_environment()->parameter_count();
413 AddInstruction(new(zone()) HLeaveInlined(state->entry(), argument_count),
414 position);
415 UpdateEnvironment(last_environment()->DiscardInlined(drop_extra));
416 last_environment()->Push(return_value);
417 AddNewSimulate(BailoutId::None(), position);
418 HGoto* instr = new(zone()) HGoto(target);
419 Finish(instr, position);
420}
421
422
423void HBasicBlock::SetInitialEnvironment(HEnvironment* env) {
424 DCHECK(!HasEnvironment());
425 DCHECK(first() == NULL);
426 UpdateEnvironment(env);
427}
428
429
430void HBasicBlock::UpdateEnvironment(HEnvironment* env) {
431 last_environment_ = env;
432 graph()->update_maximum_environment_size(env->first_expression_index());
433}
434
435
// Stamps |ast_id| onto the simulate instruction and environment of every
// predecessor of this join block. Every predecessor must end in a goto whose
// immediately preceding instruction is a simulate.
void HBasicBlock::SetJoinId(BailoutId ast_id) {
  int length = predecessors_.length();
  DCHECK(length > 0);
  for (int i = 0; i < length; i++) {
    HBasicBlock* predecessor = predecessors_[i];
    DCHECK(predecessor->end()->IsGoto());
    HSimulate* simulate = HSimulate::cast(predecessor->end()->previous());
    // Only the first predecessor's closure is checked against |ast_id|;
    // NOTE(review): presumably the remaining edges were built identically.
    DCHECK(i != 0 ||
           (predecessor->last_environment()->closure().is_null() ||
            predecessor->last_environment()->closure()->shared()
                ->VerifyBailoutId(ast_id)));
    simulate->set_ast_id(ast_id);
    predecessor->last_environment()->set_ast_id(ast_id);
  }
}
451
452
453bool HBasicBlock::Dominates(HBasicBlock* other) const {
454 HBasicBlock* current = other->dominator();
455 while (current != NULL) {
456 if (current == this) return true;
457 current = current->dominator();
458 }
459 return false;
460}
461
462
463bool HBasicBlock::EqualToOrDominates(HBasicBlock* other) const {
464 if (this == other) return true;
465 return Dominates(other);
466}
467
468
469int HBasicBlock::LoopNestingDepth() const {
470 const HBasicBlock* current = this;
471 int result = (current->IsLoopHeader()) ? 1 : 0;
472 while (current->parent_loop_header() != NULL) {
473 current = current->parent_loop_header();
474 result++;
475 }
476 return result;
477}
478
479
480void HBasicBlock::PostProcessLoopHeader(IterationStatement* stmt) {
481 DCHECK(IsLoopHeader());
482
483 SetJoinId(stmt->EntryId());
484 if (predecessors()->length() == 1) {
485 // This is a degenerated loop.
486 DetachLoopInformation();
487 return;
488 }
489
490 // Only the first entry into the loop is from outside the loop. All other
491 // entries must be back edges.
492 for (int i = 1; i < predecessors()->length(); ++i) {
493 loop_information()->RegisterBackEdge(predecessors()->at(i));
494 }
495}
496
497
498void HBasicBlock::MarkSuccEdgeUnreachable(int succ) {
499 DCHECK(IsFinished());
500 HBasicBlock* succ_block = end()->SuccessorAt(succ);
501
502 DCHECK(succ_block->predecessors()->length() == 1);
503 succ_block->MarkUnreachable();
504}
505
506
// Records |pred| as a predecessor. Loop headers feed the incoming environment
// values into their phis; ordinary joins merge environments; the very first
// edge into a fresh block simply copies the predecessor's environment.
void HBasicBlock::RegisterPredecessor(HBasicBlock* pred) {
  if (HasPredecessor()) {
    // Only loop header blocks can have a predecessor added after
    // instructions have been added to the block (they have phis for all
    // values in the environment, these phis may be eliminated later).
    DCHECK(IsLoopHeader() || first_ == NULL);
    HEnvironment* incoming_env = pred->last_environment();
    if (IsLoopHeader()) {
      DCHECK_EQ(phis()->length(), incoming_env->length());
      for (int i = 0; i < phis_.length(); ++i) {
        phis_[i]->AddInput(incoming_env->values()->at(i));
      }
    } else {
      last_environment()->AddIncomingEdge(this, pred->last_environment());
    }
  } else if (!HasEnvironment() && !IsFinished()) {
    DCHECK(!IsLoopHeader());
    SetInitialEnvironment(pred->last_environment()->Copy());
  }

  predecessors_.Add(pred, zone());
}
529
530
531void HBasicBlock::AddDominatedBlock(HBasicBlock* block) {
532 DCHECK(!dominated_blocks_.Contains(block));
533 // Keep the list of dominated blocks sorted such that if there is two
534 // succeeding block in this list, the predecessor is before the successor.
535 int index = 0;
536 while (index < dominated_blocks_.length() &&
537 dominated_blocks_[index]->block_id() < block->block_id()) {
538 ++index;
539 }
540 dominated_blocks_.InsertAt(index, block, zone());
541}
542
543
// Updates this block's immediate dominator to the common dominator of its
// current dominator and |other|, using the classic intersection walk: the
// block with the larger id repeatedly steps up its dominator chain (block
// ids follow reverse post order, so dominators have smaller ids).
void HBasicBlock::AssignCommonDominator(HBasicBlock* other) {
  if (dominator_ == NULL) {
    // First incoming edge processed: |other| is the tentative dominator.
    dominator_ = other;
    other->AddDominatedBlock(this);
  } else if (other->dominator() != NULL) {
    HBasicBlock* first = dominator_;
    HBasicBlock* second = other;

    while (first != second) {
      if (first->block_id() > second->block_id()) {
        first = first->dominator();
      } else {
        second = second->dominator();
      }
      DCHECK(first != NULL && second != NULL);
    }

    if (dominator_ != first) {
      // Dominator changed: re-file this block under the new dominator.
      DCHECK(dominator_->dominated_blocks_.Contains(this));
      dominator_->dominated_blocks_.RemoveElement(this);
      dominator_ = first;
      first->AddDominatedBlock(this);
    }
  }
}
569
570
// Called on a loop header: scans the loop's blocks in increasing block id
// order and marks each block that must dominate all later reachable blocks
// of the loop, by balancing predecessor edges seen against successor edges
// emitted.
void HBasicBlock::AssignLoopSuccessorDominators() {
  // Mark blocks that dominate all subsequent reachable blocks inside their
  // loop. Exploit the fact that blocks are sorted in reverse post order. When
  // the loop is visited in increasing block id order, if the number of
  // non-loop-exiting successor edges at the dominator_candidate block doesn't
  // exceed the number of previously encountered predecessor edges, there is no
  // path from the loop header to any block with higher id that doesn't go
  // through the dominator_candidate block. In this case, the
  // dominator_candidate block is guaranteed to dominate all blocks reachable
  // from it with higher ids.
  HBasicBlock* last = loop_information()->GetLastBackEdge();
  int outstanding_successors = 1;  // one edge from the pre-header
  // Header always dominates everything.
  MarkAsLoopSuccessorDominator();
  for (int j = block_id(); j <= last->block_id(); ++j) {
    HBasicBlock* dominator_candidate = graph_->blocks()->at(j);
    for (HPredecessorIterator it(dominator_candidate); !it.Done();
         it.Advance()) {
      HBasicBlock* predecessor = it.Current();
      // Don't count back edges.
      if (predecessor->block_id() < dominator_candidate->block_id()) {
        outstanding_successors--;
      }
    }

    // If more successors than predecessors have been seen in the loop up to
    // now, it's not possible to guarantee that the current block dominates
    // all of the blocks with higher IDs. In this case, assume conservatively
    // that those paths through loop that don't go through the current block
    // contain all of the loop's dependencies. Also be careful to record
    // dominator information about the current loop that's being processed,
    // and not nested loops, which will be processed when
    // AssignLoopSuccessorDominators gets called on their header.
    DCHECK(outstanding_successors >= 0);
    HBasicBlock* parent_loop_header = dominator_candidate->parent_loop_header();
    if (outstanding_successors == 0 &&
        (parent_loop_header == this && !dominator_candidate->IsLoopHeader())) {
      dominator_candidate->MarkAsLoopSuccessorDominator();
    }
    HControlInstruction* end = dominator_candidate->end();
    for (HSuccessorIterator it(end); !it.Done(); it.Advance()) {
      HBasicBlock* successor = it.Current();
      // Only count successors that remain inside the loop and don't loop back
      // to a loop header.
      if (successor->block_id() > dominator_candidate->block_id() &&
          successor->block_id() <= last->block_id()) {
        // Backwards edges must land on loop headers.
        // NOTE(review): the first disjunct below repeats the enclosing
        // if-condition, so this DCHECK is vacuously true as written and
        // cannot actually enforce the stated invariant.
        DCHECK(successor->block_id() > dominator_candidate->block_id() ||
               successor->IsLoopHeader());
        outstanding_successors++;
      }
    }
  }
}
625
626
627int HBasicBlock::PredecessorIndexOf(HBasicBlock* predecessor) const {
628 for (int i = 0; i < predecessors_.length(); ++i) {
629 if (predecessors_[i] == predecessor) return i;
630 }
631 UNREACHABLE();
632 return -1;
633}
634
635
#ifdef DEBUG
// Debug-only block sanity checks: the block must be finished and have a
// valid id, and join blocks must receive only split edges (no predecessor
// with a second successor).
void HBasicBlock::Verify() {
  DCHECK(IsFinished());
  DCHECK(block_id() >= 0);

  if (predecessors_.length() > 1) {
    for (int i = 0; i < predecessors_.length(); ++i) {
      DCHECK(predecessors_[i]->end()->SecondSuccessor() == NULL);
    }
  }
}
#endif
650
651
652void HLoopInformation::RegisterBackEdge(HBasicBlock* block) {
653 this->back_edges_.Add(block, block->zone());
654 AddBlock(block);
655}
656
657
658HBasicBlock* HLoopInformation::GetLastBackEdge() const {
659 int max_id = -1;
660 HBasicBlock* result = NULL;
661 for (int i = 0; i < back_edges_.length(); ++i) {
662 HBasicBlock* cur = back_edges_[i];
663 if (cur->block_id() > max_id) {
664 max_id = cur->block_id();
665 result = cur;
666 }
667 }
668 return result;
669}
670
671
672void HLoopInformation::AddBlock(HBasicBlock* block) {
673 if (block == loop_header()) return;
674 if (block->parent_loop_header() == loop_header()) return;
675 if (block->parent_loop_header() != NULL) {
676 AddBlock(block->parent_loop_header());
677 } else {
678 block->set_parent_loop_header(loop_header());
679 blocks_.Add(block, block->zone());
680 for (int i = 0; i < block->predecessors()->length(); ++i) {
681 AddBlock(block->predecessors()->at(i));
682 }
683 }
684}
685
686
687#ifdef DEBUG
688
689// Checks reachability of the blocks in this graph and stores a bit in
690// the BitVector "reachable()" for every block that can be reached
691// from the start block of the graph. If "dont_visit" is non-null, the given
692// block is treated as if it would not be part of the graph. "visited_count()"
693// returns the number of reachable blocks.
694class ReachabilityAnalyzer BASE_EMBEDDED {
695 public:
696 ReachabilityAnalyzer(HBasicBlock* entry_block,
697 int block_count,
698 HBasicBlock* dont_visit)
699 : visited_count_(0),
700 stack_(16, entry_block->zone()),
701 reachable_(block_count, entry_block->zone()),
702 dont_visit_(dont_visit) {
703 PushBlock(entry_block);
704 Analyze();
705 }
706
707 int visited_count() const { return visited_count_; }
708 const BitVector* reachable() const { return &reachable_; }
709
710 private:
711 void PushBlock(HBasicBlock* block) {
712 if (block != NULL && block != dont_visit_ &&
713 !reachable_.Contains(block->block_id())) {
714 reachable_.Add(block->block_id());
715 stack_.Add(block, block->zone());
716 visited_count_++;
717 }
718 }
719
720 void Analyze() {
721 while (!stack_.is_empty()) {
722 HControlInstruction* end = stack_.RemoveLast()->end();
723 for (HSuccessorIterator it(end); !it.Done(); it.Advance()) {
724 PushBlock(it.Current());
725 }
726 }
727 }
728
729 int visited_count_;
730 ZoneList<HBasicBlock*> stack_;
731 BitVector reachable_;
732 HBasicBlock* dont_visit_;
733};
734
735
// Debug-only structural validation of the whole graph: per-block invariants,
// successor/predecessor symmetry, phi arguments, and join-block agreement.
// With |do_full_verify| it additionally checks full connectivity and that
// every dominator assignment is consistent (quadratic in block count).
void HGraph::Verify(bool do_full_verify) const {
  Heap::RelocationLock relocation_lock(isolate()->heap());
  AllowHandleDereference allow_deref;
  AllowDeferredHandleDereference allow_deferred_deref;
  for (int i = 0; i < blocks_.length(); i++) {
    HBasicBlock* block = blocks_.at(i);

    block->Verify();

    // Check that every block contains at least one node and that only the last
    // node is a control instruction.
    HInstruction* current = block->first();
    DCHECK(current != NULL && current->IsBlockEntry());
    while (current != NULL) {
      DCHECK((current->next() == NULL) == current->IsControlInstruction());
      DCHECK(current->block() == block);
      current->Verify();
      current = current->next();
    }

    // Check that successors are correctly set.
    HBasicBlock* first = block->end()->FirstSuccessor();
    HBasicBlock* second = block->end()->SecondSuccessor();
    DCHECK(second == NULL || first != NULL);

    // Check that the predecessor array is correct.
    if (first != NULL) {
      DCHECK(first->predecessors()->Contains(block));
      if (second != NULL) {
        DCHECK(second->predecessors()->Contains(block));
      }
    }

    // Check that phis have correct arguments.
    for (int j = 0; j < block->phis()->length(); j++) {
      HPhi* phi = block->phis()->at(j);
      phi->Verify();
    }

    // Check that all join blocks have predecessors that end with an
    // unconditional goto and agree on their environment node id.
    if (block->predecessors()->length() >= 2) {
      BailoutId id =
          block->predecessors()->first()->last_environment()->ast_id();
      for (int k = 0; k < block->predecessors()->length(); k++) {
        HBasicBlock* predecessor = block->predecessors()->at(k);
        DCHECK(predecessor->end()->IsGoto() ||
               predecessor->end()->IsDeoptimize());
        DCHECK(predecessor->last_environment()->ast_id() == id);
      }
    }
  }

  // Check special property of first block to have no predecessors.
  DCHECK(blocks_.at(0)->predecessors()->is_empty());

  if (do_full_verify) {
    // Check that the graph is fully connected.
    ReachabilityAnalyzer analyzer(entry_block_, blocks_.length(), NULL);
    DCHECK(analyzer.visited_count() == blocks_.length());

    // Check that entry block dominator is NULL.
    DCHECK(entry_block_->dominator() == NULL);

    // Check dominators.
    for (int i = 0; i < blocks_.length(); ++i) {
      HBasicBlock* block = blocks_.at(i);
      if (block->dominator() == NULL) {
        // Only start block may have no dominator assigned to.
        DCHECK(i == 0);
      } else {
        // Assert that block is unreachable if dominator must not be visited.
        ReachabilityAnalyzer dominator_analyzer(entry_block_,
                                                blocks_.length(),
                                                block->dominator());
        DCHECK(!dominator_analyzer.reachable()->Contains(block->block_id()));
      }
    }
  }
}
816
817#endif
818
819
// Returns the cached HConstant for |value|, creating and caching it on first
// use. New constants are inserted right after the entry block's first
// instruction so they dominate all uses.
HConstant* HGraph::GetConstant(SetOncePointer<HConstant>* pointer,
                               int32_t value) {
  if (!pointer->is_set()) {
    // Can't pass GetInvalidContext() to HConstant::New, because that will
    // recursively call GetConstant
    HConstant* constant = HConstant::New(isolate(), zone(), NULL, value);
    constant->InsertAfter(entry_block()->first());
    pointer->set(constant);
    return constant;
  }
  return ReinsertConstantIfNecessary(pointer->get());
}
832
833
834HConstant* HGraph::ReinsertConstantIfNecessary(HConstant* constant) {
835 if (!constant->IsLinked()) {
836 // The constant was removed from the graph. Reinsert.
837 constant->ClearFlag(HValue::kIsDead);
838 constant->InsertAfter(entry_block()->first());
839 }
840 return constant;
841}
842
843
844HConstant* HGraph::GetConstant0() {
845 return GetConstant(&constant_0_, 0);
846}
847
848
849HConstant* HGraph::GetConstant1() {
850 return GetConstant(&constant_1_, 1);
851}
852
853
854HConstant* HGraph::GetConstantMinus1() {
855 return GetConstant(&constant_minus1_, -1);
856}
857
858
859HConstant* HGraph::GetConstantBool(bool value) {
860 return value ? GetConstantTrue() : GetConstantFalse();
861}
862
// Generates HGraph::GetConstant<Name>() accessors for singleton oddball
// constants. Each builds its HConstant lazily from the isolate's factory
// value and map, inserts it after the entry block's first instruction, and
// caches it; later calls reinsert the cached constant if it was removed.
#define DEFINE_GET_CONSTANT(Name, name, constant, type, htype, boolean_value, \
                            undetectable)                                      \
  HConstant* HGraph::GetConstant##Name() {                                     \
    if (!constant_##name##_.is_set()) {                                        \
      HConstant* constant = new (zone()) HConstant(                            \
          Unique<Object>::CreateImmovable(isolate()->factory()->constant()),   \
          Unique<Map>::CreateImmovable(isolate()->factory()->type##_map()),    \
          false, Representation::Tagged(), htype, true, boolean_value,         \
          undetectable, ODDBALL_TYPE);                                         \
      constant->InsertAfter(entry_block()->first());                           \
      constant_##name##_.set(constant);                                        \
    }                                                                          \
    return ReinsertConstantIfNecessary(constant_##name##_.get());              \
  }

DEFINE_GET_CONSTANT(Undefined, undefined, undefined_value, undefined,
                    HType::Undefined(), false, true)
DEFINE_GET_CONSTANT(True, true, true_value, boolean, HType::Boolean(), true,
                    false)
DEFINE_GET_CONSTANT(False, false, false_value, boolean, HType::Boolean(), false,
                    false)
DEFINE_GET_CONSTANT(Hole, the_hole, the_hole_value, the_hole, HType::None(),
                    false, false)
DEFINE_GET_CONSTANT(Null, null, null_value, null, HType::Null(), false, true)
DEFINE_GET_CONSTANT(OptimizedOut, optimized_out, optimized_out, optimized_out,
                    HType::None(), false, false)

#undef DEFINE_GET_CONSTANT
891
// Generates HGraph::IsConstant<Name>() predicates: true iff the cached
// singleton for <name> exists and |constant| is exactly that object.
#define DEFINE_IS_CONSTANT(Name, name)                                         \
bool HGraph::IsConstant##Name(HConstant* constant) {                           \
  return constant_##name##_.is_set() && constant == constant_##name##_.get(); \
}
DEFINE_IS_CONSTANT(Undefined, undefined)
DEFINE_IS_CONSTANT(0, 0)
DEFINE_IS_CONSTANT(1, 1)
DEFINE_IS_CONSTANT(Minus1, minus1)
DEFINE_IS_CONSTANT(True, true)
DEFINE_IS_CONSTANT(False, false)
DEFINE_IS_CONSTANT(Hole, the_hole)
DEFINE_IS_CONSTANT(Null, null)

#undef DEFINE_IS_CONSTANT
906
907
908HConstant* HGraph::GetInvalidContext() {
909 return GetConstant(&constant_invalid_context_, 0xFFFFC0C7);
910}
911
912
913bool HGraph::IsStandardConstant(HConstant* constant) {
914 if (IsConstantUndefined(constant)) return true;
915 if (IsConstant0(constant)) return true;
916 if (IsConstant1(constant)) return true;
917 if (IsConstantMinus1(constant)) return true;
918 if (IsConstantTrue(constant)) return true;
919 if (IsConstantFalse(constant)) return true;
920 if (IsConstantHole(constant)) return true;
921 if (IsConstantNull(constant)) return true;
922 return false;
923}
924
925
926HGraphBuilder::IfBuilder::IfBuilder() : builder_(NULL), needs_compare_(true) {}
927
928
929HGraphBuilder::IfBuilder::IfBuilder(HGraphBuilder* builder)
930 : needs_compare_(true) {
931 Initialize(builder);
932}
933
934
// IfBuilder resumed from a previously captured continuation: no compare is
// emitted; the true/false blocks come from |continuation|.
HGraphBuilder::IfBuilder::IfBuilder(HGraphBuilder* builder,
                                    HIfContinuation* continuation)
    : needs_compare_(false), first_true_block_(NULL), first_false_block_(NULL) {
  InitializeDontCreateBlocks(builder);
  continuation->Continue(&first_true_block_, &first_false_block_);
}
941
942
// Resets all bookkeeping state without allocating the true/false blocks
// (callers that already have blocks, e.g. the continuation ctor, use this).
void HGraphBuilder::IfBuilder::InitializeDontCreateBlocks(
    HGraphBuilder* builder) {
  builder_ = builder;
  finished_ = false;
  did_then_ = false;
  did_else_ = false;
  did_else_if_ = false;
  did_and_ = false;
  did_or_ = false;
  captured_ = false;
  pending_merge_block_ = false;
  split_edge_merge_block_ = NULL;
  merge_at_join_blocks_ = NULL;
  normal_merge_at_join_block_count_ = 0;
  deopt_merge_at_join_block_count_ = 0;
}
959
960
// Full initialization: reset state and allocate the "then" and "else" target
// blocks, each with a copy of the current environment.
void HGraphBuilder::IfBuilder::Initialize(HGraphBuilder* builder) {
  InitializeDontCreateBlocks(builder);
  HEnvironment* env = builder->environment();
  first_true_block_ = builder->CreateBasicBlock(env->Copy());
  first_false_block_ = builder->CreateBasicBlock(env->Copy());
}
967
968
// Wires |compare| into the control flow under construction. Handles three
// cases: starting an else-if arm (fresh true/false blocks), continuing an
// And()/Or() chain (route one edge through the split-edge merge block), or
// the plain first compare. Finishes the current block with |compare|.
HControlInstruction* HGraphBuilder::IfBuilder::AddCompare(
    HControlInstruction* compare) {
  DCHECK(did_then_ == did_else_);
  if (did_else_) {
    // Handle if-then-elseif: reset per-arm state and allocate fresh target
    // blocks for the new condition.
    did_else_if_ = true;
    did_else_ = false;
    did_then_ = false;
    did_and_ = false;
    did_or_ = false;
    pending_merge_block_ = false;
    split_edge_merge_block_ = NULL;
    HEnvironment* env = builder()->environment();
    first_true_block_ = builder()->CreateBasicBlock(env->Copy());
    first_false_block_ = builder()->CreateBasicBlock(env->Copy());
  }
  if (split_edge_merge_block_ != NULL) {
    HEnvironment* env = first_false_block_->last_environment();
    HBasicBlock* split_edge = builder()->CreateBasicBlock(env->Copy());
    if (did_or_) {
      // For Or(), a true result short-circuits to the merged true block.
      compare->SetSuccessorAt(0, split_edge);
      compare->SetSuccessorAt(1, first_false_block_);
    } else {
      // For And(), a false result short-circuits to the merged false block.
      compare->SetSuccessorAt(0, first_true_block_);
      compare->SetSuccessorAt(1, split_edge);
    }
    builder()->GotoNoSimulate(split_edge, split_edge_merge_block_);
  } else {
    compare->SetSuccessorAt(0, first_true_block_);
    compare->SetSuccessorAt(1, first_false_block_);
  }
  builder()->FinishCurrentBlock(compare);
  needs_compare_ = false;
  return compare;
}
1004
1005
// Starts a short-circuit OR: all true edges funnel into a shared merge block
// (created lazily), and evaluation of the next condition continues in the
// previous false block. Cannot be mixed with And() in the same condition.
void HGraphBuilder::IfBuilder::Or() {
  DCHECK(!needs_compare_);
  DCHECK(!did_and_);
  did_or_ = true;
  HEnvironment* env = first_false_block_->last_environment();
  if (split_edge_merge_block_ == NULL) {
    split_edge_merge_block_ = builder()->CreateBasicBlock(env->Copy());
    builder()->GotoNoSimulate(first_true_block_, split_edge_merge_block_);
    first_true_block_ = split_edge_merge_block_;
  }
  builder()->set_current_block(first_false_block_);
  first_false_block_ = builder()->CreateBasicBlock(env->Copy());
}
1019
1020
// Starts a short-circuit AND: mirror image of Or() — false edges funnel into
// the shared merge block and the next condition is built in the true block.
void HGraphBuilder::IfBuilder::And() {
  DCHECK(!needs_compare_);
  DCHECK(!did_or_);
  did_and_ = true;
  HEnvironment* env = first_false_block_->last_environment();
  if (split_edge_merge_block_ == NULL) {
    split_edge_merge_block_ = builder()->CreateBasicBlock(env->Copy());
    builder()->GotoNoSimulate(first_false_block_, split_edge_merge_block_);
    first_false_block_ = split_edge_merge_block_;
  }
  builder()->set_current_block(first_true_block_);
  first_true_block_ = builder()->CreateBasicBlock(env->Copy());
}
1034
1035
// Ends this if without merging: the final then/else blocks are stored in
// |continuation| so another IfBuilder can resume from them later. Leaves the
// builder with no current block.
void HGraphBuilder::IfBuilder::CaptureContinuation(
    HIfContinuation* continuation) {
  DCHECK(!did_else_if_);
  DCHECK(!finished_);
  DCHECK(!captured_);

  HBasicBlock* true_block = NULL;
  HBasicBlock* false_block = NULL;
  Finish(&true_block, &false_block);
  DCHECK(true_block != NULL);
  DCHECK(false_block != NULL);
  continuation->Capture(true_block, false_block);
  captured_ = true;
  builder()->set_current_block(NULL);
  End();
}
1052
1053
// Ends this if by routing its unfinished then/else blocks into the branches
// of an existing |continuation| rather than creating a fresh merge block.
void HGraphBuilder::IfBuilder::JoinContinuation(HIfContinuation* continuation) {
  DCHECK(!did_else_if_);
  DCHECK(!finished_);
  DCHECK(!captured_);
  HBasicBlock* true_block = NULL;
  HBasicBlock* false_block = NULL;
  Finish(&true_block, &false_block);
  merge_at_join_blocks_ = NULL;
  if (true_block != NULL && !true_block->IsFinished()) {
    DCHECK(continuation->IsTrueReachable());
    builder()->GotoNoSimulate(true_block, continuation->true_branch());
  }
  if (false_block != NULL && !false_block->IsFinished()) {
    DCHECK(continuation->IsFalseReachable());
    builder()->GotoNoSimulate(false_block, continuation->false_branch());
  }
  captured_ = true;
  End();
}
1073
1074
// Begins the "then" arm: makes first_true_block_ the current block. If no
// compare was ever added, emits an always-false branch so the then arm is
// still formally reachable for the graph builder.
void HGraphBuilder::IfBuilder::Then() {
  DCHECK(!captured_);
  DCHECK(!finished_);
  did_then_ = true;
  if (needs_compare_) {
    // Handle if's without any expressions, they jump directly to the "else"
    // branch. However, we must pretend that the "then" branch is reachable,
    // so that the graph builder visits it and sees any live range extending
    // constructs within it.
    HConstant* constant_false = builder()->graph()->GetConstantFalse();
    ToBooleanICStub::Types boolean_type = ToBooleanICStub::Types();
    boolean_type.Add(ToBooleanICStub::BOOLEAN);
    HBranch* branch = builder()->New<HBranch>(
        constant_false, boolean_type, first_true_block_, first_false_block_);
    builder()->FinishCurrentBlock(branch);
  }
  builder()->set_current_block(first_true_block_);
  pending_merge_block_ = true;
}
1094
1095
// Begins the "else" arm: records the then arm's exit for the final merge and
// makes first_false_block_ the current block.
void HGraphBuilder::IfBuilder::Else() {
  DCHECK(did_then_);
  DCHECK(!captured_);
  DCHECK(!finished_);
  AddMergeAtJoinBlock(false);
  builder()->set_current_block(first_false_block_);
  pending_merge_block_ = true;
  did_else_ = true;
}
1105
1106
// Terminates the current arm with an eager deoptimization; the arm is
// recorded as a deopt exit so End() pads rather than merges it.
void HGraphBuilder::IfBuilder::Deopt(Deoptimizer::DeoptReason reason) {
  DCHECK(did_then_);
  builder()->Add<HDeoptimize>(reason, Deoptimizer::EAGER);
  AddMergeAtJoinBlock(true);
}
1112
1113
// Terminates the current arm with a return of |value|; -1 is the standard
// "pop caller-determined argument count" marker.
void HGraphBuilder::IfBuilder::Return(HValue* value) {
  HValue* parameter_count = builder()->graph()->GetConstantMinus1();
  builder()->FinishExitCurrentBlock(
      builder()->New<HReturn>(value, parameter_count));
  AddMergeAtJoinBlock(false);
}
1120
1121
1122void HGraphBuilder::IfBuilder::AddMergeAtJoinBlock(bool deopt) {
1123 if (!pending_merge_block_) return;
1124 HBasicBlock* block = builder()->current_block();
1125 DCHECK(block == NULL || !block->IsFinished());
1126 MergeAtJoinBlock* record = new (builder()->zone())
1127 MergeAtJoinBlock(block, deopt, merge_at_join_blocks_);
1128 merge_at_join_blocks_ = record;
1129 if (block != NULL) {
1130 DCHECK(block->end() == NULL);
1131 if (deopt) {
1132 normal_merge_at_join_block_count_++;
1133 } else {
1134 deopt_merge_at_join_block_count_++;
1135 }
1136 }
1137 builder()->set_current_block(NULL);
1138 pending_merge_block_ = false;
1139}
1140
1141
// Closes both arms, synthesizing empty Then()/Else() arms if the user never
// opened them, and records their exits for merging.
void HGraphBuilder::IfBuilder::Finish() {
  DCHECK(!finished_);
  if (!did_then_) {
    Then();
  }
  AddMergeAtJoinBlock(false);
  if (!did_else_) {
    Else();
    AddMergeAtJoinBlock(false);
  }
  finished_ = true;
}
1154
1155
// Finish() variant that also reports the final then/else exit blocks. The
// merge list is a stack, so the else record is first and the then record is
// its successor; exactly two records are expected here.
void HGraphBuilder::IfBuilder::Finish(HBasicBlock** then_continuation,
                                      HBasicBlock** else_continuation) {
  Finish();

  MergeAtJoinBlock* else_record = merge_at_join_blocks_;
  if (else_continuation != NULL) {
    *else_continuation = else_record->block_;
  }
  MergeAtJoinBlock* then_record = else_record->next_;
  if (then_continuation != NULL) {
    *then_continuation = then_record->block_;
  }
  DCHECK(then_record->next_ == NULL);
}
1170
1171
// Ends the if when control cannot continue past it: no merge block is built
// and the builder is left without a current block.
void HGraphBuilder::IfBuilder::EndUnreachable() {
  if (captured_) return;
  Finish();
  builder()->set_current_block(nullptr);
}
1177
1178
// Ends the if: joins all recorded non-deopt arm exits at a merge block (or
// reuses the single surviving block when only one arm falls through), and
// finishes deopt arms with abnormal exits. Only the *sum* of the two merge
// counters matters here.
void HGraphBuilder::IfBuilder::End() {
  if (captured_) return;
  Finish();

  int total_merged_blocks = normal_merge_at_join_block_count_ +
                            deopt_merge_at_join_block_count_;
  DCHECK(total_merged_blocks >= 1);
  HBasicBlock* merge_block =
      total_merged_blocks == 1 ? NULL : builder()->graph()->CreateBasicBlock();

  // Merge non-deopt blocks first to ensure environment has right size for
  // padding.
  MergeAtJoinBlock* current = merge_at_join_blocks_;
  while (current != NULL) {
    if (!current->deopt_ && current->block_ != NULL) {
      // If there is only one block that makes it through to the end of the
      // if, then just set it as the current block and continue rather than
      // creating an unnecessary merge block.
      if (total_merged_blocks == 1) {
        builder()->set_current_block(current->block_);
        return;
      }
      builder()->GotoNoSimulate(current->block_, merge_block);
    }
    current = current->next_;
  }

  // Merge deopt blocks, padding when necessary.
  current = merge_at_join_blocks_;
  while (current != NULL) {
    if (current->deopt_ && current->block_ != NULL) {
      current->block_->FinishExit(
          HAbnormalExit::New(builder()->isolate(), builder()->zone(), NULL),
          SourcePosition::Unknown());
    }
    current = current->next_;
  }
  builder()->set_current_block(merge_block);
}
1218
1219
// Builds a while(true)-style loop: no context, no induction variable.
HGraphBuilder::LoopBuilder::LoopBuilder(HGraphBuilder* builder) {
  Initialize(builder, NULL, kWhileTrue, NULL);
}
1223
1224
// Counted loop whose induction variable steps by 1 in |direction|.
HGraphBuilder::LoopBuilder::LoopBuilder(HGraphBuilder* builder, HValue* context,
                                        LoopBuilder::Direction direction) {
  Initialize(builder, context, direction, builder->graph()->GetConstant1());
}
1229
1230
1231HGraphBuilder::LoopBuilder::LoopBuilder(HGraphBuilder* builder, HValue* context,
1232 LoopBuilder::Direction direction,
1233 HValue* increment_amount) {
1234 Initialize(builder, context, direction, increment_amount);
1235 increment_amount_ = increment_amount;
1236}
1237
1238
// Common constructor body: record parameters and allocate the loop header;
// the body/exit blocks are created lazily in BeginBody()/Break().
void HGraphBuilder::LoopBuilder::Initialize(HGraphBuilder* builder,
                                            HValue* context,
                                            Direction direction,
                                            HValue* increment_amount) {
  builder_ = builder;
  context_ = context;
  direction_ = direction;
  increment_amount_ = increment_amount;

  finished_ = false;
  header_block_ = builder->CreateLoopHeaderBlock();
  body_block_ = NULL;
  exit_block_ = NULL;
  exit_trampoline_block_ = NULL;
}
1254
1255
// Opens a counted loop body. Creates the induction phi seeded with |initial|,
// branches on phi <token> terminating, and returns the value the body should
// use for the induction variable (pre-incremented/-decremented if requested).
HValue* HGraphBuilder::LoopBuilder::BeginBody(
    HValue* initial,
    HValue* terminating,
    Token::Value token) {
  DCHECK(direction_ != kWhileTrue);
  HEnvironment* env = builder_->environment();
  phi_ = header_block_->AddNewPhi(env->values()->length());
  phi_->AddInput(initial);
  env->Push(initial);
  builder_->GotoNoSimulate(header_block_);

  HEnvironment* body_env = env->Copy();
  HEnvironment* exit_env = env->Copy();
  // Remove the phi from the expression stack
  body_env->Pop();
  exit_env->Pop();
  body_block_ = builder_->CreateBasicBlock(body_env);
  exit_block_ = builder_->CreateBasicBlock(exit_env);

  builder_->set_current_block(header_block_);
  env->Pop();
  builder_->FinishCurrentBlock(builder_->New<HCompareNumericAndBranch>(
      phi_, terminating, token, body_block_, exit_block_));

  builder_->set_current_block(body_block_);
  if (direction_ == kPreIncrement || direction_ == kPreDecrement) {
    Isolate* isolate = builder_->isolate();
    HValue* one = builder_->graph()->GetConstant1();
    if (direction_ == kPreIncrement) {
      increment_ = HAdd::New(isolate, zone(), context_, phi_, one);
    } else {
      increment_ = HSub::New(isolate, zone(), context_, phi_, one);
    }
    // The header's bounds check makes overflow impossible here.
    increment_->ClearFlag(HValue::kCanOverflow);
    builder_->AddInstruction(increment_);
    return increment_;
  } else {
    return phi_;
  }
}
1296
1297
// Opens a while(true) loop body, dropping |drop_count| environment values
// that belonged to the pre-loop code.
void HGraphBuilder::LoopBuilder::BeginBody(int drop_count) {
  DCHECK(direction_ == kWhileTrue);
  HEnvironment* env = builder_->environment();
  builder_->GotoNoSimulate(header_block_);
  builder_->set_current_block(header_block_);
  env->Drop(drop_count);
}
1305
1306
// Emits a break: jumps to a shared exit trampoline (created on first use so
// multiple breaks and the normal exit all converge on one block).
void HGraphBuilder::LoopBuilder::Break() {
  if (exit_trampoline_block_ == NULL) {
    // It's the first time we saw a break.
    if (direction_ == kWhileTrue) {
      HEnvironment* env = builder_->environment()->Copy();
      exit_trampoline_block_ = builder_->CreateBasicBlock(env);
    } else {
      HEnvironment* env = exit_block_->last_environment()->Copy();
      exit_trampoline_block_ = builder_->CreateBasicBlock(env);
      builder_->GotoNoSimulate(exit_block_, exit_trampoline_block_);
    }
  }

  builder_->GotoNoSimulate(exit_trampoline_block_);
  builder_->set_current_block(NULL);
}
1323
1324
// Closes the loop body: applies a post-increment/-decrement if requested,
// pushes the next induction value for the header phi, wires the back edge,
// and resumes building at the exit (trampoline if any break occurred).
void HGraphBuilder::LoopBuilder::EndBody() {
  DCHECK(!finished_);

  if (direction_ == kPostIncrement || direction_ == kPostDecrement) {
    Isolate* isolate = builder_->isolate();
    if (direction_ == kPostIncrement) {
      increment_ =
          HAdd::New(isolate, zone(), context_, phi_, increment_amount_);
    } else {
      increment_ =
          HSub::New(isolate, zone(), context_, phi_, increment_amount_);
    }
    increment_->ClearFlag(HValue::kCanOverflow);
    builder_->AddInstruction(increment_);
  }

  if (direction_ != kWhileTrue) {
    // Push the new increment value on the expression stack to merge into
    // the phi.
    builder_->environment()->Push(increment_);
  }
  HBasicBlock* last_block = builder_->current_block();
  builder_->GotoNoSimulate(last_block, header_block_);
  header_block_->loop_information()->RegisterBackEdge(last_block);

  if (exit_trampoline_block_ != NULL) {
    builder_->set_current_block(exit_trampoline_block_);
  } else {
    builder_->set_current_block(exit_block_);
  }
  finished_ = true;
}
1357
1358
// Allocates the HGraph and drives graph construction; returns NULL when
// BuildGraph() bails out.
HGraph* HGraphBuilder::CreateGraph() {
  graph_ = new (zone()) HGraph(info_, descriptor_);
  if (FLAG_hydrogen_stats) isolate()->GetHStatistics()->Initialize(info_);
  if (!info_->IsStub() && info_->is_tracking_positions()) {
    // Register the outermost function as inline id 0 for position tracking.
    TraceInlinedFunction(info_->shared_info(), SourcePosition::Unknown());
  }
  CompilationPhase phase("H_Block building", info_);
  set_current_block(graph()->entry_block());
  if (!BuildGraph()) return NULL;
  graph()->FinalizeUniqueness();
  return graph_;
}
1371
// Registers |shared| as an inlined function for source-position tracking and
// returns its inline id (index into the graph's inlined_function_infos).
// With --hydrogen-track-positions the function's source and an INLINE line
// are dumped to the code tracer.
int HGraphBuilder::TraceInlinedFunction(Handle<SharedFunctionInfo> shared,
                                        SourcePosition position) {
  DCHECK(info_->is_tracking_positions());

  int inline_id = static_cast<int>(graph()->inlined_function_infos().size());
  HInlinedFunctionInfo info(shared->start_position());
  if (!shared->script()->IsUndefined()) {
    Handle<Script> script(Script::cast(shared->script()));

    if (FLAG_hydrogen_track_positions && !script->source()->IsUndefined()) {
      CodeTracer::Scope tracing_scope(isolate()->GetCodeTracer());
      Object* source_name = script->name();
      OFStream os(tracing_scope.file());
      os << "--- FUNCTION SOURCE (";
      if (source_name->IsString()) {
        os << String::cast(source_name)->ToCString().get() << ":";
      }
      os << shared->DebugName()->ToCString().get() << ") id{";
      os << info_->optimization_id() << "," << inline_id << "} ---\n";
      {
        DisallowHeapAllocation no_allocation;
        int start = shared->start_position();
        int len = shared->end_position() - start;
        String::SubStringRange source(String::cast(script->source()), start,
                                      len);
        // Escape the raw source so the dump stays reversible/parsable.
        for (const auto& c : source) {
          os << AsReversiblyEscapedUC16(c);
        }
      }

      os << "\n--- END ---\n";
    }
  }

  graph()->inlined_function_infos().push_back(info);

  // id 0 is the outermost function; only actual inlinees get an INLINE line.
  if (FLAG_hydrogen_track_positions && inline_id != 0) {
    CodeTracer::Scope tracing_scope(isolate()->GetCodeTracer());
    OFStream os(tracing_scope.file());
    os << "INLINE (" << shared->DebugName()->ToCString().get() << ") id{"
       << info_->optimization_id() << "," << inline_id << "} AS " << inline_id
       << " AT " << position << std::endl;
  }

  return inline_id;
}
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00001418
// Appends |instr| to the current block at the current source position.
// Inside a NoSideEffectsScope the instruction is marked unobservable.
HInstruction* HGraphBuilder::AddInstruction(HInstruction* instr) {
  DCHECK(current_block() != NULL);
  DCHECK(!FLAG_hydrogen_track_positions ||
         !position_.IsUnknown() ||
         !info_->IsOptimizing());
  current_block()->AddInstruction(instr, source_position());
  if (graph()->IsInsideNoSideEffectsScope()) {
    instr->SetFlag(HValue::kHasNoObservableSideEffects);
  }
  return instr;
}
1430
1431
// Terminates the current block with control instruction |last|; clears the
// current block for instructions that cannot fall through.
void HGraphBuilder::FinishCurrentBlock(HControlInstruction* last) {
  DCHECK(!FLAG_hydrogen_track_positions ||
         !info_->IsOptimizing() ||
         !position_.IsUnknown());
  current_block()->Finish(last, source_position());
  if (last->IsReturn() || last->IsAbnormalExit()) {
    set_current_block(NULL);
  }
}
1441
1442
// Like FinishCurrentBlock(), but uses FinishExit() for instructions that
// leave the graph entirely (returns, abnormal exits).
void HGraphBuilder::FinishExitCurrentBlock(HControlInstruction* instruction) {
  DCHECK(!FLAG_hydrogen_track_positions || !info_->IsOptimizing() ||
         !position_.IsUnknown());
  current_block()->FinishExit(instruction, source_position());
  if (instruction->IsReturn() || instruction->IsAbnormalExit()) {
    set_current_block(NULL);
  }
}
1451
1452
// Emits a load/add/store sequence that bumps a native StatsCounter, guarded
// by --native-code-counters and the counter being enabled.
void HGraphBuilder::AddIncrementCounter(StatsCounter* counter) {
  if (FLAG_native_code_counters && counter->Enabled()) {
    HValue* reference = Add<HConstant>(ExternalReference(counter));
    HValue* old_value =
        Add<HLoadNamedField>(reference, nullptr, HObjectAccess::ForCounter());
    HValue* new_value = AddUncasted<HAdd>(old_value, graph()->GetConstant1());
    new_value->ClearFlag(HValue::kCanOverflow);  // Ignore counter overflow
    Add<HStoreNamedField>(reference, HObjectAccess::ForCounter(),
                          new_value, STORE_TO_INITIALIZED_ENTRY);
  }
}
1464
1465
// Adds a deopt simulate for bailout point |id|; illegal inside a
// no-side-effects scope where simulates must not be emitted.
void HGraphBuilder::AddSimulate(BailoutId id,
                                RemovableSimulate removable) {
  DCHECK(current_block() != NULL);
  DCHECK(!graph()->IsInsideNoSideEffectsScope());
  current_block()->AddNewSimulate(id, source_position(), removable);
}
1472
1473
1474HBasicBlock* HGraphBuilder::CreateBasicBlock(HEnvironment* env) {
1475 HBasicBlock* b = graph()->CreateBasicBlock();
1476 b->SetInitialEnvironment(env);
1477 return b;
1478}
1479
1480
// Allocates a loop header block: its environment is a loop-header copy of
// the current one and loop information is attached for back-edge tracking.
HBasicBlock* HGraphBuilder::CreateLoopHeaderBlock() {
  HBasicBlock* header = graph()->CreateBasicBlock();
  HEnvironment* entry_env = environment()->CopyAsLoopHeader(header);
  header->SetInitialEnvironment(entry_env);
  header->AttachLoopInformation();
  return header;
}
1488
1489
// Emits code that loads |object|'s elements kind, decoded from the
// ElementsKindBits field of its map's bit_field2.
HValue* HGraphBuilder::BuildGetElementsKind(HValue* object) {
  HValue* map = Add<HLoadNamedField>(object, nullptr, HObjectAccess::ForMap());

  HValue* bit_field2 =
      Add<HLoadNamedField>(map, nullptr, HObjectAccess::ForMapBitField2());
  return BuildDecodeField<Map::ElementsKindBits>(bit_field2);
}
1497
1498
// Emits code that extracts the enum-cache length from |map|'s bit_field3.
HValue* HGraphBuilder::BuildEnumLength(HValue* map) {
  NoObservableSideEffectsScope scope(this);
  HValue* bit_field3 =
      Add<HLoadNamedField>(map, nullptr, HObjectAccess::ForMapBitField3());
  return BuildDecodeField<Map::EnumLengthBits>(bit_field3);
}
1505
1506
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00001507HValue* HGraphBuilder::BuildCheckHeapObject(HValue* obj) {
1508 if (obj->type().IsHeapObject()) return obj;
1509 return Add<HCheckHeapObject>(obj);
1510}
1511
1512
// Ends the current block with an eager deopt followed by an abnormal exit.
void HGraphBuilder::FinishExitWithHardDeoptimization(
    Deoptimizer::DeoptReason reason) {
  Add<HDeoptimize>(reason, Deoptimizer::EAGER);
  FinishExitCurrentBlock(New<HAbnormalExit>());
}
1518
1519
// Ensures |string| is a String: emits heap-object + instance-type checks
// unless the type system already proves String-ness.
HValue* HGraphBuilder::BuildCheckString(HValue* string) {
  if (!string->type().IsString()) {
    DCHECK(!string->IsConstant() ||
           !HConstant::cast(string)->HasStringValue());
    BuildCheckHeapObject(string);
    return Add<HCheckInstanceType>(string, HCheckInstanceType::IS_STRING);
  }
  return string;
}
1529
// Implements sloppy-mode receiver wrapping: returns |object| unchanged when
// it is known to be a JSObject or when the callee is a known strict/native
// function (which receive the raw receiver); otherwise emits HWrapReceiver.
HValue* HGraphBuilder::BuildWrapReceiver(HValue* object, HValue* checked) {
  if (object->type().IsJSObject()) return object;
  HValue* function = checked->ActualValue();
  if (function->IsConstant() &&
      HConstant::cast(function)->handle(isolate())->IsJSFunction()) {
    Handle<JSFunction> f = Handle<JSFunction>::cast(
        HConstant::cast(function)->handle(isolate()));
    SharedFunctionInfo* shared = f->shared();
    if (is_strict(shared->language_mode()) || shared->native()) return object;
  }
  return Add<HWrapReceiver>(object, checked);
}
1542
1543
// Grows the elements backing store to accommodate |key|, after bounds-
// checking that key stays within capacity + kMaxGap (larger holes force the
// runtime's dictionary path instead).
HValue* HGraphBuilder::BuildCheckAndGrowElementsCapacity(
    HValue* object, HValue* elements, ElementsKind kind, HValue* length,
    HValue* capacity, HValue* key) {
  HValue* max_gap = Add<HConstant>(static_cast<int32_t>(JSObject::kMaxGap));
  HValue* max_capacity = AddUncasted<HAdd>(capacity, max_gap);
  Add<HBoundsCheck>(key, max_capacity);

  HValue* new_capacity = BuildNewElementsCapacity(key);
  HValue* new_elements = BuildGrowElementsCapacity(object, elements, kind, kind,
                                                  length, new_capacity);
  return new_elements;
}
1556
1557
// Handles a keyed store that may extend the array: when |key| is at/past the
// current length, grows capacity if needed (inline for stubs, via
// HMaybeGrowElements otherwise), bumps a JSArray's length, and pre-smi-fills
// the new slot for FAST_SMI stores. In-bounds keys just get a bounds check.
// Returns the (possibly replaced) elements backing store.
HValue* HGraphBuilder::BuildCheckForCapacityGrow(
    HValue* object,
    HValue* elements,
    ElementsKind kind,
    HValue* length,
    HValue* key,
    bool is_js_array,
    PropertyAccessType access_type) {
  IfBuilder length_checker(this);

  // Holey arrays may be extended at any index >= length; packed arrays only
  // by appending at exactly key == length.
  Token::Value token = IsHoleyElementsKind(kind) ? Token::GTE : Token::EQ;
  length_checker.If<HCompareNumericAndBranch>(key, length, token);

  length_checker.Then();

  HValue* current_capacity = AddLoadFixedArrayLength(elements);

  if (top_info()->IsStub()) {
    IfBuilder capacity_checker(this);
    capacity_checker.If<HCompareNumericAndBranch>(key, current_capacity,
                                                  Token::GTE);
    capacity_checker.Then();
    HValue* new_elements = BuildCheckAndGrowElementsCapacity(
        object, elements, kind, length, current_capacity, key);
    environment()->Push(new_elements);
    capacity_checker.Else();
    environment()->Push(elements);
    capacity_checker.End();
  } else {
    HValue* result = Add<HMaybeGrowElements>(
        object, elements, key, current_capacity, is_js_array, kind);
    environment()->Push(result);
  }

  if (is_js_array) {
    HValue* new_length = AddUncasted<HAdd>(key, graph_->GetConstant1());
    new_length->ClearFlag(HValue::kCanOverflow);

    Add<HStoreNamedField>(object, HObjectAccess::ForArrayLength(kind),
                          new_length);
  }

  if (access_type == STORE && kind == FAST_SMI_ELEMENTS) {
    HValue* checked_elements = environment()->Top();

    // Write zero to ensure that the new element is initialized with some smi.
    Add<HStoreKeyed>(checked_elements, key, graph()->GetConstant0(), nullptr,
                     kind);
  }

  length_checker.Else();
  Add<HBoundsCheck>(key, length);

  environment()->Push(elements);
  length_checker.End();

  return environment()->Pop();
}
1616
1617
// Copy-on-write barrier: if |elements| has the COW fixed-array map, copies
// it (via grow-to-same-capacity) before it can be mutated; otherwise the
// original backing store is returned unchanged.
HValue* HGraphBuilder::BuildCopyElementsOnWrite(HValue* object,
                                                HValue* elements,
                                                ElementsKind kind,
                                                HValue* length) {
  Factory* factory = isolate()->factory();

  IfBuilder cow_checker(this);

  cow_checker.If<HCompareMap>(elements, factory->fixed_cow_array_map());
  cow_checker.Then();

  HValue* capacity = AddLoadFixedArrayLength(elements);

  HValue* new_elements = BuildGrowElementsCapacity(object, elements, kind,
                                                   kind, length, capacity);

  environment()->Push(new_elements);

  cow_checker.Else();

  environment()->Push(elements);

  cow_checker.End();

  return environment()->Pop();
}
1644
1645
// Transitions |object| from |from_kind| to |to_kind| elements: traps any
// allocation memento if the transition is tracked, converts the backing
// store in place for non-trivial transitions (e.g. smi -> double), and
// finally installs the new |map|. Empty backing stores need no conversion.
void HGraphBuilder::BuildTransitionElementsKind(HValue* object,
                                                HValue* map,
                                                ElementsKind from_kind,
                                                ElementsKind to_kind,
                                                bool is_jsarray) {
  DCHECK(!IsFastHoleyElementsKind(from_kind) ||
         IsFastHoleyElementsKind(to_kind));

  if (AllocationSite::GetMode(from_kind, to_kind) == TRACK_ALLOCATION_SITE) {
    Add<HTrapAllocationMemento>(object);
  }

  if (!IsSimpleMapChangeTransition(from_kind, to_kind)) {
    HInstruction* elements = AddLoadElements(object);

    HInstruction* empty_fixed_array = Add<HConstant>(
        isolate()->factory()->empty_fixed_array());

    IfBuilder if_builder(this);

    if_builder.IfNot<HCompareObjectEqAndBranch>(elements, empty_fixed_array);

    if_builder.Then();

    HInstruction* elements_length = AddLoadFixedArrayLength(elements);

    // JSArrays copy only up to their length; other receivers copy the whole
    // backing store.
    HInstruction* array_length =
        is_jsarray
            ? Add<HLoadNamedField>(object, nullptr,
                                   HObjectAccess::ForArrayLength(from_kind))
            : elements_length;

    BuildGrowElementsCapacity(object, elements, from_kind, to_kind,
                              array_length, elements_length);

    if_builder.End();
  }

  Add<HStoreNamedField>(object, HObjectAccess::ForMap(), map);
}
1686
1687
// Emits checks that |receiver| is a plain JSObject (instance type in
// [JS_OBJECT_TYPE, LAST_JS_OBJECT_TYPE]) with none of |bit_field_mask| bits
// set, deoptimizing otherwise. Instance type and bit field are loaded with a
// single 16-bit load and tested together via a masked range check.
void HGraphBuilder::BuildJSObjectCheck(HValue* receiver,
                                       int bit_field_mask) {
  // Check that the object isn't a smi.
  Add<HCheckHeapObject>(receiver);

  // Get the map of the receiver.
  HValue* map =
      Add<HLoadNamedField>(receiver, nullptr, HObjectAccess::ForMap());

  // Check the instance type and if an access check is needed, this can be
  // done with a single load, since both bytes are adjacent in the map.
  HObjectAccess access(HObjectAccess::ForMapInstanceTypeAndBitField());
  HValue* instance_type_and_bit_field =
      Add<HLoadNamedField>(map, nullptr, access);

  HValue* mask = Add<HConstant>(0x00FF | (bit_field_mask << 8));
  HValue* and_result = AddUncasted<HBitwise>(Token::BIT_AND,
                                             instance_type_and_bit_field,
                                             mask);
  // If any masked bit-field bit is set, sub_result falls outside the allowed
  // range below and the bounds check deopts.
  HValue* sub_result = AddUncasted<HSub>(and_result,
                                         Add<HConstant>(JS_OBJECT_TYPE));
  Add<HBoundsCheck>(sub_result,
                    Add<HConstant>(LAST_JS_OBJECT_TYPE + 1 - JS_OBJECT_TYPE));
}
1712
1713
// Normalizes a keyed-access key: the continuation's true branch receives a
// smi element index, the false branch a unique name (symbol or internalized
// string). Heap numbers are forced to smi; non-internalized strings are
// internalized via the runtime.
void HGraphBuilder::BuildKeyedIndexCheck(HValue* key,
                                         HIfContinuation* join_continuation) {
  // The sometimes unintuitively backward ordering of the ifs below is
  // convoluted, but necessary. All of the paths must guarantee that the
  // if-true of the continuation returns a smi element index and the if-false of
  // the continuation returns either a symbol or a unique string key. All other
  // object types cause a deopt to fall back to the runtime.

  IfBuilder key_smi_if(this);
  key_smi_if.If<HIsSmiAndBranch>(key);
  key_smi_if.Then();
  {
    Push(key);  // Nothing to do, just continue to true of continuation.
  }
  key_smi_if.Else();
  {
    HValue* map = Add<HLoadNamedField>(key, nullptr, HObjectAccess::ForMap());
    HValue* instance_type =
        Add<HLoadNamedField>(map, nullptr, HObjectAccess::ForMapInstanceType());

    // Non-unique string, check for a string with a hash code that is actually
    // an index.
    STATIC_ASSERT(LAST_UNIQUE_NAME_TYPE == FIRST_NONSTRING_TYPE);
    IfBuilder not_string_or_name_if(this);
    not_string_or_name_if.If<HCompareNumericAndBranch>(
        instance_type,
        Add<HConstant>(LAST_UNIQUE_NAME_TYPE),
        Token::GT);

    not_string_or_name_if.Then();
    {
      // Non-smi, non-Name, non-String: Try to convert to smi in case of
      // HeapNumber.
      // TODO(danno): This could call some variant of ToString
      Push(AddUncasted<HForceRepresentation>(key, Representation::Smi()));
    }
    not_string_or_name_if.Else();
    {
      // String or Name: check explicitly for Name, they can short-circuit
      // directly to unique non-index key path.
      IfBuilder not_symbol_if(this);
      not_symbol_if.If<HCompareNumericAndBranch>(
          instance_type,
          Add<HConstant>(SYMBOL_TYPE),
          Token::NE);

      not_symbol_if.Then();
      {
        // String: check whether the String is a String of an index. If it is,
        // extract the index value from the hash.
        HValue* hash = Add<HLoadNamedField>(key, nullptr,
                                            HObjectAccess::ForNameHashField());
        HValue* not_index_mask = Add<HConstant>(static_cast<int>(
            String::kContainsCachedArrayIndexMask));

        HValue* not_index_test = AddUncasted<HBitwise>(
            Token::BIT_AND, hash, not_index_mask);

        IfBuilder string_index_if(this);
        string_index_if.If<HCompareNumericAndBranch>(not_index_test,
                                                     graph()->GetConstant0(),
                                                     Token::EQ);
        string_index_if.Then();
        {
          // String with index in hash: extract string and merge to index path.
          Push(BuildDecodeField<String::ArrayIndexValueBits>(hash));
        }
        string_index_if.Else();
        {
          // Key is a non-index String, check for uniqueness/internalization.
          // If it's not internalized yet, internalize it now.
          HValue* not_internalized_bit = AddUncasted<HBitwise>(
              Token::BIT_AND,
              instance_type,
              Add<HConstant>(static_cast<int>(kIsNotInternalizedMask)));

          IfBuilder internalized(this);
          internalized.If<HCompareNumericAndBranch>(not_internalized_bit,
                                                    graph()->GetConstant0(),
                                                    Token::EQ);
          internalized.Then();
          Push(key);

          internalized.Else();
          Add<HPushArguments>(key);
          HValue* intern_key = Add<HCallRuntime>(
              Runtime::FunctionForId(Runtime::kInternalizeString), 1);
          Push(intern_key);

          internalized.End();
          // Key guaranteed to be a unique string
        }
        string_index_if.JoinContinuation(join_continuation);
      }
      not_symbol_if.Else();
      {
        Push(key);  // Key is symbol
      }
      not_symbol_if.JoinContinuation(join_continuation);
    }
    not_string_or_name_if.JoinContinuation(join_continuation);
  }
  key_smi_if.JoinContinuation(join_continuation);
}
1818
1819
// Deoptimizes when |receiver| is a JSGlobalObject; only non-global receivers
// continue past this check.
void HGraphBuilder::BuildNonGlobalObjectCheck(HValue* receiver) {
  // Get the the instance type of the receiver, and make sure that it is
  // not one of the global object types.
  HValue* map =
      Add<HLoadNamedField>(receiver, nullptr, HObjectAccess::ForMap());
  HValue* instance_type =
      Add<HLoadNamedField>(map, nullptr, HObjectAccess::ForMapInstanceType());
  HValue* global_type = Add<HConstant>(JS_GLOBAL_OBJECT_TYPE);

  IfBuilder if_global_object(this);
  if_global_object.If<HCompareNumericAndBranch>(instance_type, global_type,
                                                Token::EQ);
  if_global_object.ThenDeopt(Deoptimizer::kReceiverWasAGlobalObject);
  if_global_object.End();
}
1835
1836
// Captures a continuation whose true branch means |object|'s properties
// backing store is a dictionary (its map is the hash-table map).
void HGraphBuilder::BuildTestForDictionaryProperties(
    HValue* object,
    HIfContinuation* continuation) {
  HValue* properties = Add<HLoadNamedField>(
      object, nullptr, HObjectAccess::ForPropertiesPointer());
  HValue* properties_map =
      Add<HLoadNamedField>(properties, nullptr, HObjectAccess::ForMap());
  HValue* hash_map = Add<HLoadRoot>(Heap::kHashTableMapRootIndex);
  IfBuilder builder(this);
  builder.If<HCompareObjectEqAndBranch>(properties_map, hash_map);
  builder.CaptureContinuation(continuation);
}
1849
1850
// Computes the KeyedLookupCache bucket index for (map, string key):
// (map >> kMapHashShift) ^ (string_hash >> kHashShift), masked to the
// cache's capacity.
HValue* HGraphBuilder::BuildKeyedLookupCacheHash(HValue* object,
                                                 HValue* key) {
  // Load the map of the receiver, compute the keyed lookup cache hash
  // based on 32 bits of the map pointer and the string hash.
  HValue* object_map =
      Add<HLoadNamedField>(object, nullptr, HObjectAccess::ForMapAsInteger32());
  HValue* shifted_map = AddUncasted<HShr>(
      object_map, Add<HConstant>(KeyedLookupCache::kMapHashShift));
  HValue* string_hash =
      Add<HLoadNamedField>(key, nullptr, HObjectAccess::ForStringHashField());
  HValue* shifted_hash = AddUncasted<HShr>(
      string_hash, Add<HConstant>(String::kHashShift));
  HValue* xor_result = AddUncasted<HBitwise>(Token::BIT_XOR, shifted_map,
                                             shifted_hash);
  int mask = (KeyedLookupCache::kCapacityMask & KeyedLookupCache::kHashMask);
  return AddUncasted<HBitwise>(Token::BIT_AND, xor_result,
                               Add<HConstant>(mask));
}
1869
1870
// Emits the graph-building equivalent of V8's seeded integer hash (a
// Thomas-Wang-style 32-bit mix) used to probe a seeded number dictionary.
// The step comments below spell out the scalar formula each IR sequence
// implements; the exact sequence must match the runtime hash.
HValue* HGraphBuilder::BuildElementIndexHash(HValue* index) {
  // NOTE(review): the seed is cast to uint32_t but stored in an int32_t,
  // relying on wrap-around conversion for large seeds — presumably
  // intentional to match the runtime's 32-bit arithmetic; confirm.
  int32_t seed_value = static_cast<uint32_t>(isolate()->heap()->HashSeed());
  HValue* seed = Add<HConstant>(seed_value);
  HValue* hash = AddUncasted<HBitwise>(Token::BIT_XOR, index, seed);

  // hash = ~hash + (hash << 15);
  HValue* shifted_hash = AddUncasted<HShl>(hash, Add<HConstant>(15));
  // ~hash is expressed as hash ^ -1.
  HValue* not_hash = AddUncasted<HBitwise>(Token::BIT_XOR, hash,
                                           graph()->GetConstantMinus1());
  hash = AddUncasted<HAdd>(shifted_hash, not_hash);

  // hash = hash ^ (hash >> 12);
  shifted_hash = AddUncasted<HShr>(hash, Add<HConstant>(12));
  hash = AddUncasted<HBitwise>(Token::BIT_XOR, hash, shifted_hash);

  // hash = hash + (hash << 2);
  shifted_hash = AddUncasted<HShl>(hash, Add<HConstant>(2));
  hash = AddUncasted<HAdd>(hash, shifted_hash);

  // hash = hash ^ (hash >> 4);
  shifted_hash = AddUncasted<HShr>(hash, Add<HConstant>(4));
  hash = AddUncasted<HBitwise>(Token::BIT_XOR, hash, shifted_hash);

  // hash = hash * 2057;
  hash = AddUncasted<HMul>(hash, Add<HConstant>(2057));
  // The multiplication is meant to wrap modulo 2^32.
  hash->ClearFlag(HValue::kCanOverflow);

  // hash = hash ^ (hash >> 16);
  shifted_hash = AddUncasted<HShr>(hash, Add<HConstant>(16));
  return AddUncasted<HBitwise>(Token::BIT_XOR, hash, shifted_hash);
}
1902
// Emits an inline probe loop over a SeededNumberDictionary backing store
// (|elements|) looking up |key| starting at |hash|. On a plain-data hit the
// stored value is pushed/returned; on a miss or any element whose |details|
// need special handling, it falls back to Runtime::kKeyedGetProperty.
// "Unchecked" — callers are expected to have validated receiver/elements.
HValue* HGraphBuilder::BuildUncheckedDictionaryElementLoad(HValue* receiver,
                                                           HValue* elements,
                                                           HValue* key,
                                                           HValue* hash) {
  HValue* capacity =
      Add<HLoadKeyed>(elements, Add<HConstant>(NameDictionary::kCapacityIndex),
                      nullptr, nullptr, FAST_ELEMENTS);

  // Capacity is a power of two, so (capacity - 1) masks a hash into range.
  HValue* mask = AddUncasted<HSub>(capacity, graph()->GetConstant1());
  mask->ChangeRepresentation(Representation::Integer32());
  mask->ClearFlag(HValue::kCanOverflow);

  // Loop-carried state travels through the environment: |entry| is the
  // current probe position, |count| the quadratic-probe increment.
  HValue* entry = hash;
  HValue* count = graph()->GetConstant1();
  Push(entry);
  Push(count);

  HIfContinuation return_or_loop_continuation(graph()->CreateBasicBlock(),
                                              graph()->CreateBasicBlock());
  HIfContinuation found_key_match_continuation(graph()->CreateBasicBlock(),
                                               graph()->CreateBasicBlock());
  LoopBuilder probe_loop(this);
  probe_loop.BeginBody(2);  // Drop entry, count from last environment to
                            // appease live range building without simulates.

  count = Pop();
  entry = Pop();
  entry = AddUncasted<HBitwise>(Token::BIT_AND, entry, mask);
  // Each dictionary entry spans kEntrySize slots: key, value, details.
  int entry_size = SeededNumberDictionary::kEntrySize;
  HValue* base_index = AddUncasted<HMul>(entry, Add<HConstant>(entry_size));
  base_index->ClearFlag(HValue::kCanOverflow);
  int start_offset = SeededNumberDictionary::kElementsStartIndex;
  HValue* key_index =
      AddUncasted<HAdd>(base_index, Add<HConstant>(start_offset));
  key_index->ClearFlag(HValue::kCanOverflow);

  HValue* candidate_key =
      Add<HLoadKeyed>(elements, key_index, nullptr, nullptr, FAST_ELEMENTS);
  IfBuilder if_undefined(this);
  if_undefined.If<HCompareObjectEqAndBranch>(candidate_key,
                                             graph()->GetConstantUndefined());
  if_undefined.Then();
  {
    // element == undefined means "not found". Call the runtime.
    // TODO(jkummerow): walk the prototype chain instead.
    Add<HPushArguments>(receiver, key);
    Push(Add<HCallRuntime>(Runtime::FunctionForId(Runtime::kKeyedGetProperty),
                           2));
  }
  if_undefined.Else();
  {
    // Fast path: the candidate is the exact same object as |key|.
    IfBuilder if_match(this);
    if_match.If<HCompareObjectEqAndBranch>(candidate_key, key);
    if_match.Then();
    if_match.Else();

    // Update non-internalized string in the dictionary with internalized key?
    // Conditions checked below: candidate is a non-smi, non-internalized,
    // non-hole string whose contents equal |key|.
    IfBuilder if_update_with_internalized(this);
    HValue* smi_check =
        if_update_with_internalized.IfNot<HIsSmiAndBranch>(candidate_key);
    if_update_with_internalized.And();
    HValue* map = AddLoadMap(candidate_key, smi_check);
    HValue* instance_type =
        Add<HLoadNamedField>(map, nullptr, HObjectAccess::ForMapInstanceType());
    HValue* not_internalized_bit = AddUncasted<HBitwise>(
        Token::BIT_AND, instance_type,
        Add<HConstant>(static_cast<int>(kIsNotInternalizedMask)));
    if_update_with_internalized.If<HCompareNumericAndBranch>(
        not_internalized_bit, graph()->GetConstant0(), Token::NE);
    if_update_with_internalized.And();
    if_update_with_internalized.IfNot<HCompareObjectEqAndBranch>(
        candidate_key, graph()->GetConstantHole());
    if_update_with_internalized.AndIf<HStringCompareAndBranch>(candidate_key,
                                                               key, Token::EQ);
    if_update_with_internalized.Then();
    // Replace a key that is a non-internalized string by the equivalent
    // internalized string for faster further lookups.
    Add<HStoreKeyed>(elements, key_index, key, nullptr, FAST_ELEMENTS);
    if_update_with_internalized.Else();

    if_update_with_internalized.JoinContinuation(&found_key_match_continuation);
    if_match.JoinContinuation(&found_key_match_continuation);

    IfBuilder found_key_match(this, &found_key_match_continuation);
    found_key_match.Then();
    // Key at current probe matches. Relevant bits in the |details| field must
    // be zero, otherwise the dictionary element requires special handling.
    HValue* details_index =
        AddUncasted<HAdd>(base_index, Add<HConstant>(start_offset + 2));
    details_index->ClearFlag(HValue::kCanOverflow);
    HValue* details = Add<HLoadKeyed>(elements, details_index, nullptr, nullptr,
                                      FAST_ELEMENTS);
    int details_mask = PropertyDetails::TypeField::kMask;
    details = AddUncasted<HBitwise>(Token::BIT_AND, details,
                                    Add<HConstant>(details_mask));
    IfBuilder details_compare(this);
    details_compare.If<HCompareNumericAndBranch>(
        details, graph()->GetConstant0(), Token::EQ);
    details_compare.Then();
    // Value slot sits one past the key slot within the entry.
    HValue* result_index =
        AddUncasted<HAdd>(base_index, Add<HConstant>(start_offset + 1));
    result_index->ClearFlag(HValue::kCanOverflow);
    Push(Add<HLoadKeyed>(elements, result_index, nullptr, nullptr,
                         FAST_ELEMENTS));
    details_compare.Else();
    // Non-trivial property details: defer to the runtime.
    Add<HPushArguments>(receiver, key);
    Push(Add<HCallRuntime>(Runtime::FunctionForId(Runtime::kKeyedGetProperty),
                           2));
    details_compare.End();

    found_key_match.Else();
    found_key_match.JoinContinuation(&return_or_loop_continuation);
  }
  if_undefined.JoinContinuation(&return_or_loop_continuation);

  IfBuilder return_or_loop(this, &return_or_loop_continuation);
  return_or_loop.Then();
  probe_loop.Break();

  return_or_loop.Else();
  // No result yet: advance the probe quadratically (entry += count++).
  entry = AddUncasted<HAdd>(entry, count);
  entry->ClearFlag(HValue::kCanOverflow);
  count = AddUncasted<HAdd>(count, graph()->GetConstant1());
  count->ClearFlag(HValue::kCanOverflow);
  Push(entry);
  Push(count);

  probe_loop.EndBody();

  return_or_loop.End();

  // The result (loaded value or runtime call result) was left on the
  // environment stack by whichever branch terminated the loop.
  return Pop();
}
2036
2037
// Allocates and fully initializes a JSIteratorResult ({value, done}) in
// new space, using the iterator-result map from the native context. The
// store sequence must cover every field of the fixed-size object.
HValue* HGraphBuilder::BuildCreateIterResultObject(HValue* value,
                                                   HValue* done) {
  NoObservableSideEffectsScope scope(this);

  // Allocate the JSIteratorResult object.
  HValue* result =
      Add<HAllocate>(Add<HConstant>(JSIteratorResult::kSize), HType::JSObject(),
                     NOT_TENURED, JS_OBJECT_TYPE, graph()->GetConstant0());

  // Initialize the JSIteratorResult object.
  HValue* native_context = BuildGetNativeContext();
  HValue* map = Add<HLoadNamedField>(
      native_context, nullptr,
      HObjectAccess::ForContextSlot(Context::ITERATOR_RESULT_MAP_INDEX));
  Add<HStoreNamedField>(result, HObjectAccess::ForMap(), map);
  // Properties and elements are both the canonical empty fixed array.
  HValue* empty_fixed_array = Add<HLoadRoot>(Heap::kEmptyFixedArrayRootIndex);
  Add<HStoreNamedField>(result, HObjectAccess::ForPropertiesPointer(),
                        empty_fixed_array);
  Add<HStoreNamedField>(result, HObjectAccess::ForElementsPointer(),
                        empty_fixed_array);
  Add<HStoreNamedField>(result, HObjectAccess::ForObservableJSObjectOffset(
                                    JSIteratorResult::kValueOffset),
                        value);
  Add<HStoreNamedField>(result, HObjectAccess::ForObservableJSObjectOffset(
                                    JSIteratorResult::kDoneOffset),
                        done);
  // Guard that the five stores above initialize the whole object.
  STATIC_ASSERT(JSIteratorResult::kSize == 5 * kPointerSize);
  return result;
}
2067
2068
// Builds a JSRegExpResult: a JSArray of |length| undefined-filled elements
// plus the extra |index| and |input| fields. |length| is bounds-checked
// against the maximal fast-elements array length.
HValue* HGraphBuilder::BuildRegExpConstructResult(HValue* length,
                                                  HValue* index,
                                                  HValue* input) {
  NoObservableSideEffectsScope scope(this);
  HConstant* max_length = Add<HConstant>(JSArray::kInitialMaxFastElementArray);
  Add<HBoundsCheck>(length, max_length);

  // Generate size calculation code here in order to make it dominate
  // the JSRegExpResult allocation.
  ElementsKind elements_kind = FAST_ELEMENTS;
  HValue* size = BuildCalculateElementsSize(elements_kind, length);

  // Allocate the JSRegExpResult and the FixedArray in one step.
  HValue* result =
      Add<HAllocate>(Add<HConstant>(JSRegExpResult::kSize), HType::JSArray(),
                     NOT_TENURED, JS_ARRAY_TYPE, graph()->GetConstant0());

  // Initialize the JSRegExpResult header.
  HValue* native_context = Add<HLoadNamedField>(
      context(), nullptr,
      HObjectAccess::ForContextSlot(Context::NATIVE_CONTEXT_INDEX));
  Add<HStoreNamedField>(
      result, HObjectAccess::ForMap(),
      Add<HLoadNamedField>(
          native_context, nullptr,
          HObjectAccess::ForContextSlot(Context::REGEXP_RESULT_MAP_INDEX)));
  HConstant* empty_fixed_array =
      Add<HConstant>(isolate()->factory()->empty_fixed_array());
  // Elements are set to the empty fixed array first and replaced by the
  // real backing store further down, once it has been allocated.
  Add<HStoreNamedField>(
      result, HObjectAccess::ForJSArrayOffset(JSArray::kPropertiesOffset),
      empty_fixed_array);
  Add<HStoreNamedField>(
      result, HObjectAccess::ForJSArrayOffset(JSArray::kElementsOffset),
      empty_fixed_array);
  Add<HStoreNamedField>(
      result, HObjectAccess::ForJSArrayOffset(JSArray::kLengthOffset), length);

  // Initialize the additional fields.
  Add<HStoreNamedField>(
      result, HObjectAccess::ForJSArrayOffset(JSRegExpResult::kIndexOffset),
      index);
  Add<HStoreNamedField>(
      result, HObjectAccess::ForJSArrayOffset(JSRegExpResult::kInputOffset),
      input);

  // Allocate and initialize the elements header.
  HAllocate* elements = BuildAllocateElements(elements_kind, size);
  BuildInitializeElementsHeader(elements, elements_kind, length);

  // Hook the real elements backing store into the result object.
  Add<HStoreNamedField>(
      result, HObjectAccess::ForJSArrayOffset(JSArray::kElementsOffset),
      elements);

  // Initialize the elements contents with undefined.
  BuildFillElementsWithValue(
      elements, elements_kind, graph()->GetConstant0(), length,
      graph()->GetConstantUndefined());

  return result;
}
2129
2130
// Converts a number to its string representation: constants are folded at
// compile time, otherwise the number-string cache is probed (smi and heap
// number paths), falling back to Runtime::kNumberToStringSkipCache on a
// cache miss. |type| narrows the paths emitted (SignedSmall/Number allow
// deopts instead of fallthrough branches).
HValue* HGraphBuilder::BuildNumberToString(HValue* object, Type* type) {
  NoObservableSideEffectsScope scope(this);

  // Convert constant numbers at compile time.
  if (object->IsConstant() && HConstant::cast(object)->HasNumberValue()) {
    Handle<Object> number = HConstant::cast(object)->handle(isolate());
    Handle<String> result = isolate()->factory()->NumberToString(number);
    return Add<HConstant>(result);
  }

  // Create a joinable continuation. Branches that find a cache entry push
  // the key index and join here with Then(); misses join with Else().
  HIfContinuation found(graph()->CreateBasicBlock(),
                        graph()->CreateBasicBlock());

  // Load the number string cache.
  HValue* number_string_cache =
      Add<HLoadRoot>(Heap::kNumberStringCacheRootIndex);

  // Make the hash mask from the length of the number string cache. It
  // contains two elements (number and string) for each cache entry.
  HValue* mask = AddLoadFixedArrayLength(number_string_cache);
  mask->set_type(HType::Smi());
  mask = AddUncasted<HSar>(mask, graph()->GetConstant1());
  mask = AddUncasted<HSub>(mask, graph()->GetConstant1());

  // Check whether object is a smi.
  IfBuilder if_objectissmi(this);
  if_objectissmi.If<HIsSmiAndBranch>(object);
  if_objectissmi.Then();
  {
    // Compute hash for smi similar to smi_get_hash().
    HValue* hash = AddUncasted<HBitwise>(Token::BIT_AND, object, mask);

    // Load the key (entries are (key, value) pairs, hence the shift).
    HValue* key_index = AddUncasted<HShl>(hash, graph()->GetConstant1());
    HValue* key = Add<HLoadKeyed>(number_string_cache, key_index, nullptr,
                                  nullptr, FAST_ELEMENTS, ALLOW_RETURN_HOLE);

    // Check if object == key.
    IfBuilder if_objectiskey(this);
    if_objectiskey.If<HCompareObjectEqAndBranch>(object, key);
    if_objectiskey.Then();
    {
      // Make the key_index available.
      Push(key_index);
    }
    if_objectiskey.JoinContinuation(&found);
  }
  if_objectissmi.Else();
  {
    if (type->Is(Type::SignedSmall())) {
      // Type info promised a smi; anything else is a deopt.
      if_objectissmi.Deopt(Deoptimizer::kExpectedSmi);
    } else {
      // Check if the object is a heap number.
      IfBuilder if_objectisnumber(this);
      HValue* objectisnumber = if_objectisnumber.If<HCompareMap>(
          object, isolate()->factory()->heap_number_map());
      if_objectisnumber.Then();
      {
        // Compute hash for heap number similar to double_get_hash().
        HValue* low = Add<HLoadNamedField>(
            object, objectisnumber,
            HObjectAccess::ForHeapNumberValueLowestBits());
        HValue* high = Add<HLoadNamedField>(
            object, objectisnumber,
            HObjectAccess::ForHeapNumberValueHighestBits());
        HValue* hash = AddUncasted<HBitwise>(Token::BIT_XOR, low, high);
        hash = AddUncasted<HBitwise>(Token::BIT_AND, hash, mask);

        // Load the key.
        HValue* key_index = AddUncasted<HShl>(hash, graph()->GetConstant1());
        HValue* key =
            Add<HLoadKeyed>(number_string_cache, key_index, nullptr, nullptr,
                            FAST_ELEMENTS, ALLOW_RETURN_HOLE);

        // Check if the key is a heap number and compare it with the object.
        IfBuilder if_keyisnotsmi(this);
        HValue* keyisnotsmi = if_keyisnotsmi.IfNot<HIsSmiAndBranch>(key);
        if_keyisnotsmi.Then();
        {
          IfBuilder if_keyisheapnumber(this);
          if_keyisheapnumber.If<HCompareMap>(
              key, isolate()->factory()->heap_number_map());
          if_keyisheapnumber.Then();
          {
            // Check if values of key and object match.
            IfBuilder if_keyeqobject(this);
            if_keyeqobject.If<HCompareNumericAndBranch>(
                Add<HLoadNamedField>(key, keyisnotsmi,
                                     HObjectAccess::ForHeapNumberValue()),
                Add<HLoadNamedField>(object, objectisnumber,
                                     HObjectAccess::ForHeapNumberValue()),
                Token::EQ);
            if_keyeqobject.Then();
            {
              // Make the key_index available.
              Push(key_index);
            }
            if_keyeqobject.JoinContinuation(&found);
          }
          if_keyisheapnumber.JoinContinuation(&found);
        }
        if_keyisnotsmi.JoinContinuation(&found);
      }
      if_objectisnumber.Else();
      {
        if (type->Is(Type::Number())) {
          // Type info promised a number; anything else is a deopt.
          if_objectisnumber.Deopt(Deoptimizer::kExpectedHeapNumber);
        }
      }
      if_objectisnumber.JoinContinuation(&found);
    }
  }
  if_objectissmi.JoinContinuation(&found);

  // Check for cache hit.
  IfBuilder if_found(this, &found);
  if_found.Then();
  {
    // Count number to string operation in native code.
    AddIncrementCounter(isolate()->counters()->number_to_string_native());

    // Load the value in case of cache hit (stored right after the key).
    HValue* key_index = Pop();
    HValue* value_index = AddUncasted<HAdd>(key_index, graph()->GetConstant1());
    Push(Add<HLoadKeyed>(number_string_cache, value_index, nullptr, nullptr,
                         FAST_ELEMENTS, ALLOW_RETURN_HOLE));
  }
  if_found.Else();
  {
    // Cache miss, fallback to runtime.
    Add<HPushArguments>(object);
    Push(Add<HCallRuntime>(
            Runtime::FunctionForId(Runtime::kNumberToStringSkipCache),
            1));
  }
  if_found.End();

  return Pop();
}
2271
Ben Murdoch097c5b22016-05-18 11:27:45 +01002272HValue* HGraphBuilder::BuildToNumber(HValue* input) {
2273 if (input->type().IsTaggedNumber()) {
2274 return input;
2275 }
2276 Callable callable = CodeFactory::ToNumber(isolate());
2277 HValue* stub = Add<HConstant>(callable.code());
2278 HValue* values[] = {context(), input};
Ben Murdochc5610432016-08-08 18:44:38 +01002279 HCallWithDescriptor* instr = Add<HCallWithDescriptor>(
2280 stub, 0, callable.descriptor(), ArrayVector(values));
Ben Murdoch097c5b22016-05-18 11:27:45 +01002281 instr->set_type(HType::TaggedNumber());
2282 return instr;
2283}
2284
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00002285
// Inline fast path for ES ToObject(receiver): JSReceivers are returned
// unchanged; primitives are wrapped in a JSValue using the appropriate
// global constructor's initial map. Undefined/null deoptimize (the runtime
// then throws). The constructor function index travels via Push/Pop across
// the joined continuation.
HValue* HGraphBuilder::BuildToObject(HValue* receiver) {
  NoObservableSideEffectsScope scope(this);

  // Create a joinable continuation.
  HIfContinuation wrap(graph()->CreateBasicBlock(),
                       graph()->CreateBasicBlock());

  // Determine the proper global constructor function required to wrap
  // {receiver} into a JSValue, unless {receiver} is already a {JSReceiver}, in
  // which case we just return it. Deopts to Runtime::kToObject if {receiver}
  // is undefined or null.
  IfBuilder receiver_is_smi(this);
  receiver_is_smi.If<HIsSmiAndBranch>(receiver);
  receiver_is_smi.Then();
  {
    // Use global Number function.
    Push(Add<HConstant>(Context::NUMBER_FUNCTION_INDEX));
  }
  receiver_is_smi.Else();
  {
    // Determine {receiver} map and instance type.
    HValue* receiver_map =
        Add<HLoadNamedField>(receiver, nullptr, HObjectAccess::ForMap());
    HValue* receiver_instance_type = Add<HLoadNamedField>(
        receiver_map, nullptr, HObjectAccess::ForMapInstanceType());

    // First check whether {receiver} is already a spec object (fast case).
    IfBuilder receiver_is_not_spec_object(this);
    receiver_is_not_spec_object.If<HCompareNumericAndBranch>(
        receiver_instance_type, Add<HConstant>(FIRST_JS_RECEIVER_TYPE),
        Token::LT);
    receiver_is_not_spec_object.Then();
    {
      // Load the constructor function index from the {receiver} map.
      HValue* constructor_function_index = Add<HLoadNamedField>(
          receiver_map, nullptr,
          HObjectAccess::ForMapInObjectPropertiesOrConstructorFunctionIndex());

      // Check if {receiver} has a constructor (null and undefined have no
      // constructors, so we deoptimize to the runtime to throw an exception).
      IfBuilder constructor_function_index_is_invalid(this);
      constructor_function_index_is_invalid.If<HCompareNumericAndBranch>(
          constructor_function_index,
          Add<HConstant>(Map::kNoConstructorFunctionIndex), Token::EQ);
      constructor_function_index_is_invalid.ThenDeopt(
          Deoptimizer::kUndefinedOrNullInToObject);
      constructor_function_index_is_invalid.End();

      // Use the global constructor function.
      Push(constructor_function_index);
    }
    receiver_is_not_spec_object.JoinContinuation(&wrap);
  }
  receiver_is_smi.JoinContinuation(&wrap);

  // Wrap the receiver if necessary.
  IfBuilder if_wrap(this, &wrap);
  if_wrap.Then();
  {
    // Grab the constructor function index.
    HValue* constructor_index = Pop();

    // Load native context.
    HValue* native_context = BuildGetNativeContext();

    // Determine the initial map for the global constructor.
    HValue* constructor = Add<HLoadKeyed>(native_context, constructor_index,
                                          nullptr, nullptr, FAST_ELEMENTS);
    HValue* constructor_initial_map = Add<HLoadNamedField>(
        constructor, nullptr, HObjectAccess::ForPrototypeOrInitialMap());
    // Allocate and initialize a JSValue wrapper.
    HValue* value =
        BuildAllocate(Add<HConstant>(JSValue::kSize), HType::JSObject(),
                      JS_VALUE_TYPE, HAllocationMode());
    Add<HStoreNamedField>(value, HObjectAccess::ForMap(),
                          constructor_initial_map);
    HValue* empty_fixed_array = Add<HLoadRoot>(Heap::kEmptyFixedArrayRootIndex);
    Add<HStoreNamedField>(value, HObjectAccess::ForPropertiesPointer(),
                          empty_fixed_array);
    Add<HStoreNamedField>(value, HObjectAccess::ForElementsPointer(),
                          empty_fixed_array);
    Add<HStoreNamedField>(value, HObjectAccess::ForObservableJSObjectOffset(
                                     JSValue::kValueOffset),
                          receiver);
    Push(value);
  }
  if_wrap.Else();
  { Push(receiver); }
  if_wrap.End();
  return Pop();
}
2377
2378
2379HAllocate* HGraphBuilder::BuildAllocate(
2380 HValue* object_size,
2381 HType type,
2382 InstanceType instance_type,
2383 HAllocationMode allocation_mode) {
2384 // Compute the effective allocation size.
2385 HValue* size = object_size;
2386 if (allocation_mode.CreateAllocationMementos()) {
2387 size = AddUncasted<HAdd>(size, Add<HConstant>(AllocationMemento::kSize));
2388 size->ClearFlag(HValue::kCanOverflow);
2389 }
2390
2391 // Perform the actual allocation.
2392 HAllocate* object = Add<HAllocate>(
Ben Murdochc5610432016-08-08 18:44:38 +01002393 size, type, allocation_mode.GetPretenureMode(), instance_type,
2394 graph()->GetConstant0(), allocation_mode.feedback_site());
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00002395
2396 // Setup the allocation memento.
2397 if (allocation_mode.CreateAllocationMementos()) {
2398 BuildCreateAllocationMemento(
2399 object, object_size, allocation_mode.current_site());
2400 }
2401
2402 return object;
2403}
2404
2405
2406HValue* HGraphBuilder::BuildAddStringLengths(HValue* left_length,
2407 HValue* right_length) {
2408 // Compute the combined string length and check against max string length.
2409 HValue* length = AddUncasted<HAdd>(left_length, right_length);
2410 // Check that length <= kMaxLength <=> length < MaxLength + 1.
2411 HValue* max_length = Add<HConstant>(String::kMaxLength + 1);
2412 Add<HBoundsCheck>(length, max_length);
2413 return length;
2414}
2415
2416
// Allocates a ConsString of |length| characters referencing |left| and
// |right|, choosing the one-byte or two-byte cons string map from the
// operands' instance-type bits. |length| must already be the checked sum of
// the operands' lengths (see BuildAddStringLengths).
HValue* HGraphBuilder::BuildCreateConsString(
    HValue* length,
    HValue* left,
    HValue* right,
    HAllocationMode allocation_mode) {
  // Determine the string instance types.
  HInstruction* left_instance_type = AddLoadStringInstanceType(left);
  HInstruction* right_instance_type = AddLoadStringInstanceType(right);

  // Allocate the cons string object. HAllocate does not care whether we
  // pass CONS_STRING_TYPE or CONS_ONE_BYTE_STRING_TYPE here, so we just use
  // CONS_STRING_TYPE here. Below we decide whether the cons string is
  // one-byte or two-byte and set the appropriate map.
  DCHECK(HAllocate::CompatibleInstanceTypes(CONS_STRING_TYPE,
                                            CONS_ONE_BYTE_STRING_TYPE));
  HAllocate* result = BuildAllocate(Add<HConstant>(ConsString::kSize),
                                    HType::String(), CONS_STRING_TYPE,
                                    allocation_mode);

  // Compute intersection and difference of instance types.
  // AND isolates bits set in both operands; XOR isolates bits set in
  // exactly one of them — both are needed for the tests below.
  HValue* anded_instance_types = AddUncasted<HBitwise>(
      Token::BIT_AND, left_instance_type, right_instance_type);
  HValue* xored_instance_types = AddUncasted<HBitwise>(
      Token::BIT_XOR, left_instance_type, right_instance_type);

  // We create a one-byte cons string if
  // 1. both strings are one-byte, or
  // 2. at least one of the strings is two-byte, but happens to contain only
  //    one-byte characters.
  // To do this, we check
  // 1. if both strings are one-byte, or if the one-byte data hint is set in
  //    both strings, or
  // 2. if one of the strings has the one-byte data hint set and the other
  //    string is one-byte.
  IfBuilder if_onebyte(this);
  STATIC_ASSERT(kOneByteStringTag != 0);
  STATIC_ASSERT(kOneByteDataHintMask != 0);
  if_onebyte.If<HCompareNumericAndBranch>(
      AddUncasted<HBitwise>(
          Token::BIT_AND, anded_instance_types,
          Add<HConstant>(static_cast<int32_t>(
                  kStringEncodingMask | kOneByteDataHintMask))),
      graph()->GetConstant0(), Token::NE);
  if_onebyte.Or();
  STATIC_ASSERT(kOneByteStringTag != 0 &&
                kOneByteDataHintTag != 0 &&
                kOneByteDataHintTag != kOneByteStringTag);
  if_onebyte.If<HCompareNumericAndBranch>(
      AddUncasted<HBitwise>(
          Token::BIT_AND, xored_instance_types,
          Add<HConstant>(static_cast<int32_t>(
                  kOneByteStringTag | kOneByteDataHintTag))),
      Add<HConstant>(static_cast<int32_t>(
              kOneByteStringTag | kOneByteDataHintTag)), Token::EQ);
  if_onebyte.Then();
  {
    // We can safely skip the write barrier for storing the map here.
    Add<HStoreNamedField>(
        result, HObjectAccess::ForMap(),
        Add<HConstant>(isolate()->factory()->cons_one_byte_string_map()));
  }
  if_onebyte.Else();
  {
    // We can safely skip the write barrier for storing the map here.
    Add<HStoreNamedField>(
        result, HObjectAccess::ForMap(),
        Add<HConstant>(isolate()->factory()->cons_string_map()));
  }
  if_onebyte.End();

  // Initialize the cons string fields.
  Add<HStoreNamedField>(result, HObjectAccess::ForStringHashField(),
                        Add<HConstant>(String::kEmptyHashField));
  Add<HStoreNamedField>(result, HObjectAccess::ForStringLength(), length);
  Add<HStoreNamedField>(result, HObjectAccess::ForConsStringFirst(), left);
  Add<HStoreNamedField>(result, HObjectAccess::ForConsStringSecond(), right);

  // Count the native string addition.
  AddIncrementCounter(isolate()->counters()->string_add_native());

  return result;
}
2499
2500
2501void HGraphBuilder::BuildCopySeqStringChars(HValue* src,
2502 HValue* src_offset,
2503 String::Encoding src_encoding,
2504 HValue* dst,
2505 HValue* dst_offset,
2506 String::Encoding dst_encoding,
2507 HValue* length) {
2508 DCHECK(dst_encoding != String::ONE_BYTE_ENCODING ||
2509 src_encoding == String::ONE_BYTE_ENCODING);
2510 LoopBuilder loop(this, context(), LoopBuilder::kPostIncrement);
2511 HValue* index = loop.BeginBody(graph()->GetConstant0(), length, Token::LT);
2512 {
2513 HValue* src_index = AddUncasted<HAdd>(src_offset, index);
2514 HValue* value =
2515 AddUncasted<HSeqStringGetChar>(src_encoding, src, src_index);
2516 HValue* dst_index = AddUncasted<HAdd>(dst_offset, index);
2517 Add<HSeqStringSetChar>(dst_encoding, dst, dst_index, value);
2518 }
2519 loop.EndBody();
2520}
2521
2522
2523HValue* HGraphBuilder::BuildObjectSizeAlignment(
2524 HValue* unaligned_size, int header_size) {
2525 DCHECK((header_size & kObjectAlignmentMask) == 0);
2526 HValue* size = AddUncasted<HAdd>(
2527 unaligned_size, Add<HConstant>(static_cast<int32_t>(
2528 header_size + kObjectAlignmentMask)));
2529 size->ClearFlag(HValue::kCanOverflow);
2530 return AddUncasted<HBitwise>(
2531 Token::BIT_AND, size, Add<HConstant>(static_cast<int32_t>(
2532 ~kObjectAlignmentMask)));
2533}
2534
2535
2536HValue* HGraphBuilder::BuildUncheckedStringAdd(
2537 HValue* left,
2538 HValue* right,
2539 HAllocationMode allocation_mode) {
2540 // Determine the string lengths.
2541 HValue* left_length = AddLoadStringLength(left);
2542 HValue* right_length = AddLoadStringLength(right);
2543
2544 // Compute the combined string length.
2545 HValue* length = BuildAddStringLengths(left_length, right_length);
2546
2547 // Do some manual constant folding here.
2548 if (left_length->IsConstant()) {
2549 HConstant* c_left_length = HConstant::cast(left_length);
2550 DCHECK_NE(0, c_left_length->Integer32Value());
2551 if (c_left_length->Integer32Value() + 1 >= ConsString::kMinLength) {
2552 // The right string contains at least one character.
2553 return BuildCreateConsString(length, left, right, allocation_mode);
2554 }
2555 } else if (right_length->IsConstant()) {
2556 HConstant* c_right_length = HConstant::cast(right_length);
2557 DCHECK_NE(0, c_right_length->Integer32Value());
2558 if (c_right_length->Integer32Value() + 1 >= ConsString::kMinLength) {
2559 // The left string contains at least one character.
2560 return BuildCreateConsString(length, left, right, allocation_mode);
2561 }
2562 }
2563
2564 // Check if we should create a cons string.
2565 IfBuilder if_createcons(this);
2566 if_createcons.If<HCompareNumericAndBranch>(
2567 length, Add<HConstant>(ConsString::kMinLength), Token::GTE);
2568 if_createcons.Then();
2569 {
2570 // Create a cons string.
2571 Push(BuildCreateConsString(length, left, right, allocation_mode));
2572 }
2573 if_createcons.Else();
2574 {
2575 // Determine the string instance types.
2576 HValue* left_instance_type = AddLoadStringInstanceType(left);
2577 HValue* right_instance_type = AddLoadStringInstanceType(right);
2578
2579 // Compute union and difference of instance types.
2580 HValue* ored_instance_types = AddUncasted<HBitwise>(
2581 Token::BIT_OR, left_instance_type, right_instance_type);
2582 HValue* xored_instance_types = AddUncasted<HBitwise>(
2583 Token::BIT_XOR, left_instance_type, right_instance_type);
2584
2585 // Check if both strings have the same encoding and both are
2586 // sequential.
2587 IfBuilder if_sameencodingandsequential(this);
2588 if_sameencodingandsequential.If<HCompareNumericAndBranch>(
2589 AddUncasted<HBitwise>(
2590 Token::BIT_AND, xored_instance_types,
2591 Add<HConstant>(static_cast<int32_t>(kStringEncodingMask))),
2592 graph()->GetConstant0(), Token::EQ);
2593 if_sameencodingandsequential.And();
2594 STATIC_ASSERT(kSeqStringTag == 0);
2595 if_sameencodingandsequential.If<HCompareNumericAndBranch>(
2596 AddUncasted<HBitwise>(
2597 Token::BIT_AND, ored_instance_types,
2598 Add<HConstant>(static_cast<int32_t>(kStringRepresentationMask))),
2599 graph()->GetConstant0(), Token::EQ);
2600 if_sameencodingandsequential.Then();
2601 {
2602 HConstant* string_map =
2603 Add<HConstant>(isolate()->factory()->string_map());
2604 HConstant* one_byte_string_map =
2605 Add<HConstant>(isolate()->factory()->one_byte_string_map());
2606
2607 // Determine map and size depending on whether result is one-byte string.
2608 IfBuilder if_onebyte(this);
2609 STATIC_ASSERT(kOneByteStringTag != 0);
2610 if_onebyte.If<HCompareNumericAndBranch>(
2611 AddUncasted<HBitwise>(
2612 Token::BIT_AND, ored_instance_types,
2613 Add<HConstant>(static_cast<int32_t>(kStringEncodingMask))),
2614 graph()->GetConstant0(), Token::NE);
2615 if_onebyte.Then();
2616 {
2617 // Allocate sequential one-byte string object.
2618 Push(length);
2619 Push(one_byte_string_map);
2620 }
2621 if_onebyte.Else();
2622 {
2623 // Allocate sequential two-byte string object.
2624 HValue* size = AddUncasted<HShl>(length, graph()->GetConstant1());
2625 size->ClearFlag(HValue::kCanOverflow);
2626 size->SetFlag(HValue::kUint32);
2627 Push(size);
2628 Push(string_map);
2629 }
2630 if_onebyte.End();
2631 HValue* map = Pop();
2632
2633 // Calculate the number of bytes needed for the characters in the
2634 // string while observing object alignment.
2635 STATIC_ASSERT((SeqString::kHeaderSize & kObjectAlignmentMask) == 0);
2636 HValue* size = BuildObjectSizeAlignment(Pop(), SeqString::kHeaderSize);
2637
2638 IfBuilder if_size(this);
2639 if_size.If<HCompareNumericAndBranch>(
2640 size, Add<HConstant>(Page::kMaxRegularHeapObjectSize), Token::LT);
2641 if_size.Then();
2642 {
2643 // Allocate the string object. HAllocate does not care whether we pass
2644 // STRING_TYPE or ONE_BYTE_STRING_TYPE here, so we just use STRING_TYPE.
2645 HAllocate* result =
2646 BuildAllocate(size, HType::String(), STRING_TYPE, allocation_mode);
2647 Add<HStoreNamedField>(result, HObjectAccess::ForMap(), map);
2648
2649 // Initialize the string fields.
2650 Add<HStoreNamedField>(result, HObjectAccess::ForStringHashField(),
2651 Add<HConstant>(String::kEmptyHashField));
2652 Add<HStoreNamedField>(result, HObjectAccess::ForStringLength(), length);
2653
2654 // Copy characters to the result string.
2655 IfBuilder if_twobyte(this);
2656 if_twobyte.If<HCompareObjectEqAndBranch>(map, string_map);
2657 if_twobyte.Then();
2658 {
2659 // Copy characters from the left string.
2660 BuildCopySeqStringChars(
2661 left, graph()->GetConstant0(), String::TWO_BYTE_ENCODING, result,
2662 graph()->GetConstant0(), String::TWO_BYTE_ENCODING, left_length);
2663
2664 // Copy characters from the right string.
2665 BuildCopySeqStringChars(
2666 right, graph()->GetConstant0(), String::TWO_BYTE_ENCODING, result,
2667 left_length, String::TWO_BYTE_ENCODING, right_length);
2668 }
2669 if_twobyte.Else();
2670 {
2671 // Copy characters from the left string.
2672 BuildCopySeqStringChars(
2673 left, graph()->GetConstant0(), String::ONE_BYTE_ENCODING, result,
2674 graph()->GetConstant0(), String::ONE_BYTE_ENCODING, left_length);
2675
2676 // Copy characters from the right string.
2677 BuildCopySeqStringChars(
2678 right, graph()->GetConstant0(), String::ONE_BYTE_ENCODING, result,
2679 left_length, String::ONE_BYTE_ENCODING, right_length);
2680 }
2681 if_twobyte.End();
2682
2683 // Count the native string addition.
2684 AddIncrementCounter(isolate()->counters()->string_add_native());
2685
2686 // Return the sequential string.
2687 Push(result);
2688 }
2689 if_size.Else();
2690 {
2691 // Fallback to the runtime to add the two strings. The string has to be
2692 // allocated in LO space.
2693 Add<HPushArguments>(left, right);
2694 Push(Add<HCallRuntime>(Runtime::FunctionForId(Runtime::kStringAdd), 2));
2695 }
2696 if_size.End();
2697 }
2698 if_sameencodingandsequential.Else();
2699 {
2700 // Fallback to the runtime to add the two strings.
2701 Add<HPushArguments>(left, right);
2702 Push(Add<HCallRuntime>(Runtime::FunctionForId(Runtime::kStringAdd), 2));
2703 }
2704 if_sameencodingandsequential.End();
2705 }
2706 if_createcons.End();
2707
2708 return Pop();
2709}
2710
2711
// Emits graph code that concatenates two strings. Fast-paths the cases where
// either operand is empty (returning the other operand unchanged) and only
// falls through to the full concatenation for two non-empty strings.
// |allocation_mode| is forwarded to the allocation of the result string.
// Returns the HValue holding the resulting string.
HValue* HGraphBuilder::BuildStringAdd(
    HValue* left,
    HValue* right,
    HAllocationMode allocation_mode) {
  // Everything emitted here is free of observable side effects, so it may be
  // hoisted/eliminated by later passes without extra simulates.
  NoObservableSideEffectsScope no_effects(this);

  // Determine string lengths.
  HValue* left_length = AddLoadStringLength(left);
  HValue* right_length = AddLoadStringLength(right);

  // Check if left string is empty.
  IfBuilder if_leftempty(this);
  if_leftempty.If<HCompareNumericAndBranch>(
      left_length, graph()->GetConstant0(), Token::EQ);
  if_leftempty.Then();
  {
    // Count the native string addition.
    AddIncrementCounter(isolate()->counters()->string_add_native());

    // Just return the right string.
    Push(right);
  }
  if_leftempty.Else();
  {
    // Check if right string is empty.
    IfBuilder if_rightempty(this);
    if_rightempty.If<HCompareNumericAndBranch>(
        right_length, graph()->GetConstant0(), Token::EQ);
    if_rightempty.Then();
    {
      // Count the native string addition.
      AddIncrementCounter(isolate()->counters()->string_add_native());

      // Just return the left string.
      Push(left);
    }
    if_rightempty.Else();
    {
      // Add the two non-empty strings.
      Push(BuildUncheckedStringAdd(left, right, allocation_mode));
    }
    if_rightempty.End();
  }
  if_rightempty.End() above closes the inner diamond; the result of whichever
  // NOTE: the three branches each pushed exactly one value, so a single Pop()
  // below yields the phi of the possible results.
  if_leftempty.End();

  return Pop();
}
2759
2760
// Emits a monomorphic keyed element load or store on |checked_object|, whose
// map has already been checked by the caller (CompareMap/CheckMaps, or we are
// inside a stub). Handles fast smi/object/double elements, fixed typed arrays,
// growing stores, and copy-on-write backing stores, as selected by
// |elements_kind|, |access_type|, |load_mode| and |store_mode|.
// Returns the load/store instruction that performs the access.
HInstruction* HGraphBuilder::BuildUncheckedMonomorphicElementAccess(
    HValue* checked_object,
    HValue* key,
    HValue* val,
    bool is_js_array,
    ElementsKind elements_kind,
    PropertyAccessType access_type,
    LoadKeyedHoleMode load_mode,
    KeyedAccessStoreMode store_mode) {
  DCHECK(top_info()->IsStub() || checked_object->IsCompareMap() ||
         checked_object->IsCheckMaps());
  // Typed-array elements never live on a JSArray.
  DCHECK(!IsFixedTypedArrayElementsKind(elements_kind) || !is_js_array);
  // No GVNFlag is necessary for ElementsKind if there is an explicit dependency
  // on a HElementsTransition instruction. The flag can also be removed if the
  // map to check has FAST_HOLEY_ELEMENTS, since there can be no further
  // ElementsKind transitions. Finally, the dependency can be removed for stores
  // for FAST_ELEMENTS, since a transition to HOLEY elements won't change the
  // generated store code.
  if ((elements_kind == FAST_HOLEY_ELEMENTS) ||
      (elements_kind == FAST_ELEMENTS && access_type == STORE)) {
    checked_object->ClearDependsOnFlag(kElementsKind);
  }

  bool fast_smi_only_elements = IsFastSmiElementsKind(elements_kind);
  bool fast_elements = IsFastObjectElementsKind(elements_kind);
  HValue* elements = AddLoadElements(checked_object);
  // Stores into fast elements must not hit a copy-on-write backing store
  // (unless the COW case is handled explicitly below), so guard with a map
  // check against the non-COW fixed array map.
  if (access_type == STORE && (fast_elements || fast_smi_only_elements) &&
      store_mode != STORE_NO_TRANSITION_HANDLE_COW) {
    HCheckMaps* check_cow_map = Add<HCheckMaps>(
        elements, isolate()->factory()->fixed_array_map());
    check_cow_map->ClearDependsOnFlag(kElementsKind);
  }
  HInstruction* length = NULL;
  if (is_js_array) {
    // JSArrays keep their length on the array itself.
    length = Add<HLoadNamedField>(
        checked_object->ActualValue(), checked_object,
        HObjectAccess::ForArrayLength(elements_kind));
  } else {
    // Otherwise the backing fixed array's length bounds the access.
    length = AddLoadFixedArrayLength(elements);
  }
  length->set_type(HType::Smi());
  HValue* checked_key = NULL;
  if (IsFixedTypedArrayElementsKind(elements_kind)) {
    // Accessing a typed array: first make sure its buffer was not neutered.
    checked_object = Add<HCheckArrayBufferNotNeutered>(checked_object);

    HValue* external_pointer = Add<HLoadNamedField>(
        elements, nullptr,
        HObjectAccess::ForFixedTypedArrayBaseExternalPointer());
    HValue* base_pointer = Add<HLoadNamedField>(
        elements, nullptr, HObjectAccess::ForFixedTypedArrayBaseBasePointer());
    // The actual data pointer is external_pointer + base_pointer (covers both
    // on-heap and off-heap typed arrays).
    HValue* backing_store = AddUncasted<HAdd>(external_pointer, base_pointer,
                                              AddOfExternalAndTagged);

    if (store_mode == STORE_NO_TRANSITION_IGNORE_OUT_OF_BOUNDS) {
      // Out-of-bounds keys are silently ignored: perform the access only when
      // 0 <= key < length; negative keys deopt.
      NoObservableSideEffectsScope no_effects(this);
      IfBuilder length_checker(this);
      length_checker.If<HCompareNumericAndBranch>(key, length, Token::LT);
      length_checker.Then();
      IfBuilder negative_checker(this);
      HValue* bounds_check = negative_checker.If<HCompareNumericAndBranch>(
          key, graph()->GetConstant0(), Token::GTE);
      negative_checker.Then();
      HInstruction* result = AddElementAccess(
          backing_store, key, val, bounds_check, checked_object->ActualValue(),
          elements_kind, access_type);
      negative_checker.ElseDeopt(Deoptimizer::kNegativeKeyEncountered);
      negative_checker.End();
      length_checker.End();
      return result;
    } else {
      DCHECK(store_mode == STANDARD_STORE);
      checked_key = Add<HBoundsCheck>(key, length);
      return AddElementAccess(backing_store, checked_key, val, checked_object,
                              checked_object->ActualValue(), elements_kind,
                              access_type);
    }
  }
  DCHECK(fast_smi_only_elements ||
         fast_elements ||
         IsFastDoubleElementsKind(elements_kind));

  // In case val is stored into a fast smi array, assure that the value is a smi
  // before manipulating the backing store. Otherwise the actual store may
  // deopt, leaving the backing store in an invalid state.
  if (access_type == STORE && IsFastSmiElementsKind(elements_kind) &&
      !val->type().IsSmi()) {
    val = AddUncasted<HForceRepresentation>(val, Representation::Smi());
  }

  if (IsGrowStoreMode(store_mode)) {
    // The store may grow the backing store; coerce the value first so a late
    // representation deopt cannot leave a half-grown array behind.
    NoObservableSideEffectsScope no_effects(this);
    Representation representation = HStoreKeyed::RequiredValueRepresentation(
        elements_kind, STORE_TO_INITIALIZED_ENTRY);
    val = AddUncasted<HForceRepresentation>(val, representation);
    elements = BuildCheckForCapacityGrow(checked_object, elements,
                                         elements_kind, length, key,
                                         is_js_array, access_type);
    // BuildCheckForCapacityGrow already bounds/grows for this key.
    checked_key = key;
  } else {
    checked_key = Add<HBoundsCheck>(key, length);

    if (access_type == STORE && (fast_elements || fast_smi_only_elements)) {
      if (store_mode == STORE_NO_TRANSITION_HANDLE_COW) {
        // Copy the COW backing store before writing into it.
        NoObservableSideEffectsScope no_effects(this);
        elements = BuildCopyElementsOnWrite(checked_object, elements,
                                            elements_kind, length);
      } else {
        HCheckMaps* check_cow_map = Add<HCheckMaps>(
            elements, isolate()->factory()->fixed_array_map());
        check_cow_map->ClearDependsOnFlag(kElementsKind);
      }
    }
  }
  return AddElementAccess(elements, checked_key, val, checked_object, nullptr,
                          elements_kind, access_type, load_mode);
}
2877
2878
// Emits code that allocates a JSArray of the given (possibly dynamic) length
// via |array_builder|. A constant smi length takes a static fast path; a
// dynamic length is bounds-checked against the maximum fast-element array
// size and gets a preallocated initial capacity when it is zero.
// Returns the HValue for the allocated array.
HValue* HGraphBuilder::BuildAllocateArrayFromLength(
    JSArrayBuilder* array_builder,
    HValue* length_argument) {
  if (length_argument->IsConstant() &&
      HConstant::cast(length_argument)->HasSmiValue()) {
    // Static path: length is known at compile time.
    int array_length = HConstant::cast(length_argument)->Integer32Value();
    if (array_length == 0) {
      return array_builder->AllocateEmptyArray();
    } else {
      // capacity == length for a known non-zero length.
      return array_builder->AllocateArray(length_argument,
                                          length_argument);
    }
  }

  HValue* constant_zero = graph()->GetConstant0();
  HConstant* max_alloc_length =
      Add<HConstant>(JSArray::kInitialMaxFastElementArray);
  // Deopt if the requested length exceeds what fast elements can hold.
  HInstruction* checked_length = Add<HBoundsCheck>(length_argument,
                                                   max_alloc_length);
  IfBuilder if_builder(this);
  if_builder.If<HCompareNumericAndBranch>(checked_length, constant_zero,
                                          Token::EQ);
  if_builder.Then();
  // Zero length still gets a small preallocated backing store.
  const int initial_capacity = JSArray::kPreallocatedArrayElements;
  HConstant* initial_capacity_node = Add<HConstant>(initial_capacity);
  Push(initial_capacity_node);  // capacity
  Push(constant_zero);          // length
  if_builder.Else();
  if (!(top_info()->IsStub()) &&
      IsFastPackedElementsKind(array_builder->kind())) {
    // We'll come back later with better (holey) feedback.
    if_builder.Deopt(
        Deoptimizer::kHoleyArrayDespitePackedElements_kindFeedback);
  } else {
    Push(checked_length);  // capacity
    Push(checked_length);  // length
  }
  if_builder.End();

  // Figure out total size
  HValue* length = Pop();
  HValue* capacity = Pop();
  return array_builder->AllocateArray(capacity, length);
}
2923
2924
// Emits code computing the byte size of a backing store for |capacity|
// elements of |kind|: capacity * element_size + FixedArray header. Overflow
// flags are cleared because capacity is bounded by the caller.
HValue* HGraphBuilder::BuildCalculateElementsSize(ElementsKind kind,
                                                  HValue* capacity) {
  // Doubles occupy kDoubleSize bytes, everything else is a tagged pointer.
  int elements_size = IsFastDoubleElementsKind(kind)
      ? kDoubleSize
      : kPointerSize;

  HConstant* elements_size_value = Add<HConstant>(elements_size);
  HInstruction* mul =
      HMul::NewImul(isolate(), zone(), context(), capacity->ActualValue(),
                    elements_size_value);
  AddInstruction(mul);
  mul->ClearFlag(HValue::kCanOverflow);

  // Double and tagged arrays share the same header layout/size.
  STATIC_ASSERT(FixedDoubleArray::kHeaderSize == FixedArray::kHeaderSize);

  HConstant* header_size = Add<HConstant>(FixedArray::kHeaderSize);
  HValue* total_size = AddUncasted<HAdd>(mul, header_size);
  total_size->ClearFlag(HValue::kCanOverflow);
  return total_size;
}
2945
2946
// Allocates an (uninitialized) JSArray object in new space, reserving extra
// room for an AllocationMemento when allocation-site tracking is on.
HAllocate* HGraphBuilder::AllocateJSArrayObject(AllocationSiteMode mode) {
  int base_size = JSArray::kSize;
  if (mode == TRACK_ALLOCATION_SITE) {
    // The memento is placed directly after the array object.
    base_size += AllocationMemento::kSize;
  }
  HConstant* size_in_bytes = Add<HConstant>(base_size);
  return Add<HAllocate>(size_in_bytes, HType::JSArray(), NOT_TENURED,
                        JS_OBJECT_TYPE, graph()->GetConstant0());
}
2956
2957
// Returns a constant with the total byte size of a backing store holding
// |capacity| elements of |kind| (compile-time counterpart of
// BuildCalculateElementsSize).
HConstant* HGraphBuilder::EstablishElementsAllocationSize(
    ElementsKind kind,
    int capacity) {
  int base_size = IsFastDoubleElementsKind(kind)
      ? FixedDoubleArray::SizeFor(capacity)
      : FixedArray::SizeFor(capacity);

  return Add<HConstant>(base_size);
}
2967
2968
// Allocates a raw (header-uninitialized) elements backing store of
// |size_in_bytes| in new space; the instance type follows the elements kind.
// Callers must initialize the header (see BuildInitializeElementsHeader).
HAllocate* HGraphBuilder::BuildAllocateElements(ElementsKind kind,
                                                HValue* size_in_bytes) {
  InstanceType instance_type = IsFastDoubleElementsKind(kind)
      ? FIXED_DOUBLE_ARRAY_TYPE
      : FIXED_ARRAY_TYPE;

  return Add<HAllocate>(size_in_bytes, HType::HeapObject(), NOT_TENURED,
                        instance_type, graph()->GetConstant0());
}
2978
2979
// Writes the map and length fields of a freshly allocated elements backing
// store, making it a valid FixedArray/FixedDoubleArray for the GC.
void HGraphBuilder::BuildInitializeElementsHeader(HValue* elements,
                                                  ElementsKind kind,
                                                  HValue* capacity) {
  Factory* factory = isolate()->factory();
  Handle<Map> map = IsFastDoubleElementsKind(kind)
      ? factory->fixed_double_array_map()
      : factory->fixed_array_map();

  Add<HStoreNamedField>(elements, HObjectAccess::ForMap(), Add<HConstant>(map));
  // The stored length is the full capacity, not the JSArray length.
  Add<HStoreNamedField>(elements, HObjectAccess::ForFixedArrayLength(),
                        capacity);
}
2992
2993
// Allocates an elements backing store for |capacity| elements of |kind| and
// initializes its header. Returns the new (element-uninitialized) array.
HValue* HGraphBuilder::BuildAllocateAndInitializeArray(ElementsKind kind,
                                                       HValue* capacity) {
  // The HForceRepresentation is to prevent possible deopt on int-smi
  // conversion after allocation but before the new object fields are set.
  capacity = AddUncasted<HForceRepresentation>(capacity, Representation::Smi());
  HValue* size_in_bytes = BuildCalculateElementsSize(kind, capacity);
  HValue* new_array = BuildAllocateElements(kind, size_in_bytes);
  BuildInitializeElementsHeader(new_array, kind, capacity);
  return new_array;
}
3004
3005
// Initializes all header fields of a freshly allocated JSArray: map,
// properties (always the empty fixed array), elements (or the empty fixed
// array when |elements| is NULL), and length. When allocation-site tracking
// is enabled, also appends an AllocationMemento referring to
// |allocation_site_payload| right behind the array.
void HGraphBuilder::BuildJSArrayHeader(HValue* array,
                                       HValue* array_map,
                                       HValue* elements,
                                       AllocationSiteMode mode,
                                       ElementsKind elements_kind,
                                       HValue* allocation_site_payload,
                                       HValue* length_field) {
  Add<HStoreNamedField>(array, HObjectAccess::ForMap(), array_map);

  HConstant* empty_fixed_array =
      Add<HConstant>(isolate()->factory()->empty_fixed_array());

  Add<HStoreNamedField>(
      array, HObjectAccess::ForPropertiesPointer(), empty_fixed_array);

  Add<HStoreNamedField>(
      array, HObjectAccess::ForElementsPointer(),
      elements != NULL ? elements : empty_fixed_array);

  Add<HStoreNamedField>(
      array, HObjectAccess::ForArrayLength(elements_kind), length_field);

  if (mode == TRACK_ALLOCATION_SITE) {
    // The memento slot was reserved by AllocateJSArrayObject.
    BuildCreateAllocationMemento(
        array, Add<HConstant>(JSArray::kSize), allocation_site_payload);
  }
}
3033
3034
// Emits the actual keyed load or store on an elements backing store.
// |checked_key| must already be bounds-checked. For stores, UINT8_CLAMPED
// values are clamped first; for loads, uint32 results are recorded for the
// later uint32 analysis pass. Returns the emitted instruction.
HInstruction* HGraphBuilder::AddElementAccess(
    HValue* elements, HValue* checked_key, HValue* val, HValue* dependency,
    HValue* backing_store_owner, ElementsKind elements_kind,
    PropertyAccessType access_type, LoadKeyedHoleMode load_mode) {
  if (access_type == STORE) {
    DCHECK(val != NULL);
    if (elements_kind == UINT8_CLAMPED_ELEMENTS) {
      // Clamp to [0, 255] as required by Uint8ClampedArray semantics.
      val = Add<HClampToUint8>(val);
    }
    return Add<HStoreKeyed>(elements, checked_key, val, backing_store_owner,
                            elements_kind, STORE_TO_INITIALIZED_ENTRY);
  }

  DCHECK(access_type == LOAD);
  DCHECK(val == NULL);
  HLoadKeyed* load =
      Add<HLoadKeyed>(elements, checked_key, dependency, backing_store_owner,
                      elements_kind, load_mode);
  if (elements_kind == UINT32_ELEMENTS) {
    // Loads from uint32 arrays may not fit in int32; track for later analysis.
    graph()->RecordUint32Instruction(load);
  }
  return load;
}
3058
3059
// Emits a load of |object|'s map, guarded by |dependency| if given.
HLoadNamedField* HGraphBuilder::AddLoadMap(HValue* object,
                                           HValue* dependency) {
  return Add<HLoadNamedField>(object, dependency, HObjectAccess::ForMap());
}
3064
3065
// Emits a load of |object|'s elements pointer, guarded by |dependency|.
HLoadNamedField* HGraphBuilder::AddLoadElements(HValue* object,
                                                HValue* dependency) {
  return Add<HLoadNamedField>(
      object, dependency, HObjectAccess::ForElementsPointer());
}
3071
3072
// Emits a load of a FixedArray's length field (the backing-store capacity,
// not a JSArray length).
HLoadNamedField* HGraphBuilder::AddLoadFixedArrayLength(
    HValue* array,
    HValue* dependency) {
  return Add<HLoadNamedField>(
      array, dependency, HObjectAccess::ForFixedArrayLength());
}
3079
3080
// Emits a load of a JSArray's length field; the access representation
// depends on the elements |kind| (smi vs. tagged).
HLoadNamedField* HGraphBuilder::AddLoadArrayLength(HValue* array,
                                                   ElementsKind kind,
                                                   HValue* dependency) {
  return Add<HLoadNamedField>(
      array, dependency, HObjectAccess::ForArrayLength(kind));
}
3087
3088
// Emits the growth policy for elements backing stores:
// new_capacity = old_capacity + old_capacity / 2 + 16.
// Overflow flags are cleared; capacity is bounded elsewhere.
HValue* HGraphBuilder::BuildNewElementsCapacity(HValue* old_capacity) {
  HValue* half_old_capacity = AddUncasted<HShr>(old_capacity,
                                                graph_->GetConstant1());

  HValue* new_capacity = AddUncasted<HAdd>(half_old_capacity, old_capacity);
  new_capacity->ClearFlag(HValue::kCanOverflow);

  // Guarantee a minimum growth step so tiny arrays don't regrow repeatedly.
  HValue* min_growth = Add<HConstant>(16);

  new_capacity = AddUncasted<HAdd>(new_capacity, min_growth);
  new_capacity->ClearFlag(HValue::kCanOverflow);

  return new_capacity;
}
3103
3104
// Emits code that grows |object|'s backing store to |new_capacity| elements
// of |new_kind|: allocates a new store, copies the first |length| elements
// over (converting from |kind| to |new_kind|), and installs the new store on
// the object. Returns the new elements array.
HValue* HGraphBuilder::BuildGrowElementsCapacity(HValue* object,
                                                 HValue* elements,
                                                 ElementsKind kind,
                                                 ElementsKind new_kind,
                                                 HValue* length,
                                                 HValue* new_capacity) {
  // Deopt if the requested capacity cannot fit in a regular heap page.
  Add<HBoundsCheck>(new_capacity, Add<HConstant>(
          (Page::kMaxRegularHeapObjectSize - FixedArray::kHeaderSize) >>
          ElementsKindToShiftSize(new_kind)));

  HValue* new_elements =
      BuildAllocateAndInitializeArray(new_kind, new_capacity);

  BuildCopyElements(elements, kind, new_elements,
                    new_kind, length, new_capacity);

  Add<HStoreNamedField>(object, HObjectAccess::ForElementsPointer(),
                        new_elements);

  return new_elements;
}
3126
3127
// Emits code filling elements [from, to) of |elements| with |value|. When
// |to| is NULL the backing store's full length is used. Small constant
// ranges are unrolled into straight-line stores; otherwise a backwards loop
// is emitted.
void HGraphBuilder::BuildFillElementsWithValue(HValue* elements,
                                               ElementsKind elements_kind,
                                               HValue* from,
                                               HValue* to,
                                               HValue* value) {
  if (to == NULL) {
    to = AddLoadFixedArrayLength(elements);
  }

  // Special loop unfolding case
  STATIC_ASSERT(JSArray::kPreallocatedArrayElements <=
                kElementLoopUnrollThreshold);
  int initial_capacity = -1;
  if (from->IsInteger32Constant() && to->IsInteger32Constant()) {
    int constant_from = from->GetInteger32Constant();
    int constant_to = to->GetInteger32Constant();

    // Only unroll a full fill starting at 0 and below the threshold.
    if (constant_from == 0 && constant_to <= kElementLoopUnrollThreshold) {
      initial_capacity = constant_to;
    }
  }

  if (initial_capacity >= 0) {
    for (int i = 0; i < initial_capacity; i++) {
      HInstruction* key = Add<HConstant>(i);
      Add<HStoreKeyed>(elements, key, value, nullptr, elements_kind);
    }
  } else {
    // Carefully loop backwards so that "from" remains live through the loop
    // rather than "to". This often corresponds to keeping length live rather
    // than capacity, which helps register allocation, since length is used
    // more often than capacity after filling with holes.
    LoopBuilder builder(this, context(), LoopBuilder::kPostDecrement);

    HValue* key = builder.BeginBody(to, from, Token::GT);

    HValue* adjusted_key = AddUncasted<HSub>(key, graph()->GetConstant1());
    adjusted_key->ClearFlag(HValue::kCanOverflow);

    Add<HStoreKeyed>(elements, adjusted_key, value, nullptr, elements_kind);

    builder.EndBody();
  }
}
3172
3173
// Emits code filling elements [from, to) with the hole marker appropriate
// for |elements_kind| (the-hole sentinel for tagged kinds, hole NaN for
// double kinds), pre-initializing the store so it is GC-safe.
void HGraphBuilder::BuildFillElementsWithHole(HValue* elements,
                                              ElementsKind elements_kind,
                                              HValue* from,
                                              HValue* to) {
  // Fast elements kinds need to be initialized in case statements below cause a
  // garbage collection.

  HValue* hole = IsFastSmiOrObjectElementsKind(elements_kind)
      ? graph()->GetConstantHole()
      : Add<HConstant>(HConstant::kHoleNaN);

  // Since we're about to store a hole value, the store instruction below must
  // assume an elements kind that supports heap object values.
  if (IsFastSmiOrObjectElementsKind(elements_kind)) {
    elements_kind = FAST_HOLEY_ELEMENTS;
  }

  BuildFillElementsWithValue(elements, elements_kind, from, to, hole);
}
3193
3194
// Emits code copying |length| property slots from |from_properties| to
// |to_properties| and padding the remaining slots up to |capacity| with
// undefined. The copy loop runs backwards (see BuildFillElementsWithValue
// for the register-allocation rationale).
void HGraphBuilder::BuildCopyProperties(HValue* from_properties,
                                        HValue* to_properties, HValue* length,
                                        HValue* capacity) {
  ElementsKind kind = FAST_ELEMENTS;

  // Pad the tail [length, capacity) with undefined first so the array is in
  // a consistent state if a GC happens during the copy.
  BuildFillElementsWithValue(to_properties, kind, length, capacity,
                             graph()->GetConstantUndefined());

  LoopBuilder builder(this, context(), LoopBuilder::kPostDecrement);

  HValue* key = builder.BeginBody(length, graph()->GetConstant0(), Token::GT);

  key = AddUncasted<HSub>(key, graph()->GetConstant1());
  key->ClearFlag(HValue::kCanOverflow);

  HValue* element =
      Add<HLoadKeyed>(from_properties, key, nullptr, nullptr, kind);

  Add<HStoreKeyed>(to_properties, key, element, nullptr, kind);

  builder.EndBody();
}
3217
3218
// Emits code copying |length| elements from |from_elements| (of
// |from_elements_kind|) into |to_elements| (of |to_elements_kind|),
// converting hole representations between tagged and double kinds as needed.
// Small constant capacities are fully unrolled; otherwise a backwards loop is
// emitted, with hole pre-fill where a GC during the copy could otherwise
// observe uninitialized memory.
void HGraphBuilder::BuildCopyElements(HValue* from_elements,
                                      ElementsKind from_elements_kind,
                                      HValue* to_elements,
                                      ElementsKind to_elements_kind,
                                      HValue* length,
                                      HValue* capacity) {
  int constant_capacity = -1;
  if (capacity != NULL &&
      capacity->IsConstant() &&
      HConstant::cast(capacity)->HasInteger32Value()) {
    int constant_candidate = HConstant::cast(capacity)->Integer32Value();
    if (constant_candidate <= kElementLoopUnrollThreshold) {
      constant_capacity = constant_candidate;
    }
  }

  bool pre_fill_with_holes =
      IsFastDoubleElementsKind(from_elements_kind) &&
      IsFastObjectElementsKind(to_elements_kind);
  if (pre_fill_with_holes) {
    // If the copy might trigger a GC, make sure that the FixedArray is
    // pre-initialized with holes to make sure that it's always in a
    // consistent state.
    BuildFillElementsWithHole(to_elements, to_elements_kind,
                              graph()->GetConstant0(), NULL);
  }

  if (constant_capacity != -1) {
    // Unroll the loop for small elements kinds.
    for (int i = 0; i < constant_capacity; i++) {
      HValue* key_constant = Add<HConstant>(i);
      HInstruction* value = Add<HLoadKeyed>(
          from_elements, key_constant, nullptr, nullptr, from_elements_kind);
      Add<HStoreKeyed>(to_elements, key_constant, value, nullptr,
                       to_elements_kind);
    }
  } else {
    // If only [0, length) is copied, the tail [length, capacity) still has
    // to be hole-initialized (unless the whole store was pre-filled above).
    if (!pre_fill_with_holes &&
        (capacity == NULL || !length->Equals(capacity))) {
      BuildFillElementsWithHole(to_elements, to_elements_kind,
                                length, NULL);
    }

    LoopBuilder builder(this, context(), LoopBuilder::kPostDecrement);

    HValue* key = builder.BeginBody(length, graph()->GetConstant0(),
                                    Token::GT);

    key = AddUncasted<HSub>(key, graph()->GetConstant1());
    key->ClearFlag(HValue::kCanOverflow);

    HValue* element = Add<HLoadKeyed>(from_elements, key, nullptr, nullptr,
                                      from_elements_kind, ALLOW_RETURN_HOLE);

    // Copying holes from a holey source into a smi destination must use a
    // kind that can represent the hole.
    ElementsKind kind = (IsHoleyElementsKind(from_elements_kind) &&
                         IsFastSmiElementsKind(to_elements_kind))
        ? FAST_HOLEY_ELEMENTS : to_elements_kind;

    if (IsHoleyElementsKind(from_elements_kind) &&
        from_elements_kind != to_elements_kind) {
      // Holes must be translated to the destination kind's hole encoding.
      IfBuilder if_hole(this);
      if_hole.If<HCompareHoleAndBranch>(element);
      if_hole.Then();
      HConstant* hole_constant = IsFastDoubleElementsKind(to_elements_kind)
          ? Add<HConstant>(HConstant::kHoleNaN)
          : graph()->GetConstantHole();
      Add<HStoreKeyed>(to_elements, key, hole_constant, nullptr, kind);
      if_hole.Else();
      HStoreKeyed* store =
          Add<HStoreKeyed>(to_elements, key, element, nullptr, kind);
      store->SetFlag(HValue::kAllowUndefinedAsNaN);
      if_hole.End();
    } else {
      HStoreKeyed* store =
          Add<HStoreKeyed>(to_elements, key, element, nullptr, kind);
      store->SetFlag(HValue::kAllowUndefinedAsNaN);
    }

    builder.EndBody();
  }

  Counters* counters = isolate()->counters();
  AddIncrementCounter(counters->inlined_copied_elements());
}
3303
3304
// Emits a shallow clone of a boilerplate array whose elements are
// copy-on-write: the clone shares the boilerplate's elements pointer, so no
// element copying is needed. Returns the new JSArray.
HValue* HGraphBuilder::BuildCloneShallowArrayCow(HValue* boilerplate,
                                                 HValue* allocation_site,
                                                 AllocationSiteMode mode,
                                                 ElementsKind kind) {
  HAllocate* array = AllocateJSArrayObject(mode);

  HValue* map = AddLoadMap(boilerplate);
  HValue* elements = AddLoadElements(boilerplate);
  HValue* length = AddLoadArrayLength(boilerplate, kind);

  // COW elements are a plain FixedArray, hence FAST_ELEMENTS here.
  BuildJSArrayHeader(array,
                     map,
                     elements,
                     mode,
                     FAST_ELEMENTS,
                     allocation_site,
                     length);
  return array;
}
3324
3325
// Emits a clone of a boilerplate array with no elements: the clone gets the
// boilerplate's map, the empty fixed array as elements, and length 0.
HValue* HGraphBuilder::BuildCloneShallowArrayEmpty(HValue* boilerplate,
                                                   HValue* allocation_site,
                                                   AllocationSiteMode mode) {
  HAllocate* array = AllocateJSArrayObject(mode);

  HValue* map = AddLoadMap(boilerplate);

  BuildJSArrayHeader(array,
                     map,
                     NULL,  // set elements to empty fixed array
                     mode,
                     FAST_ELEMENTS,
                     allocation_site,
                     graph()->GetConstant0());
  return array;
}
3342
3343
// Emits a deep-enough clone of a non-empty boilerplate array: allocates a
// fresh JSArray and a fresh elements store, then copies the boilerplate's
// elements into it. Returns the cloned JSArray.
HValue* HGraphBuilder::BuildCloneShallowArrayNonEmpty(HValue* boilerplate,
                                                      HValue* allocation_site,
                                                      AllocationSiteMode mode,
                                                      ElementsKind kind) {
  HValue* boilerplate_elements = AddLoadElements(boilerplate);
  HValue* capacity = AddLoadFixedArrayLength(boilerplate_elements);

  // Generate size calculation code here in order to make it dominate
  // the JSArray allocation.
  HValue* elements_size = BuildCalculateElementsSize(kind, capacity);

  // Create empty JSArray object for now, store elimination should remove
  // redundant initialization of elements and length fields and at the same
  // time the object will be fully prepared for GC if it happens during
  // elements allocation.
  HValue* result = BuildCloneShallowArrayEmpty(
      boilerplate, allocation_site, mode);

  HAllocate* elements = BuildAllocateElements(kind, elements_size);

  Add<HStoreNamedField>(result, HObjectAccess::ForElementsPointer(), elements);

  // The allocation for the cloned array above causes register pressure on
  // machines with low register counts. Force a reload of the boilerplate
  // elements here to free up a register for the allocation to avoid unnecessary
  // spillage.
  boilerplate_elements = AddLoadElements(boilerplate);
  boilerplate_elements->SetFlag(HValue::kCantBeReplaced);

  // Copy the elements array header.
  for (int i = 0; i < FixedArrayBase::kHeaderSize; i += kPointerSize) {
    HObjectAccess access = HObjectAccess::ForFixedArrayHeader(i);
    Add<HStoreNamedField>(
        elements, access,
        Add<HLoadNamedField>(boilerplate_elements, nullptr, access));
  }

  // And the result of the length
  HValue* length = AddLoadArrayLength(boilerplate, kind);
  Add<HStoreNamedField>(result, HObjectAccess::ForArrayLength(kind), length);

  BuildCopyElements(boilerplate_elements, kind, elements,
                    kind, length, NULL);
  return result;
}
3389
3390
// Emits code that writes an AllocationMemento directly behind
// |previous_object| (at offset |previous_object_size|), pointing it at
// |allocation_site|. When allocation-site pretenuring is enabled, also bumps
// the site's create-count so the pretenuring heuristic sees this allocation.
void HGraphBuilder::BuildCreateAllocationMemento(
    HValue* previous_object,
    HValue* previous_object_size,
    HValue* allocation_site) {
  DCHECK(allocation_site != NULL);
  // The memento lives inside the same allocation as previous_object.
  HInnerAllocatedObject* allocation_memento = Add<HInnerAllocatedObject>(
      previous_object, previous_object_size, HType::HeapObject());
  AddStoreMapConstant(
      allocation_memento, isolate()->factory()->allocation_memento_map());
  Add<HStoreNamedField>(
      allocation_memento,
      HObjectAccess::ForAllocationMementoSite(),
      allocation_site);
  if (FLAG_allocation_site_pretenuring) {
    HValue* memento_create_count =
        Add<HLoadNamedField>(allocation_site, nullptr,
                             HObjectAccess::ForAllocationSiteOffset(
                                 AllocationSite::kPretenureCreateCountOffset));
    memento_create_count = AddUncasted<HAdd>(
        memento_create_count, graph()->GetConstant1());
    // This smi value is reset to zero after every gc, overflow isn't a problem
    // since the counter is bounded by the new space size.
    memento_create_count->ClearFlag(HValue::kCanOverflow);
    Add<HStoreNamedField>(
        allocation_site, HObjectAccess::ForAllocationSiteOffset(
            AllocationSite::kPretenureCreateCountOffset), memento_create_count);
  }
}
3419
3420
// Emits a load of the native context from the current context.
HInstruction* HGraphBuilder::BuildGetNativeContext() {
  return Add<HLoadNamedField>(
      context(), nullptr,
      HObjectAccess::ForContextSlot(Context::NATIVE_CONTEXT_INDEX));
}
3426
3427
// Emits a load of the native context reachable from |closure|: first the
// function's context, then that context's native-context slot.
HInstruction* HGraphBuilder::BuildGetNativeContext(HValue* closure) {
  // Get the global object, then the native context
  HInstruction* context = Add<HLoadNamedField>(
      closure, nullptr, HObjectAccess::ForFunctionContextPointer());
  return Add<HLoadNamedField>(
      context, nullptr,
      HObjectAccess::ForContextSlot(Context::NATIVE_CONTEXT_INDEX));
}
3436
3437
// Emits loads that fetch the script context at |context_index| out of the
// native context's script context table.
HInstruction* HGraphBuilder::BuildGetScriptContext(int context_index) {
  HValue* native_context = BuildGetNativeContext();
  HValue* script_context_table = Add<HLoadNamedField>(
      native_context, nullptr,
      HObjectAccess::ForContextSlot(Context::SCRIPT_CONTEXT_TABLE_INDEX));
  return Add<HLoadNamedField>(script_context_table, nullptr,
                              HObjectAccess::ForScriptContext(context_index));
}
3446
3447
// Emits code walking |depth| (or compile-time |depth_value|) links up the
// context chain from the current context via each context's PREVIOUS slot.
// Exactly one of the two parameters is used: a non-NULL |depth| produces a
// runtime loop, otherwise the walk is unrolled |depth_value| times.
// Returns the resulting context.
HValue* HGraphBuilder::BuildGetParentContext(HValue* depth, int depth_value) {
  HValue* script_context = context();
  if (depth != NULL) {
    HValue* zero = graph()->GetConstant0();

    // The loop carries (context, remaining depth) through the environment.
    Push(script_context);
    Push(depth);

    LoopBuilder loop(this);
    loop.BeginBody(2);  // Drop script_context and depth from last environment
                        // to appease live range building without simulates.
    depth = Pop();
    script_context = Pop();

    script_context = Add<HLoadNamedField>(
        script_context, nullptr,
        HObjectAccess::ForContextSlot(Context::PREVIOUS_INDEX));
    depth = AddUncasted<HSub>(depth, graph()->GetConstant1());
    depth->ClearFlag(HValue::kCanOverflow);

    IfBuilder if_break(this);
    if_break.If<HCompareNumericAndBranch, HValue*>(depth, zero, Token::EQ);
    if_break.Then();
    {
      Push(script_context);  // The result.
      loop.Break();
    }
    if_break.Else();
    {
      // Keep iterating with the updated pair.
      Push(script_context);
      Push(depth);
    }
    loop.EndBody();
    if_break.End();

    script_context = Pop();
  } else if (depth_value > 0) {
    // Unroll the above loop.
    for (int i = 0; i < depth_value; i++) {
      script_context = Add<HLoadNamedField>(
          script_context, nullptr,
          HObjectAccess::ForContextSlot(Context::PREVIOUS_INDEX));
    }
  }
  return script_context;
}
3494
3495
// Emits loads fetching the Array constructor function out of the native
// context.
HInstruction* HGraphBuilder::BuildGetArrayFunction() {
  HInstruction* native_context = BuildGetNativeContext();
  HInstruction* index =
      Add<HConstant>(static_cast<int32_t>(Context::ARRAY_FUNCTION_INDEX));
  return Add<HLoadKeyed>(native_context, index, nullptr, nullptr,
                         FAST_ELEMENTS);
}
3503
3504
// Emits an accessor for a field of a JSArrayBufferView (e.g. byteLength,
// byteOffset): loads the field at |index|, but yields 0 instead when the
// view's underlying ArrayBuffer has been neutered.
HValue* HGraphBuilder::BuildArrayBufferViewFieldAccessor(HValue* object,
                                                         HValue* checked_object,
                                                         FieldIndex index) {
  NoObservableSideEffectsScope scope(this);
  HObjectAccess access = HObjectAccess::ForObservableJSObjectOffset(
      index.offset(), Representation::Tagged());
  HInstruction* buffer = Add<HLoadNamedField>(
      object, checked_object, HObjectAccess::ForJSArrayBufferViewBuffer());
  HInstruction* field = Add<HLoadNamedField>(object, checked_object, access);

  // Test the WasNeutered bit in the buffer's bit field.
  HInstruction* flags = Add<HLoadNamedField>(
      buffer, nullptr, HObjectAccess::ForJSArrayBufferBitField());
  HValue* was_neutered_mask =
      Add<HConstant>(1 << JSArrayBuffer::WasNeutered::kShift);
  HValue* was_neutered_test =
      AddUncasted<HBitwise>(Token::BIT_AND, flags, was_neutered_mask);

  IfBuilder if_was_neutered(this);
  if_was_neutered.If<HCompareNumericAndBranch>(
      was_neutered_test, graph()->GetConstant0(), Token::NE);
  if_was_neutered.Then();
  Push(graph()->GetConstant0());  // Neutered: all view fields read as 0.
  if_was_neutered.Else();
  Push(field);
  if_was_neutered.End();

  return Pop();
}
3533
3534
3535HGraphBuilder::JSArrayBuilder::JSArrayBuilder(HGraphBuilder* builder,
3536 ElementsKind kind,
3537 HValue* allocation_site_payload,
3538 HValue* constructor_function,
3539 AllocationSiteOverrideMode override_mode) :
3540 builder_(builder),
3541 kind_(kind),
3542 allocation_site_payload_(allocation_site_payload),
3543 constructor_function_(constructor_function) {
3544 DCHECK(!allocation_site_payload->IsConstant() ||
3545 HConstant::cast(allocation_site_payload)->handle(
3546 builder_->isolate())->IsAllocationSite());
3547 mode_ = override_mode == DISABLE_ALLOCATION_SITES
3548 ? DONT_TRACK_ALLOCATION_SITE
3549 : AllocationSite::GetMode(kind);
3550}
3551
3552
3553HGraphBuilder::JSArrayBuilder::JSArrayBuilder(HGraphBuilder* builder,
3554 ElementsKind kind,
3555 HValue* constructor_function) :
3556 builder_(builder),
3557 kind_(kind),
3558 mode_(DONT_TRACK_ALLOCATION_SITE),
3559 allocation_site_payload_(NULL),
3560 constructor_function_(constructor_function) {
3561}
3562
3563
// Emits code that produces the map for the array being built. Outside of
// stubs a constant map can be embedded directly; inside stubs the map is
// loaded either from the constructor function or from the native context.
HValue* HGraphBuilder::JSArrayBuilder::EmitMapCode() {
  if (!builder()->top_info()->IsStub()) {
    // A constant map is fine.
    Handle<Map> map(builder()->isolate()->get_initial_js_array_map(kind_),
                    builder()->isolate());
    return builder()->Add<HConstant>(map);
  }

  if (constructor_function_ != NULL && kind_ == GetInitialFastElementsKind()) {
    // No need for a context lookup if the kind_ matches the initial
    // map, because we can just load the map in that case.
    HObjectAccess access = HObjectAccess::ForPrototypeOrInitialMap();
    return builder()->Add<HLoadNamedField>(constructor_function_, nullptr,
                                           access);
  }

  // TODO(mvstanton): we should always have a constructor function if we
  // are creating a stub.
  // Fall back to looking the map up in the (constructor's) native context.
  HInstruction* native_context = constructor_function_ != NULL
      ? builder()->BuildGetNativeContext(constructor_function_)
      : builder()->BuildGetNativeContext();

  HObjectAccess access =
      HObjectAccess::ForContextSlot(Context::ArrayMapIndex(kind_));
  return builder()->Add<HLoadNamedField>(native_context, nullptr, access);
}
3590
3591
3592HValue* HGraphBuilder::JSArrayBuilder::EmitInternalMapCode() {
3593 // Find the map near the constructor function
3594 HObjectAccess access = HObjectAccess::ForPrototypeOrInitialMap();
3595 return builder()->Add<HLoadNamedField>(constructor_function_, nullptr,
3596 access);
3597}
3598
3599
3600HAllocate* HGraphBuilder::JSArrayBuilder::AllocateEmptyArray() {
3601 HConstant* capacity = builder()->Add<HConstant>(initial_capacity());
3602 return AllocateArray(capacity,
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00003603 builder()->graph()->GetConstant0());
3604}
3605
3606
// Allocates and fully initializes a JSArray with the given (smi) capacity
// and length, including its separately-allocated elements backing store.
// Returns the JSArray allocation; the elements allocation is remembered in
// elements_location_.
HAllocate* HGraphBuilder::JSArrayBuilder::AllocateArray(
    HValue* capacity,
    HValue* length_field,
    FillMode fill_mode) {
  // These HForceRepresentations are because we store these as fields in the
  // objects we construct, and an int32-to-smi HChange could deopt. Accept
  // the deopt possibility now, before allocation occurs.
  capacity =
      builder()->AddUncasted<HForceRepresentation>(capacity,
                                                   Representation::Smi());
  length_field =
      builder()->AddUncasted<HForceRepresentation>(length_field,
                                                   Representation::Smi());

  // Generate size calculation code here in order to make it dominate
  // the JSArray allocation.
  HValue* elements_size =
      builder()->BuildCalculateElementsSize(kind_, capacity);

  // Bail out for large objects.
  HValue* max_regular_heap_object_size =
      builder()->Add<HConstant>(Page::kMaxRegularHeapObjectSize);
  builder()->Add<HBoundsCheck>(elements_size, max_regular_heap_object_size);

  // Allocate (dealing with failure appropriately)
  HAllocate* array_object = builder()->AllocateJSArrayObject(mode_);

  // Fill in the fields: map, properties, length
  HValue* map;
  if (allocation_site_payload_ == NULL) {
    map = EmitInternalMapCode();
  } else {
    map = EmitMapCode();
  }

  builder()->BuildJSArrayHeader(array_object,
                                map,
                                NULL, // set elements to empty fixed array
                                mode_,
                                kind_,
                                allocation_site_payload_,
                                length_field);

  // Allocate and initialize the elements
  elements_location_ = builder()->BuildAllocateElements(kind_, elements_size);

  builder()->BuildInitializeElementsHeader(elements_location_, kind_, capacity);

  // Set the elements
  builder()->Add<HStoreNamedField>(
      array_object, HObjectAccess::ForElementsPointer(), elements_location_);

  // Optionally pre-fill the new backing store with the hole value.
  if (fill_mode == FILL_WITH_HOLE) {
    builder()->BuildFillElementsWithHole(elements_location_, kind_,
                                         graph()->GetConstant0(), capacity);
  }

  return array_object;
}
3666
3667
3668HValue* HGraphBuilder::AddLoadJSBuiltin(int context_index) {
3669 HValue* native_context = BuildGetNativeContext();
3670 HObjectAccess function_access = HObjectAccess::ForContextSlot(context_index);
3671 return Add<HLoadNamedField>(native_context, nullptr, function_access);
3672}
3673
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00003674HOptimizedGraphBuilder::HOptimizedGraphBuilder(CompilationInfo* info)
Ben Murdoch097c5b22016-05-18 11:27:45 +01003675 : HGraphBuilder(info, CallInterfaceDescriptor()),
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00003676 function_state_(NULL),
Ben Murdochda12d292016-06-02 14:46:10 +01003677 initial_function_state_(this, info, NORMAL_RETURN, 0,
3678 TailCallMode::kAllow),
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00003679 ast_context_(NULL),
3680 break_scope_(NULL),
3681 inlined_count_(0),
3682 globals_(10, info->zone()),
Ben Murdochc5610432016-08-08 18:44:38 +01003683 osr_(new (info->zone()) HOsrBuilder(this)),
3684 bounds_(info->zone()) {
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00003685 // This is not initialized in the initializer list because the
3686 // constructor for the initial state relies on function_state_ == NULL
3687 // to know it's the initial state.
3688 function_state_ = &initial_function_state_;
3689 InitializeAstVisitor(info->isolate());
3690 if (top_info()->is_tracking_positions()) {
3691 SetSourcePosition(info->shared_info()->start_position());
3692 }
3693}
3694
3695
3696HBasicBlock* HOptimizedGraphBuilder::CreateJoin(HBasicBlock* first,
3697 HBasicBlock* second,
3698 BailoutId join_id) {
3699 if (first == NULL) {
3700 return second;
3701 } else if (second == NULL) {
3702 return first;
3703 } else {
3704 HBasicBlock* join_block = graph()->CreateBasicBlock();
3705 Goto(first, join_block);
3706 Goto(second, join_block);
3707 join_block->SetJoinId(join_id);
3708 return join_block;
3709 }
3710}
3711
3712
3713HBasicBlock* HOptimizedGraphBuilder::JoinContinue(IterationStatement* statement,
3714 HBasicBlock* exit_block,
3715 HBasicBlock* continue_block) {
3716 if (continue_block != NULL) {
3717 if (exit_block != NULL) Goto(exit_block, continue_block);
3718 continue_block->SetJoinId(statement->ContinueId());
3719 return continue_block;
3720 }
3721 return exit_block;
3722}
3723
3724
3725HBasicBlock* HOptimizedGraphBuilder::CreateLoop(IterationStatement* statement,
3726 HBasicBlock* loop_entry,
3727 HBasicBlock* body_exit,
3728 HBasicBlock* loop_successor,
3729 HBasicBlock* break_block) {
3730 if (body_exit != NULL) Goto(body_exit, loop_entry);
3731 loop_entry->PostProcessLoopHeader(statement);
3732 if (break_block != NULL) {
3733 if (loop_successor != NULL) Goto(loop_successor, break_block);
3734 break_block->SetJoinId(statement->ExitId());
3735 return break_block;
3736 }
3737 return loop_successor;
3738}
3739
3740
3741// Build a new loop header block and set it as the current block.
3742HBasicBlock* HOptimizedGraphBuilder::BuildLoopEntry() {
3743 HBasicBlock* loop_entry = CreateLoopHeaderBlock();
3744 Goto(loop_entry);
3745 set_current_block(loop_entry);
3746 return loop_entry;
3747}
3748
3749
3750HBasicBlock* HOptimizedGraphBuilder::BuildLoopEntry(
3751 IterationStatement* statement) {
Ben Murdochda12d292016-06-02 14:46:10 +01003752 HBasicBlock* loop_entry;
3753
3754 if (osr()->HasOsrEntryAt(statement)) {
3755 loop_entry = osr()->BuildOsrLoopEntry(statement);
3756 if (function_state()->IsInsideDoExpressionScope()) {
3757 Bailout(kDoExpressionUnmodelable);
3758 }
3759 } else {
3760 loop_entry = BuildLoopEntry();
3761 }
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00003762 return loop_entry;
3763}
3764
3765
// Terminates the block with a control instruction that leaves the graph
// (e.g. return/deopt) and drops the now-dead environment.
void HBasicBlock::FinishExit(HControlInstruction* instruction,
                             SourcePosition position) {
  Finish(instruction, position);
  ClearEnvironment();
}
3771
3772
3773std::ostream& operator<<(std::ostream& os, const HBasicBlock& b) {
3774 return os << "B" << b.block_id();
3775}
3776
Ben Murdoch097c5b22016-05-18 11:27:45 +01003777HGraph::HGraph(CompilationInfo* info, CallInterfaceDescriptor descriptor)
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00003778 : isolate_(info->isolate()),
3779 next_block_id_(0),
3780 entry_block_(NULL),
3781 blocks_(8, info->zone()),
3782 values_(16, info->zone()),
3783 phi_list_(NULL),
3784 uint32_instructions_(NULL),
3785 osr_(NULL),
3786 info_(info),
Ben Murdoch097c5b22016-05-18 11:27:45 +01003787 descriptor_(descriptor),
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00003788 zone_(info->zone()),
Ben Murdochc5610432016-08-08 18:44:38 +01003789 allow_code_motion_(false),
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00003790 use_optimistic_licm_(false),
3791 depends_on_empty_array_proto_elements_(false),
3792 type_change_checksum_(0),
3793 maximum_environment_size_(0),
3794 no_side_effects_scope_count_(0),
Ben Murdochc5610432016-08-08 18:44:38 +01003795 disallow_adding_new_values_(false),
3796 inlined_function_infos_(info->zone()) {
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00003797 if (info->IsStub()) {
Ben Murdoch097c5b22016-05-18 11:27:45 +01003798 // For stubs, explicitly add the context to the environment.
3799 start_environment_ = new (zone_)
3800 HEnvironment(zone_, descriptor.GetRegisterParameterCount() + 1);
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00003801 } else {
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00003802 start_environment_ =
3803 new(zone_) HEnvironment(NULL, info->scope(), info->closure(), zone_);
3804 }
3805 start_environment_->set_ast_id(BailoutId::FunctionContext());
3806 entry_block_ = CreateBasicBlock();
3807 entry_block_->SetInitialEnvironment(start_environment_);
3808}
3809
3810
3811HBasicBlock* HGraph::CreateBasicBlock() {
3812 HBasicBlock* result = new(zone()) HBasicBlock(this);
3813 blocks_.Add(result, zone());
3814 return result;
3815}
3816
3817
3818void HGraph::FinalizeUniqueness() {
3819 DisallowHeapAllocation no_gc;
3820 for (int i = 0; i < blocks()->length(); ++i) {
3821 for (HInstructionIterator it(blocks()->at(i)); !it.Done(); it.Advance()) {
3822 it.Current()->FinalizeUniqueness();
3823 }
3824 }
3825}
3826
3827
3828int HGraph::SourcePositionToScriptPosition(SourcePosition pos) {
3829 return (FLAG_hydrogen_track_positions && !pos.IsUnknown())
Ben Murdochc5610432016-08-08 18:44:38 +01003830 ? inlined_function_infos_.at(pos.inlining_id()).start_position +
3831 pos.position()
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00003832 : pos.raw();
3833}
3834
3835
3836// Block ordering was implemented with two mutually recursive methods,
3837// HGraph::Postorder and HGraph::PostorderLoopBlocks.
3838// The recursion could lead to stack overflow so the algorithm has been
3839// implemented iteratively.
3840// At a high level the algorithm looks like this:
3841//
3842// Postorder(block, loop_header) : {
3843// if (block has already been visited or is of another loop) return;
3844// mark block as visited;
3845// if (block is a loop header) {
3846// VisitLoopMembers(block, loop_header);
3847// VisitSuccessorsOfLoopHeader(block);
3848// } else {
3849// VisitSuccessors(block)
3850// }
3851// put block in result list;
3852// }
3853//
3854// VisitLoopMembers(block, outer_loop_header) {
3855// foreach (block b in block loop members) {
3856// VisitSuccessorsOfLoopMember(b, outer_loop_header);
3857// if (b is loop header) VisitLoopMembers(b);
3858// }
3859// }
3860//
3861// VisitSuccessorsOfLoopMember(block, outer_loop_header) {
3862// foreach (block b in block successors) Postorder(b, outer_loop_header)
3863// }
3864//
3865// VisitSuccessorsOfLoopHeader(block) {
3866// foreach (block b in block successors) Postorder(b, block)
3867// }
3868//
3869// VisitSuccessors(block, loop_header) {
3870// foreach (block b in block successors) Postorder(b, loop_header)
3871// }
3872//
3873// The ordering is started calling Postorder(entry, NULL).
3874//
3875// Each instance of PostorderProcessor represents the "stack frame" of the
3876// recursion, and particularly keeps the state of the loop (iteration) of the
3877// "Visit..." function it represents.
3878// To recycle memory we keep all the frames in a double linked list but
3879// this means that we cannot use constructors to initialize the frames.
3880//
class PostorderProcessor : public ZoneObject {
 public:
  // Back link (towards the stack bottom).
  PostorderProcessor* parent() {return father_; }
  // Forward link (towards the stack top).
  PostorderProcessor* child() {return child_; }
  HBasicBlock* block() { return block_; }
  HLoopInformation* loop() { return loop_; }
  HBasicBlock* loop_header() { return loop_header_; }

  // Creates the bottom "stack frame" for Postorder(entry, NULL).
  static PostorderProcessor* CreateEntryProcessor(Zone* zone,
                                                  HBasicBlock* block) {
    PostorderProcessor* result = new(zone) PostorderProcessor(NULL);
    return result->SetupSuccessors(zone, block, NULL);
  }

  // Performs one step of the iterative traversal: either descend into a
  // new frame or backtrack towards the stack bottom. Returns the frame to
  // continue from, or NULL when the traversal is complete.
  PostorderProcessor* PerformStep(Zone* zone,
                                  ZoneList<HBasicBlock*>* order) {
    PostorderProcessor* next =
        PerformNonBacktrackingStep(zone, order);
    if (next != NULL) {
      return next;
    } else {
      return Backtrack(zone, order);
    }
  }

 private:
  // Frames are recycled (see Push), so members other than the links are
  // (re)initialized by the Setup* methods, not here.
  explicit PostorderProcessor(PostorderProcessor* father)
      : father_(father), child_(NULL), successor_iterator(NULL) { }

  // Each enum value states the cycle whose state is kept by this instance.
  enum LoopKind {
    NONE,
    SUCCESSORS,
    SUCCESSORS_OF_LOOP_HEADER,
    LOOP_MEMBERS,
    SUCCESSORS_OF_LOOP_MEMBER
  };

  // Each "Setup..." method is like a constructor for a cycle state.
  PostorderProcessor* SetupSuccessors(Zone* zone,
                                      HBasicBlock* block,
                                      HBasicBlock* loop_header) {
    // Skip blocks that were already visited or belong to another loop.
    if (block == NULL || block->IsOrdered() ||
        block->parent_loop_header() != loop_header) {
      kind_ = NONE;
      block_ = NULL;
      loop_ = NULL;
      loop_header_ = NULL;
      return this;
    } else {
      block_ = block;
      loop_ = NULL;
      block->MarkAsOrdered();

      if (block->IsLoopHeader()) {
        kind_ = SUCCESSORS_OF_LOOP_HEADER;
        loop_header_ = block;
        InitializeSuccessors();
        PostorderProcessor* result = Push(zone);
        return result->SetupLoopMembers(zone, block, block->loop_information(),
                                        loop_header);
      } else {
        DCHECK(block->IsFinished());
        kind_ = SUCCESSORS;
        loop_header_ = loop_header;
        InitializeSuccessors();
        return this;
      }
    }
  }

  PostorderProcessor* SetupLoopMembers(Zone* zone,
                                       HBasicBlock* block,
                                       HLoopInformation* loop,
                                       HBasicBlock* loop_header) {
    kind_ = LOOP_MEMBERS;
    block_ = block;
    loop_ = loop;
    loop_header_ = loop_header;
    InitializeLoopMembers();
    return this;
  }

  PostorderProcessor* SetupSuccessorsOfLoopMember(
      HBasicBlock* block,
      HLoopInformation* loop,
      HBasicBlock* loop_header) {
    kind_ = SUCCESSORS_OF_LOOP_MEMBER;
    block_ = block;
    loop_ = loop;
    loop_header_ = loop_header;
    InitializeSuccessors();
    return this;
  }

  // This method "allocates" a new stack frame.
  PostorderProcessor* Push(Zone* zone) {
    if (child_ == NULL) {
      child_ = new(zone) PostorderProcessor(this);
    }
    return child_;
  }

  // Appends block_ to the postorder result list, checking the postorder
  // invariant (all non-loop-header successors already emitted).
  void ClosePostorder(ZoneList<HBasicBlock*>* order, Zone* zone) {
    DCHECK(block_->end()->FirstSuccessor() == NULL ||
           order->Contains(block_->end()->FirstSuccessor()) ||
           block_->end()->FirstSuccessor()->IsLoopHeader());
    DCHECK(block_->end()->SecondSuccessor() == NULL ||
           order->Contains(block_->end()->SecondSuccessor()) ||
           block_->end()->SecondSuccessor()->IsLoopHeader());
    order->Add(block_, zone);
  }

  // This method is the basic block to walk up the stack.
  PostorderProcessor* Pop(Zone* zone,
                          ZoneList<HBasicBlock*>* order) {
    switch (kind_) {
      case SUCCESSORS:
      case SUCCESSORS_OF_LOOP_HEADER:
        ClosePostorder(order, zone);
        return father_;
      case LOOP_MEMBERS:
        return father_;
      case SUCCESSORS_OF_LOOP_MEMBER:
        if (block()->IsLoopHeader() && block() != loop_->loop_header()) {
          // In this case we need to perform a LOOP_MEMBERS cycle so we
          // initialize it and return this instead of father.
          return SetupLoopMembers(zone, block(),
                                  block()->loop_information(), loop_header_);
        } else {
          return father_;
        }
      case NONE:
        return father_;
    }
    UNREACHABLE();
    return NULL;
  }

  // Walks up the stack.
  PostorderProcessor* Backtrack(Zone* zone,
                                ZoneList<HBasicBlock*>* order) {
    PostorderProcessor* parent = Pop(zone, order);
    while (parent != NULL) {
      PostorderProcessor* next =
          parent->PerformNonBacktrackingStep(zone, order);
      if (next != NULL) {
        return next;
      } else {
        parent = parent->Pop(zone, order);
      }
    }
    return NULL;
  }

  // Advances the cycle this frame represents by one iteration, pushing a
  // new frame when a block still needs to be visited. Returns NULL when
  // this frame's cycle is exhausted (caller should backtrack).
  PostorderProcessor* PerformNonBacktrackingStep(
      Zone* zone,
      ZoneList<HBasicBlock*>* order) {
    HBasicBlock* next_block;
    switch (kind_) {
      case SUCCESSORS:
        next_block = AdvanceSuccessors();
        if (next_block != NULL) {
          PostorderProcessor* result = Push(zone);
          return result->SetupSuccessors(zone, next_block, loop_header_);
        }
        break;
      case SUCCESSORS_OF_LOOP_HEADER:
        next_block = AdvanceSuccessors();
        if (next_block != NULL) {
          PostorderProcessor* result = Push(zone);
          return result->SetupSuccessors(zone, next_block, block());
        }
        break;
      case LOOP_MEMBERS:
        next_block = AdvanceLoopMembers();
        if (next_block != NULL) {
          PostorderProcessor* result = Push(zone);
          return result->SetupSuccessorsOfLoopMember(next_block,
                                                     loop_, loop_header_);
        }
        break;
      case SUCCESSORS_OF_LOOP_MEMBER:
        next_block = AdvanceSuccessors();
        if (next_block != NULL) {
          PostorderProcessor* result = Push(zone);
          return result->SetupSuccessors(zone, next_block, loop_header_);
        }
        break;
      case NONE:
        return NULL;
    }
    return NULL;
  }

  // The following two methods implement a "foreach b in successors" cycle.
  void InitializeSuccessors() {
    loop_index = 0;
    loop_length = 0;
    successor_iterator = HSuccessorIterator(block_->end());
  }

  HBasicBlock* AdvanceSuccessors() {
    if (!successor_iterator.Done()) {
      HBasicBlock* result = successor_iterator.Current();
      successor_iterator.Advance();
      return result;
    }
    return NULL;
  }

  // The following two methods implement a "foreach b in loop members" cycle.
  void InitializeLoopMembers() {
    loop_index = 0;
    loop_length = loop_->blocks()->length();
  }

  HBasicBlock* AdvanceLoopMembers() {
    if (loop_index < loop_length) {
      HBasicBlock* result = loop_->blocks()->at(loop_index);
      loop_index++;
      return result;
    } else {
      return NULL;
    }
  }

  // State of the cycle (iteration) this frame currently represents.
  LoopKind kind_;
  PostorderProcessor* father_;
  PostorderProcessor* child_;
  HLoopInformation* loop_;
  HBasicBlock* block_;
  HBasicBlock* loop_header_;
  int loop_index;
  int loop_length;
  HSuccessorIterator successor_iterator;
};
4120
4121
// Rebuilds blocks_ in reverse-postorder and renumbers block IDs to match.
// Uses the iterative PostorderProcessor above to avoid deep recursion.
void HGraph::OrderBlocks() {
  CompilationPhase phase("H_Block ordering", info());

#ifdef DEBUG
  // Initially the blocks must not be ordered.
  for (int i = 0; i < blocks_.length(); ++i) {
    DCHECK(!blocks_[i]->IsOrdered());
  }
#endif

  // Emit the blocks into blocks_ in postorder, starting from the entry
  // block (blocks_[0]); Rewind(0) empties the list without reallocation.
  PostorderProcessor* postorder =
      PostorderProcessor::CreateEntryProcessor(zone(), blocks_[0]);
  blocks_.Rewind(0);
  while (postorder) {
    postorder = postorder->PerformStep(zone(), &blocks_);
  }

#ifdef DEBUG
  // Now all blocks must be marked as ordered.
  for (int i = 0; i < blocks_.length(); ++i) {
    DCHECK(blocks_[i]->IsOrdered());
  }
#endif

  // Reverse block list and assign block IDs.
  for (int i = 0, j = blocks_.length(); --j >= i; ++i) {
    HBasicBlock* bi = blocks_[i];
    HBasicBlock* bj = blocks_[j];
    bi->set_block_id(j);
    bj->set_block_id(i);
    blocks_[i] = bj;
    blocks_[j] = bi;
  }
}
4156
4157
// Computes the dominator tree by folding each block's predecessors into a
// common dominator. Relies on blocks_ being in reverse-postorder (see
// OrderBlocks) so predecessors are processed before their successors.
void HGraph::AssignDominators() {
  HPhase phase("H_Assign dominators", this);
  for (int i = 0; i < blocks_.length(); ++i) {
    HBasicBlock* block = blocks_[i];
    if (block->IsLoopHeader()) {
      // Only the first predecessor of a loop header is from outside the loop.
      // All others are back edges, and thus cannot dominate the loop header.
      block->AssignCommonDominator(block->predecessors()->first());
      block->AssignLoopSuccessorDominators();
    } else {
      for (int j = blocks_[i]->predecessors()->length() - 1; j >= 0; --j) {
        blocks_[i]->AssignCommonDominator(blocks_[i]->predecessors()->at(j));
      }
    }
  }
}
4174
4175
4176bool HGraph::CheckArgumentsPhiUses() {
4177 int block_count = blocks_.length();
4178 for (int i = 0; i < block_count; ++i) {
4179 for (int j = 0; j < blocks_[i]->phis()->length(); ++j) {
4180 HPhi* phi = blocks_[i]->phis()->at(j);
4181 // We don't support phi uses of arguments for now.
4182 if (phi->CheckFlag(HValue::kIsArguments)) return false;
4183 }
4184 }
4185 return true;
4186}
4187
4188
4189bool HGraph::CheckConstPhiUses() {
4190 int block_count = blocks_.length();
4191 for (int i = 0; i < block_count; ++i) {
4192 for (int j = 0; j < blocks_[i]->phis()->length(); ++j) {
4193 HPhi* phi = blocks_[i]->phis()->at(j);
4194 // Check for the hole value (from an uninitialized const).
4195 for (int k = 0; k < phi->OperandCount(); k++) {
4196 if (phi->OperandAt(k) == GetConstantHole()) return false;
4197 }
4198 }
4199 }
4200 return true;
4201}
4202
4203
4204void HGraph::CollectPhis() {
4205 int block_count = blocks_.length();
4206 phi_list_ = new(zone()) ZoneList<HPhi*>(block_count, zone());
4207 for (int i = 0; i < block_count; ++i) {
4208 for (int j = 0; j < blocks_[i]->phis()->length(); ++j) {
4209 HPhi* phi = blocks_[i]->phis()->at(j);
4210 phi_list_->Add(phi, zone());
4211 }
4212 }
4213}
4214
4215
4216// Implementation of utility class to encapsulate the translation state for
4217// a (possibly inlined) function.
// Pushes a new translation state for a (possibly inlined) function onto
// the owner's function-state stack. For inlined functions this also sets
// up the return target blocks (or a TestContext for inlining into a test).
FunctionState::FunctionState(HOptimizedGraphBuilder* owner,
                             CompilationInfo* info, InliningKind inlining_kind,
                             int inlining_id, TailCallMode tail_call_mode)
    : owner_(owner),
      compilation_info_(info),
      call_context_(NULL),
      inlining_kind_(inlining_kind),
      tail_call_mode_(tail_call_mode),
      function_return_(NULL),
      test_context_(NULL),
      entry_(NULL),
      arguments_object_(NULL),
      arguments_elements_(NULL),
      inlining_id_(inlining_id),
      outer_source_position_(SourcePosition::Unknown()),
      do_expression_scope_count_(0),
      outer_(owner->function_state()) {
  // outer_ == NULL means this is the initial (non-inlined) state.
  if (outer_ != NULL) {
    // State for an inline function.
    if (owner->ast_context()->IsTest()) {
      HBasicBlock* if_true = owner->graph()->CreateBasicBlock();
      HBasicBlock* if_false = owner->graph()->CreateBasicBlock();
      if_true->MarkAsInlineReturnTarget(owner->current_block());
      if_false->MarkAsInlineReturnTarget(owner->current_block());
      TestContext* outer_test_context = TestContext::cast(owner->ast_context());
      Expression* cond = outer_test_context->condition();
      // The AstContext constructor pushed on the context stack. This newed
      // instance is the reason that AstContext can't be BASE_EMBEDDED.
      test_context_ = new TestContext(owner, cond, if_true, if_false);
    } else {
      function_return_ = owner->graph()->CreateBasicBlock();
      function_return()->MarkAsInlineReturnTarget(owner->current_block());
    }
    // Set this after possibly allocating a new TestContext above.
    call_context_ = owner->ast_context();
  }

  // Push on the state stack.
  owner->set_function_state(this);

  // When tracking positions, remember the caller's position and switch the
  // builder to the inlined function's source.
  if (compilation_info_->is_tracking_positions()) {
    outer_source_position_ = owner->source_position();
    owner->EnterInlinedSource(
        info->shared_info()->start_position(),
        inlining_id);
    owner->SetSourcePosition(info->shared_info()->start_position());
  }
}
4266
4267
// Pops this state off the owner's function-state stack and restores the
// caller's source position when position tracking is on.
FunctionState::~FunctionState() {
  // The TestContext (if any) was heap-allocated in the constructor.
  delete test_context_;
  owner_->set_function_state(outer_);

  if (compilation_info_->is_tracking_positions()) {
    owner_->set_source_position(outer_source_position_);
    owner_->EnterInlinedSource(
        outer_->compilation_info()->shared_info()->start_position(),
        outer_->inlining_id());
  }
}
4279
4280
4281// Implementation of utility classes to represent an expression's context in
4282// the AST.
// Pushes this expression context onto the owner's context stack; the
// matching pop happens in ~AstContext.
AstContext::AstContext(HOptimizedGraphBuilder* owner, Expression::Context kind)
    : owner_(owner),
      kind_(kind),
      outer_(owner->ast_context()),
      typeof_mode_(NOT_INSIDE_TYPEOF) {
  owner->set_ast_context(this);  // Push.
#ifdef DEBUG
  // Remember the environment length so the destructors can check that the
  // context's net effect on the environment is as expected.
  DCHECK_EQ(JS_FUNCTION, owner->environment()->frame_type());
  original_length_ = owner->environment()->length();
#endif
}
4294
4295
// Restores the previous expression context (stack pop).
AstContext::~AstContext() {
  owner_->set_ast_context(outer_);  // Pop.
}
4299
4300
// An effect context must leave the environment length unchanged (values
// are dropped), unless we already bailed out or the block died.
EffectContext::~EffectContext() {
  DCHECK(owner()->HasStackOverflow() || owner()->current_block() == NULL ||
         (owner()->environment()->length() == original_length_ &&
          (owner()->environment()->frame_type() == JS_FUNCTION ||
           owner()->environment()->frame_type() == TAIL_CALLER_FUNCTION)));
}
4307
4308
// A value context must leave exactly one extra value on the environment
// (the expression's result), unless we bailed out or the block died.
ValueContext::~ValueContext() {
  DCHECK(owner()->HasStackOverflow() || owner()->current_block() == NULL ||
         (owner()->environment()->length() == original_length_ + 1 &&
          (owner()->environment()->frame_type() == JS_FUNCTION ||
           owner()->environment()->frame_type() == TAIL_CALLER_FUNCTION)));
}
4315
4316
// In an effect context the expression is evaluated only for its side
// effects, so the produced value is dropped.
void EffectContext::ReturnValue(HValue* value) {
  // The value is simply ignored.
}
4320
4321
4322void ValueContext::ReturnValue(HValue* value) {
4323 // The value is tracked in the bailout environment, and communicated
4324 // through the environment as the result of the expression.
4325 if (value->CheckFlag(HValue::kIsArguments)) {
4326 if (flag_ == ARGUMENTS_FAKED) {
4327 value = owner()->graph()->GetConstantUndefined();
4328 } else if (!arguments_allowed()) {
4329 owner()->Bailout(kBadValueContextForArgumentsValue);
4330 }
4331 }
4332 owner()->Push(value);
4333}
4334
4335
// In a test context the value immediately feeds a branch instead of being
// pushed on the environment.
void TestContext::ReturnValue(HValue* value) {
  BuildBranch(value);
}
4339
4340
4341void EffectContext::ReturnInstruction(HInstruction* instr, BailoutId ast_id) {
4342 DCHECK(!instr->IsControlInstruction());
4343 owner()->AddInstruction(instr);
4344 if (instr->HasObservableSideEffects()) {
4345 owner()->Add<HSimulate>(ast_id, REMOVABLE_SIMULATE);
4346 }
4347}
4348
4349
4350void EffectContext::ReturnControl(HControlInstruction* instr,
4351 BailoutId ast_id) {
4352 DCHECK(!instr->HasObservableSideEffects());
4353 HBasicBlock* empty_true = owner()->graph()->CreateBasicBlock();
4354 HBasicBlock* empty_false = owner()->graph()->CreateBasicBlock();
4355 instr->SetSuccessorAt(0, empty_true);
4356 instr->SetSuccessorAt(1, empty_false);
4357 owner()->FinishCurrentBlock(instr);
4358 HBasicBlock* join = owner()->CreateJoin(empty_true, empty_false, ast_id);
4359 owner()->set_current_block(join);
4360}
4361
4362
4363void EffectContext::ReturnContinuation(HIfContinuation* continuation,
4364 BailoutId ast_id) {
4365 HBasicBlock* true_branch = NULL;
4366 HBasicBlock* false_branch = NULL;
4367 continuation->Continue(&true_branch, &false_branch);
4368 if (!continuation->IsTrueReachable()) {
4369 owner()->set_current_block(false_branch);
4370 } else if (!continuation->IsFalseReachable()) {
4371 owner()->set_current_block(true_branch);
4372 } else {
4373 HBasicBlock* join = owner()->CreateJoin(true_branch, false_branch, ast_id);
4374 owner()->set_current_block(join);
4375 }
4376}
4377
4378
4379void ValueContext::ReturnInstruction(HInstruction* instr, BailoutId ast_id) {
4380 DCHECK(!instr->IsControlInstruction());
4381 if (!arguments_allowed() && instr->CheckFlag(HValue::kIsArguments)) {
4382 return owner()->Bailout(kBadValueContextForArgumentsObjectValue);
4383 }
4384 owner()->AddInstruction(instr);
4385 owner()->Push(instr);
4386 if (instr->HasObservableSideEffects()) {
4387 owner()->Add<HSimulate>(ast_id, REMOVABLE_SIMULATE);
4388 }
4389}
4390
4391
// Materializes a control instruction's outcome as a boolean value: each
// successor pushes true/false onto its environment, then the two arms are
// joined (the join's phi becomes the expression value).
void ValueContext::ReturnControl(HControlInstruction* instr, BailoutId ast_id) {
  DCHECK(!instr->HasObservableSideEffects());
  if (!arguments_allowed() && instr->CheckFlag(HValue::kIsArguments)) {
    return owner()->Bailout(kBadValueContextForArgumentsObjectValue);
  }
  HBasicBlock* materialize_false = owner()->graph()->CreateBasicBlock();
  HBasicBlock* materialize_true = owner()->graph()->CreateBasicBlock();
  instr->SetSuccessorAt(0, materialize_true);
  instr->SetSuccessorAt(1, materialize_false);
  owner()->FinishCurrentBlock(instr);
  // Push the constant into each arm's environment before joining; the
  // current block must be switched so the Push targets the right block.
  owner()->set_current_block(materialize_true);
  owner()->Push(owner()->graph()->GetConstantTrue());
  owner()->set_current_block(materialize_false);
  owner()->Push(owner()->graph()->GetConstantFalse());
  HBasicBlock* join =
      owner()->CreateJoin(materialize_true, materialize_false, ast_id);
  owner()->set_current_block(join);
}
4410
4411
4412void ValueContext::ReturnContinuation(HIfContinuation* continuation,
4413 BailoutId ast_id) {
4414 HBasicBlock* materialize_true = NULL;
4415 HBasicBlock* materialize_false = NULL;
4416 continuation->Continue(&materialize_true, &materialize_false);
4417 if (continuation->IsTrueReachable()) {
4418 owner()->set_current_block(materialize_true);
4419 owner()->Push(owner()->graph()->GetConstantTrue());
4420 owner()->set_current_block(materialize_true);
4421 }
4422 if (continuation->IsFalseReachable()) {
4423 owner()->set_current_block(materialize_false);
4424 owner()->Push(owner()->graph()->GetConstantFalse());
4425 owner()->set_current_block(materialize_false);
4426 }
4427 if (continuation->TrueAndFalseReachable()) {
4428 HBasicBlock* join =
4429 owner()->CreateJoin(materialize_true, materialize_false, ast_id);
4430 owner()->set_current_block(join);
4431 }
4432}
4433
4434
4435void TestContext::ReturnInstruction(HInstruction* instr, BailoutId ast_id) {
4436 DCHECK(!instr->IsControlInstruction());
4437 HOptimizedGraphBuilder* builder = owner();
4438 builder->AddInstruction(instr);
4439 // We expect a simulate after every expression with side effects, though
4440 // this one isn't actually needed (and wouldn't work if it were targeted).
4441 if (instr->HasObservableSideEffects()) {
4442 builder->Push(instr);
4443 builder->Add<HSimulate>(ast_id, REMOVABLE_SIMULATE);
4444 builder->Pop();
4445 }
4446 BuildBranch(instr);
4447}
4448
4449
4450void TestContext::ReturnControl(HControlInstruction* instr, BailoutId ast_id) {
4451 DCHECK(!instr->HasObservableSideEffects());
4452 HBasicBlock* empty_true = owner()->graph()->CreateBasicBlock();
4453 HBasicBlock* empty_false = owner()->graph()->CreateBasicBlock();
4454 instr->SetSuccessorAt(0, empty_true);
4455 instr->SetSuccessorAt(1, empty_false);
4456 owner()->FinishCurrentBlock(instr);
4457 owner()->Goto(empty_true, if_true(), owner()->function_state());
4458 owner()->Goto(empty_false, if_false(), owner()->function_state());
4459 owner()->set_current_block(NULL);
4460}
4461
4462
4463void TestContext::ReturnContinuation(HIfContinuation* continuation,
4464 BailoutId ast_id) {
4465 HBasicBlock* true_branch = NULL;
4466 HBasicBlock* false_branch = NULL;
4467 continuation->Continue(&true_branch, &false_branch);
4468 if (continuation->IsTrueReachable()) {
4469 owner()->Goto(true_branch, if_true(), owner()->function_state());
4470 }
4471 if (continuation->IsFalseReachable()) {
4472 owner()->Goto(false_branch, if_false(), owner()->function_state());
4473 }
4474 owner()->set_current_block(NULL);
4475}
4476
4477
4478void TestContext::BuildBranch(HValue* value) {
4479 // We expect the graph to be in edge-split form: there is no edge that
4480 // connects a branch node to a join node. We conservatively ensure that
4481 // property by always adding an empty block on the outgoing edges of this
4482 // branch.
4483 HOptimizedGraphBuilder* builder = owner();
4484 if (value != NULL && value->CheckFlag(HValue::kIsArguments)) {
4485 builder->Bailout(kArgumentsObjectValueInATestContext);
4486 }
Ben Murdochda12d292016-06-02 14:46:10 +01004487 ToBooleanICStub::Types expected(condition()->to_boolean_types());
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00004488 ReturnControl(owner()->New<HBranch>(value, expected), BailoutId::None());
4489}
4490
4491
// HOptimizedGraphBuilder infrastructure for bailing out and checking bailouts.

// Performs |call| and returns from the enclosing (void) function if it
// triggered a bailout, which is signalled via the stack-overflow flag.
#define CHECK_BAILOUT(call)                     \
  do {                                          \
    call;                                       \
    if (HasStackOverflow()) return;             \
  } while (false)


// Like CHECK_BAILOUT, but additionally returns when |call| left the builder
// without a current block, i.e. control flow cannot continue from here.
#define CHECK_ALIVE(call)                                             \
  do {                                                                \
    call;                                                             \
    if (HasStackOverflow() || current_block() == NULL) return;        \
  } while (false)


// Variant of CHECK_ALIVE for non-void functions: returns |value| on bailout
// or dead control flow.
#define CHECK_ALIVE_OR_RETURN(call, value)                            \
  do {                                                                \
    call;                                                             \
    if (HasStackOverflow() || current_block() == NULL) return value;  \
  } while (false)
4512
4513
// Records |reason| as the cause for abandoning optimization and raises the
// stack-overflow flag, which the CHECK_* macros above use to unwind the
// visitor recursion.
void HOptimizedGraphBuilder::Bailout(BailoutReason reason) {
  current_info()->AbortOptimization(reason);
  SetStackOverflow();
}
4518
4519
// Visits |expr| for its side effects only; any produced value is discarded.
void HOptimizedGraphBuilder::VisitForEffect(Expression* expr) {
  EffectContext for_effect(this);
  Visit(expr);
}
4524
4525
// Visits |expr| and leaves its value on the environment stack. |flag|
// controls whether the arguments object is permitted as the resulting value.
void HOptimizedGraphBuilder::VisitForValue(Expression* expr,
                                           ArgumentsAllowedFlag flag) {
  ValueContext for_value(this, flag);
  Visit(expr);
}
4531
4532
// Visits |expr| as the operand of typeof: a value context that must not
// produce the arguments object, with the context switched to INSIDE_TYPEOF
// mode for variable loads.
void HOptimizedGraphBuilder::VisitForTypeOf(Expression* expr) {
  ValueContext for_value(this, ARGUMENTS_NOT_ALLOWED);
  for_value.set_typeof_mode(INSIDE_TYPEOF);
  Visit(expr);
}
4538
4539
// Visits |expr| as a branch condition: control flows to |true_block| or
// |false_block| and no value is left on the environment stack.
void HOptimizedGraphBuilder::VisitForControl(Expression* expr,
                                             HBasicBlock* true_block,
                                             HBasicBlock* false_block) {
  TestContext for_control(this, expr, true_block, false_block);
  Visit(expr);
}
4546
4547
4548void HOptimizedGraphBuilder::VisitExpressions(
4549 ZoneList<Expression*>* exprs) {
4550 for (int i = 0; i < exprs->length(); ++i) {
4551 CHECK_ALIVE(VisitForValue(exprs->at(i)));
4552 }
4553}
4554
4555
4556void HOptimizedGraphBuilder::VisitExpressions(ZoneList<Expression*>* exprs,
4557 ArgumentsAllowedFlag flag) {
4558 for (int i = 0; i < exprs->length(); ++i) {
4559 CHECK_ALIVE(VisitForValue(exprs->at(i), flag));
4560 }
4561}
4562
4563
// Translates the function in current_info() into a Hydrogen graph.
// Returns false when translation bails out (unsupported construct or stack
// overflow); the reason has then been recorded on the compilation info.
bool HOptimizedGraphBuilder::BuildGraph() {
  // Subclass constructors are not supported by Crankshaft; bail out early.
  if (IsSubclassConstructor(current_info()->literal()->kind())) {
    Bailout(kSuperReference);
    return false;
  }

  Scope* scope = current_info()->scope();
  SetUpScope(scope);

  // Add an edge to the body entry. This is warty: the graph's start
  // environment will be used by the Lithium translation as the initial
  // environment on graph entry, but it has now been mutated by the
  // Hydrogen translation of the instructions in the start block. This
  // environment uses values which have not been defined yet. These
  // Hydrogen instructions will then be replayed by the Lithium
  // translation, so they cannot have an environment effect. The edge to
  // the body's entry block (along with some special logic for the start
  // block in HInstruction::InsertAfter) seals the start block from
  // getting unwanted instructions inserted.
  //
  // TODO(kmillikin): Fix this. Stop mutating the initial environment.
  // Make the Hydrogen instructions in the initial block into Hydrogen
  // values (but not instructions), present in the initial environment and
  // not replayed by the Lithium translation.
  HEnvironment* initial_env = environment()->CopyWithoutHistory();
  HBasicBlock* body_entry = CreateBasicBlock(initial_env);
  Goto(body_entry);
  body_entry->SetJoinId(BailoutId::FunctionEntry());
  set_current_block(body_entry);

  VisitDeclarations(scope->declarations());
  Add<HSimulate>(BailoutId::Declarations());

  Add<HStackCheck>(HStackCheck::kFunctionEntry);

  VisitStatements(current_info()->literal()->body());
  if (HasStackOverflow()) return false;

  // A function that falls off the end implicitly returns undefined.
  if (current_block() != NULL) {
    Add<HReturn>(graph()->GetConstantUndefined());
    set_current_block(NULL);
  }

  // If the checksum of the number of type info changes is the same as the
  // last time this function was compiled, then this recompile is likely not
  // due to missing/inadequate type feedback, but rather too aggressive
  // optimization. Disable optimistic LICM in that case.
  Handle<Code> unoptimized_code(current_info()->shared_info()->code());
  DCHECK(unoptimized_code->kind() == Code::FUNCTION);
  Handle<TypeFeedbackInfo> type_info(
      TypeFeedbackInfo::cast(unoptimized_code->type_feedback_info()));
  int checksum = type_info->own_type_change_checksum();
  int composite_checksum = graph()->update_type_change_checksum(checksum);
  graph()->set_use_optimistic_licm(
      !type_info->matches_inlined_type_change_checksum(composite_checksum));
  type_info->set_inlined_type_change_checksum(composite_checksum);

  // Set this predicate early to avoid handle deref during graph optimization.
  graph()->set_allow_code_motion(
      current_info()->IsStub() ||
      current_info()->shared_info()->opt_count() + 1 < FLAG_max_opt_count);

  // Perform any necessary OSR-specific cleanups or changes to the graph.
  osr()->FinishGraph();

  return true;
}
4631
4632
// Runs the Hydrogen optimization pipeline over the graph. The phase order
// below is significant (several phases state their ordering constraints in
// the comments). Returns false, with *bailout_reason set, when unsupported
// phi uses are detected; returns true otherwise.
bool HGraph::Optimize(BailoutReason* bailout_reason) {
  OrderBlocks();
  AssignDominators();

  // We need to create a HConstant "zero" now so that GVN will fold every
  // zero-valued constant in the graph together.
  // The constant is needed to make idef-based bounds check work: the pass
  // evaluates relations with "zero" and that zero cannot be created after GVN.
  GetConstant0();

#ifdef DEBUG
  // Do a full verify after building the graph and computing dominators.
  Verify(true);
#endif

  if (FLAG_analyze_environment_liveness && maximum_environment_size() != 0) {
    Run<HEnvironmentLivenessAnalysisPhase>();
  }

  // Unsupported phi uses abort the whole optimization with a reason.
  if (!CheckConstPhiUses()) {
    *bailout_reason = kUnsupportedPhiUseOfConstVariable;
    return false;
  }
  Run<HRedundantPhiEliminationPhase>();
  if (!CheckArgumentsPhiUses()) {
    *bailout_reason = kUnsupportedPhiUseOfArguments;
    return false;
  }

  // Find and mark unreachable code to simplify optimizations, especially gvn,
  // where unreachable code could unnecessarily defeat LICM.
  Run<HMarkUnreachableBlocksPhase>();

  if (FLAG_dead_code_elimination) Run<HDeadCodeEliminationPhase>();
  if (FLAG_use_escape_analysis) Run<HEscapeAnalysisPhase>();

  if (FLAG_load_elimination) Run<HLoadEliminationPhase>();

  CollectPhis();

  if (has_osr()) osr()->FinishOsrValues();

  Run<HInferRepresentationPhase>();

  // Remove HSimulate instructions that have turned out not to be needed
  // after all by folding them into the following HSimulate.
  // This must happen after inferring representations.
  Run<HMergeRemovableSimulatesPhase>();

  Run<HMarkDeoptimizeOnUndefinedPhase>();
  Run<HRepresentationChangesPhase>();

  Run<HInferTypesPhase>();

  // Must be performed before canonicalization to ensure that Canonicalize
  // will not remove semantically meaningful ToInt32 operations e.g. BIT_OR with
  // zero.
  Run<HUint32AnalysisPhase>();

  if (FLAG_use_canonicalizing) Run<HCanonicalizePhase>();

  if (FLAG_use_gvn) Run<HGlobalValueNumberingPhase>();

  if (FLAG_check_elimination) Run<HCheckEliminationPhase>();

  if (FLAG_store_elimination) Run<HStoreEliminationPhase>();

  Run<HRangeAnalysisPhase>();

  Run<HComputeChangeUndefinedToNaN>();

  // Eliminate redundant stack checks on backwards branches.
  Run<HStackCheckEliminationPhase>();

  if (FLAG_array_bounds_checks_elimination) Run<HBoundsCheckEliminationPhase>();
  if (FLAG_array_index_dehoisting) Run<HDehoistIndexComputationsPhase>();
  if (FLAG_dead_code_elimination) Run<HDeadCodeEliminationPhase>();

  RestoreActualValues();

  // Find unreachable code a second time, GVN and other optimizations may have
  // made blocks unreachable that were previously reachable.
  Run<HMarkUnreachableBlocksPhase>();

  return true;
}
4719
4720
// Replaces every instruction that has been redefined (i.e. whose
// ActualValue() differs from itself) with its actual value, deleting purely
// informative definitions from the graph. Run late in the pipeline so that
// Lithium sees only actual values.
void HGraph::RestoreActualValues() {
  HPhase phase("H_Restore actual values", this);

  for (int block_index = 0; block_index < blocks()->length(); block_index++) {
    HBasicBlock* block = blocks()->at(block_index);

#ifdef DEBUG
    // Phis are never redefined; check that invariant in debug builds.
    for (int i = 0; i < block->phis()->length(); i++) {
      HPhi* phi = block->phis()->at(i);
      DCHECK(phi->ActualValue() == phi);
    }
#endif

    for (HInstructionIterator it(block); !it.Done(); it.Advance()) {
      HInstruction* instruction = it.Current();
      if (instruction->ActualValue() == instruction) continue;
      if (instruction->CheckFlag(HValue::kIsDead)) {
        // The instruction was marked as deleted but left in the graph
        // as a control flow dependency point for subsequent
        // instructions.
        instruction->DeleteAndReplaceWith(instruction->ActualValue());
      } else {
        DCHECK(instruction->IsInformativeDefinition());
        if (instruction->IsPurelyInformativeDefinition()) {
          instruction->DeleteAndReplaceWith(instruction->RedefinedOperand());
        } else {
          instruction->ReplaceAllUsesWith(instruction->ActualValue());
        }
      }
    }
  }
}
4753
4754
4755void HOptimizedGraphBuilder::PushArgumentsFromEnvironment(int count) {
4756 ZoneList<HValue*> arguments(count, zone());
4757 for (int i = 0; i < count; ++i) {
4758 arguments.Add(Pop(), zone());
4759 }
4760
4761 HPushArguments* push_args = New<HPushArguments>();
4762 while (!arguments.is_empty()) {
4763 push_args->AddInput(arguments.RemoveLast());
4764 }
4765 AddInstruction(push_args);
4766}
4767
4768
// Moves the call's arguments from the environment into explicit push
// instructions; returns |call| unchanged so call sites can use it fluently.
template <class Instruction>
HInstruction* HOptimizedGraphBuilder::PreProcessCall(Instruction* call) {
  PushArgumentsFromEnvironment(call->argument_count());
  return call;
}
4774
4775
// Builds the function prologue: binds parameters, initializes locals and
// specials to undefined, creates the arguments object, and bails out for
// scope features Crankshaft does not handle (rest parameters, this-function
// and new.target variables).
void HOptimizedGraphBuilder::SetUpScope(Scope* scope) {
  HEnvironment* prolog_env = environment();
  int parameter_count = environment()->parameter_count();
  ZoneList<HValue*> parameters(parameter_count, zone());
  // Bind each incoming parameter (index 0 is the receiver).
  for (int i = 0; i < parameter_count; ++i) {
    HInstruction* parameter = Add<HParameter>(static_cast<unsigned>(i));
    parameters.Add(parameter, zone());
    environment()->Bind(i, parameter);
  }

  HConstant* undefined_constant = graph()->GetConstantUndefined();
  // Initialize specials and locals to undefined.
  for (int i = parameter_count + 1; i < environment()->length(); ++i) {
    environment()->Bind(i, undefined_constant);
  }
  Add<HPrologue>();

  // Seal the prologue into its own block; the body starts in a fresh block
  // with a copy of the environment (without instruction history).
  HEnvironment* initial_env = environment()->CopyWithoutHistory();
  HBasicBlock* body_entry = CreateBasicBlock(initial_env);
  GotoNoSimulate(body_entry);
  set_current_block(body_entry);

  // Initialize context of prolog environment to undefined.
  prolog_env->BindContext(undefined_constant);

  // First special is HContext.
  HInstruction* context = Add<HContext>();
  environment()->BindContext(context);

  // Create an arguments object containing the initial parameters. Set the
  // initial values of parameters including "this" having parameter index 0.
  DCHECK_EQ(scope->num_parameters() + 1, parameter_count);
  HArgumentsObject* arguments_object = New<HArgumentsObject>(parameter_count);
  for (int i = 0; i < parameter_count; ++i) {
    HValue* parameter = parameters.at(i);
    arguments_object->AddArgument(parameter, zone());
  }

  AddInstruction(arguments_object);

  // Handle the arguments and arguments shadow variables specially (they do
  // not have declarations).
  if (scope->arguments() != NULL) {
    environment()->Bind(scope->arguments(), arguments_object);
  }

  // Rest parameters are not supported.
  int rest_index;
  Variable* rest = scope->rest_parameter(&rest_index);
  if (rest) {
    return Bailout(kRestParameter);
  }

  // Functions needing this-function or new.target variables are unsupported.
  if (scope->this_function_var() != nullptr ||
      scope->new_target_var() != nullptr) {
    return Bailout(kSuperReference);
  }

  // Trace the call.
  if (FLAG_trace && top_info()->IsOptimizing()) {
    Add<HCallRuntime>(Runtime::FunctionForId(Runtime::kTraceEnter), 0);
  }
}
4838
4839
4840void HOptimizedGraphBuilder::VisitStatements(ZoneList<Statement*>* statements) {
4841 for (int i = 0; i < statements->length(); i++) {
4842 Statement* stmt = statements->at(i);
4843 CHECK_ALIVE(Visit(stmt));
4844 if (stmt->IsJump()) break;
4845 }
4846}
4847
4848
// Translates a block statement. If the block has its own scope needing a
// context, a block context is pushed (via Runtime::kPushBlockContext) for
// the duration of the body and popped again afterwards.
void HOptimizedGraphBuilder::VisitBlock(Block* stmt) {
  DCHECK(!HasStackOverflow());
  DCHECK(current_block() != NULL);
  DCHECK(current_block()->HasPredecessor());

  Scope* outer_scope = scope();
  Scope* scope = stmt->scope();
  BreakAndContinueInfo break_info(stmt, outer_scope);

  { BreakAndContinueScope push(&break_info, this);
    if (scope != NULL) {
      if (scope->NeedsContext()) {
        // Load the function object.
        Scope* declaration_scope = scope->DeclarationScope();
        HInstruction* function;
        HValue* outer_context = environment()->context();
        if (declaration_scope->is_script_scope() ||
            declaration_scope->is_eval_scope()) {
          function = new (zone())
              HLoadContextSlot(outer_context, Context::CLOSURE_INDEX,
                               HLoadContextSlot::kNoCheck);
        } else {
          function = New<HThisFunction>();
        }
        AddInstruction(function);
        // Allocate a block context and store it to the stack frame.
        HValue* scope_info = Add<HConstant>(scope->GetScopeInfo(isolate()));
        Add<HPushArguments>(scope_info, function);
        HInstruction* inner_context = Add<HCallRuntime>(
            Runtime::FunctionForId(Runtime::kPushBlockContext), 2);
        inner_context->SetFlag(HValue::kHasNoObservableSideEffects);
        set_scope(scope);
        environment()->BindContext(inner_context);
      }
      VisitDeclarations(scope->declarations());
      AddSimulate(stmt->DeclsId(), REMOVABLE_SIMULATE);
    }
    CHECK_BAILOUT(VisitStatements(stmt->statements()));
  }
  set_scope(outer_scope);
  // Pop the block context again if one was pushed and control can continue.
  if (scope != NULL && current_block() != NULL &&
      scope->ContextLocalCount() > 0) {
    HValue* inner_context = environment()->context();
    HValue* outer_context = Add<HLoadNamedField>(
        inner_context, nullptr,
        HObjectAccess::ForContextSlot(Context::PREVIOUS_INDEX));

    environment()->BindContext(outer_context);
  }
  // If a break targeted this block, merge into the (lazily created) break
  // block.
  HBasicBlock* break_block = break_info.break_block();
  if (break_block != NULL) {
    if (current_block() != NULL) Goto(break_block);
    break_block->SetJoinId(stmt->ExitId());
    set_current_block(break_block);
  }
}
4905
4906
// An expression statement evaluates its expression for effect only.
void HOptimizedGraphBuilder::VisitExpressionStatement(
    ExpressionStatement* stmt) {
  DCHECK(!HasStackOverflow());
  DCHECK(current_block() != NULL);
  DCHECK(current_block()->HasPredecessor());
  VisitForEffect(stmt->expression());
}
4914
4915
// Empty statements generate no code; only the builder invariants are checked.
void HOptimizedGraphBuilder::VisitEmptyStatement(EmptyStatement* stmt) {
  DCHECK(!HasStackOverflow());
  DCHECK(current_block() != NULL);
  DCHECK(current_block()->HasPredecessor());
}
4921
4922
// A sloppy-mode block-scoped function statement is translated by simply
// visiting its wrapped statement.
void HOptimizedGraphBuilder::VisitSloppyBlockFunctionStatement(
    SloppyBlockFunctionStatement* stmt) {
  Visit(stmt->statement());
}
4927
4928
// Translates an if statement. Statically-known conditions visit only the
// taken arm (with a simulate at the arm's entry); otherwise both arms are
// built and joined.
void HOptimizedGraphBuilder::VisitIfStatement(IfStatement* stmt) {
  DCHECK(!HasStackOverflow());
  DCHECK(current_block() != NULL);
  DCHECK(current_block()->HasPredecessor());
  if (stmt->condition()->ToBooleanIsTrue()) {
    Add<HSimulate>(stmt->ThenId());
    Visit(stmt->then_statement());
  } else if (stmt->condition()->ToBooleanIsFalse()) {
    Add<HSimulate>(stmt->ElseId());
    Visit(stmt->else_statement());
  } else {
    HBasicBlock* cond_true = graph()->CreateBasicBlock();
    HBasicBlock* cond_false = graph()->CreateBasicBlock();
    CHECK_BAILOUT(VisitForControl(stmt->condition(), cond_true, cond_false));

    // Technically, we should be able to handle the case when one side of
    // the test is not connected, but this can trip up liveness analysis
    // if we did not fully connect the test context based on some optimistic
    // assumption. If such an assumption was violated, we would end up with
    // an environment with optimized-out values. So we should always
    // conservatively connect the test context.
    CHECK(cond_true->HasPredecessor());
    CHECK(cond_false->HasPredecessor());

    cond_true->SetJoinId(stmt->ThenId());
    set_current_block(cond_true);
    CHECK_BAILOUT(Visit(stmt->then_statement()));
    // Re-read the current block: the arm may have ended elsewhere (or died).
    cond_true = current_block();

    cond_false->SetJoinId(stmt->ElseId());
    set_current_block(cond_false);
    CHECK_BAILOUT(Visit(stmt->else_statement()));
    cond_false = current_block();

    HBasicBlock* join = CreateJoin(cond_true, cond_false, stmt->IfId());
    set_current_block(join);
  }
}
4967
4968
4969HBasicBlock* HOptimizedGraphBuilder::BreakAndContinueScope::Get(
4970 BreakableStatement* stmt,
4971 BreakType type,
4972 Scope** scope,
4973 int* drop_extra) {
4974 *drop_extra = 0;
4975 BreakAndContinueScope* current = this;
4976 while (current != NULL && current->info()->target() != stmt) {
4977 *drop_extra += current->info()->drop_extra();
4978 current = current->next();
4979 }
4980 DCHECK(current != NULL); // Always found (unless stack is malformed).
4981 *scope = current->info()->scope();
4982
4983 if (type == BREAK) {
4984 *drop_extra += current->info()->drop_extra();
4985 }
4986
4987 HBasicBlock* block = NULL;
4988 switch (type) {
4989 case BREAK:
4990 block = current->info()->break_block();
4991 if (block == NULL) {
4992 block = current->owner()->graph()->CreateBasicBlock();
4993 current->info()->set_break_block(block);
4994 }
4995 break;
4996
4997 case CONTINUE:
4998 block = current->info()->continue_block();
4999 if (block == NULL) {
5000 block = current->owner()->graph()->CreateBasicBlock();
5001 current->info()->set_continue_block(block);
5002 }
5003 break;
5004 }
5005
5006 return block;
5007}
5008
5009
// Translates a continue statement: drops any extra stack values accumulated
// by intervening scopes, pops contexts down to the loop's scope, and jumps
// to the loop's continue block.
void HOptimizedGraphBuilder::VisitContinueStatement(
    ContinueStatement* stmt) {
  DCHECK(!HasStackOverflow());
  DCHECK(current_block() != NULL);
  DCHECK(current_block()->HasPredecessor());

  // Control flow out of a do-expression is not modelled; bail out.
  if (function_state()->IsInsideDoExpressionScope()) {
    return Bailout(kDoExpressionUnmodelable);
  }

  Scope* outer_scope = NULL;
  Scope* inner_scope = scope();
  int drop_extra = 0;
  HBasicBlock* continue_block = break_scope()->Get(
      stmt->target(), BreakAndContinueScope::CONTINUE,
      &outer_scope, &drop_extra);
  HValue* context = environment()->context();
  Drop(drop_extra);
  // Walk the context chain back up to the target scope's context.
  int context_pop_count = inner_scope->ContextChainLength(outer_scope);
  if (context_pop_count > 0) {
    while (context_pop_count-- > 0) {
      HInstruction* context_instruction = Add<HLoadNamedField>(
          context, nullptr,
          HObjectAccess::ForContextSlot(Context::PREVIOUS_INDEX));
      context = context_instruction;
    }
    environment()->BindContext(context);
  }

  Goto(continue_block);
  set_current_block(NULL);
}
5042
5043
// Translates a break statement: drops any extra stack values accumulated by
// intervening scopes, pops contexts down to the target's scope, and jumps
// to the target's break block.
void HOptimizedGraphBuilder::VisitBreakStatement(BreakStatement* stmt) {
  DCHECK(!HasStackOverflow());
  DCHECK(current_block() != NULL);
  DCHECK(current_block()->HasPredecessor());

  // Control flow out of a do-expression is not modelled; bail out.
  if (function_state()->IsInsideDoExpressionScope()) {
    return Bailout(kDoExpressionUnmodelable);
  }

  Scope* outer_scope = NULL;
  Scope* inner_scope = scope();
  int drop_extra = 0;
  HBasicBlock* break_block = break_scope()->Get(
      stmt->target(), BreakAndContinueScope::BREAK,
      &outer_scope, &drop_extra);
  HValue* context = environment()->context();
  Drop(drop_extra);
  // Walk the context chain back up to the target scope's context.
  int context_pop_count = inner_scope->ContextChainLength(outer_scope);
  if (context_pop_count > 0) {
    while (context_pop_count-- > 0) {
      HInstruction* context_instruction = Add<HLoadNamedField>(
          context, nullptr,
          HObjectAccess::ForContextSlot(Context::PREVIOUS_INDEX));
      context = context_instruction;
    }
    environment()->BindContext(context);
  }
  Goto(break_block);
  set_current_block(NULL);
}
5074
5075
// Translates a return statement. The shape depends on whether we are in the
// function being compiled (emit HReturn) or in an inlined function, where
// the behavior further depends on the inlining kind (construct call, setter,
// or normal) and on the expression context of the inlined call site.
void HOptimizedGraphBuilder::VisitReturnStatement(ReturnStatement* stmt) {
  DCHECK(!HasStackOverflow());
  DCHECK(current_block() != NULL);
  DCHECK(current_block()->HasPredecessor());
  FunctionState* state = function_state();
  AstContext* context = call_context();
  if (context == NULL) {
    // Not an inlined return, so an actual one.
    CHECK_ALIVE(VisitForValue(stmt->expression()));
    HValue* result = environment()->Pop();
    Add<HReturn>(result);
  } else if (state->inlining_kind() == CONSTRUCT_CALL_RETURN) {
    // Return from an inlined construct call. In a test context the return value
    // will always evaluate to true, in a value context the return value needs
    // to be a JSObject.
    if (context->IsTest()) {
      CHECK_ALIVE(VisitForEffect(stmt->expression()));
      context->ReturnValue(graph()->GetConstantTrue());
    } else if (context->IsEffect()) {
      CHECK_ALIVE(VisitForEffect(stmt->expression()));
      Goto(function_return(), state);
    } else {
      DCHECK(context->IsValue());
      CHECK_ALIVE(VisitForValue(stmt->expression()));
      // A construct call returns the explicit value only if it is a JS
      // receiver; otherwise the receiver ("this") is the result.
      HValue* return_value = Pop();
      HValue* receiver = environment()->arguments_environment()->Lookup(0);
      HHasInstanceTypeAndBranch* typecheck =
          New<HHasInstanceTypeAndBranch>(return_value,
                                         FIRST_JS_RECEIVER_TYPE,
                                         LAST_JS_RECEIVER_TYPE);
      HBasicBlock* if_spec_object = graph()->CreateBasicBlock();
      HBasicBlock* not_spec_object = graph()->CreateBasicBlock();
      typecheck->SetSuccessorAt(0, if_spec_object);
      typecheck->SetSuccessorAt(1, not_spec_object);
      FinishCurrentBlock(typecheck);
      AddLeaveInlined(if_spec_object, return_value, state);
      AddLeaveInlined(not_spec_object, receiver, state);
    }
  } else if (state->inlining_kind() == SETTER_CALL_RETURN) {
    // Return from an inlined setter call. The returned value is never used, the
    // value of an assignment is always the value of the RHS of the assignment.
    CHECK_ALIVE(VisitForEffect(stmt->expression()));
    if (context->IsTest()) {
      HValue* rhs = environment()->arguments_environment()->Lookup(1);
      context->ReturnValue(rhs);
    } else if (context->IsEffect()) {
      Goto(function_return(), state);
    } else {
      DCHECK(context->IsValue());
      HValue* rhs = environment()->arguments_environment()->Lookup(1);
      AddLeaveInlined(rhs, state);
    }
  } else {
    // Return from a normal inlined function. Visit the subexpression in the
    // expression context of the call.
    if (context->IsTest()) {
      TestContext* test = TestContext::cast(context);
      VisitForControl(stmt->expression(), test->if_true(), test->if_false());
    } else if (context->IsEffect()) {
      // Visit in value context and ignore the result. This is needed to keep
      // environment in sync with full-codegen since some visitors (e.g.
      // VisitCountOperation) use the operand stack differently depending on
      // context.
      CHECK_ALIVE(VisitForValue(stmt->expression()));
      Pop();
      Goto(function_return(), state);
    } else {
      DCHECK(context->IsValue());
      CHECK_ALIVE(VisitForValue(stmt->expression()));
      AddLeaveInlined(Pop(), state);
    }
  }
  // A return always ends the current block.
  set_current_block(NULL);
}
5150
5151
// With statements are not supported by this graph builder; always bail out.
void HOptimizedGraphBuilder::VisitWithStatement(WithStatement* stmt) {
  DCHECK(!HasStackOverflow());
  DCHECK(current_block() != NULL);
  DCHECK(current_block()->HasPredecessor());
  return Bailout(kWithStatement);
}
5158
5159
// Translates a switch statement in two passes: first a chain of strict
// equality tests against the tag (leaving the matching body blocks
// dangling), then the clause bodies in order, joining fall-through edges,
// and finally an up-to-3-way exit join.
void HOptimizedGraphBuilder::VisitSwitchStatement(SwitchStatement* stmt) {
  DCHECK(!HasStackOverflow());
  DCHECK(current_block() != NULL);
  DCHECK(current_block()->HasPredecessor());

  ZoneList<CaseClause*>* clauses = stmt->cases();
  int clause_count = clauses->length();
  ZoneList<HBasicBlock*> body_blocks(clause_count, zone());

  // The tag stays on the environment stack while the tests run.
  CHECK_ALIVE(VisitForValue(stmt->tag()));
  Add<HSimulate>(stmt->EntryId());
  HValue* tag_value = Top();
  Type* tag_type = bounds_.get(stmt->tag()).lower;

  // 1. Build all the tests, with dangling true branches
  BailoutId default_id = BailoutId::None();
  for (int i = 0; i < clause_count; ++i) {
    CaseClause* clause = clauses->at(i);
    if (clause->is_default()) {
      // The default clause has no test; remember its entry id only.
      body_blocks.Add(NULL, zone());
      if (default_id.IsNone()) default_id = clause->EntryId();
      continue;
    }

    // Generate a compare and branch.
    CHECK_BAILOUT(VisitForValue(clause->label()));
    if (current_block() == NULL) return Bailout(kUnsupportedSwitchStatement);
    HValue* label_value = Pop();

    Type* label_type = bounds_.get(clause->label()).lower;
    Type* combined_type = clause->compare_type();
    HControlInstruction* compare = BuildCompareInstruction(
        Token::EQ_STRICT, tag_value, label_value, tag_type, label_type,
        combined_type,
        ScriptPositionToSourcePosition(stmt->tag()->position()),
        ScriptPositionToSourcePosition(clause->label()->position()),
        PUSH_BEFORE_SIMULATE, clause->id());

    HBasicBlock* next_test_block = graph()->CreateBasicBlock();
    HBasicBlock* body_block = graph()->CreateBasicBlock();
    body_blocks.Add(body_block, zone());
    compare->SetSuccessorAt(0, body_block);
    compare->SetSuccessorAt(1, next_test_block);
    FinishCurrentBlock(compare);

    set_current_block(body_block);
    Drop(1);  // tag_value

    set_current_block(next_test_block);
  }

  // Save the current block to use for the default or to join with the
  // exit.
  HBasicBlock* last_block = current_block();
  Drop(1);  // tag_value

  // 2. Loop over the clauses and the linked list of tests in lockstep,
  // translating the clause bodies.
  HBasicBlock* fall_through_block = NULL;

  BreakAndContinueInfo break_info(stmt, scope());
  { BreakAndContinueScope push(&break_info, this);
    for (int i = 0; i < clause_count; ++i) {
      CaseClause* clause = clauses->at(i);

      // Identify the block where normal (non-fall-through) control flow
      // goes to.
      HBasicBlock* normal_block = NULL;
      if (clause->is_default()) {
        if (last_block == NULL) continue;
        normal_block = last_block;
        last_block = NULL;  // Cleared to indicate we've handled it.
      } else {
        normal_block = body_blocks[i];
      }

      if (fall_through_block == NULL) {
        set_current_block(normal_block);
      } else {
        // Merge the previous clause's fall-through edge into this body.
        HBasicBlock* join = CreateJoin(fall_through_block,
                                       normal_block,
                                       clause->EntryId());
        set_current_block(join);
      }

      CHECK_BAILOUT(VisitStatements(clause->statements()));
      fall_through_block = current_block();
    }
  }

  // Create an up-to-3-way join. Use the break block if it exists since
  // it's already a join block.
  HBasicBlock* break_block = break_info.break_block();
  if (break_block == NULL) {
    set_current_block(CreateJoin(fall_through_block,
                                 last_block,
                                 stmt->ExitId()));
  } else {
    if (fall_through_block != NULL) Goto(fall_through_block, break_block);
    if (last_block != NULL) Goto(last_block, break_block);
    break_block->SetJoinId(stmt->ExitId());
    set_current_block(break_block);
  }
}
5264
5265
// Emits a loop body preceded by a backwards-branch stack check. The stack
// check is registered on the loop header so later phases can locate (and
// possibly eliminate) it.
void HOptimizedGraphBuilder::VisitLoopBody(IterationStatement* stmt,
                                           HBasicBlock* loop_entry) {
  Add<HSimulate>(stmt->StackCheckId());
  HStackCheck* stack_check =
      HStackCheck::cast(Add<HStackCheck>(HStackCheck::kBackwardsBranch));
  DCHECK(loop_entry->IsLoopHeader());
  loop_entry->loop_information()->set_stack_check(stack_check);
  CHECK_BAILOUT(Visit(stmt->body()));
}
5275
5276
// Translates a do-while loop: body first, then the condition test decides
// between the back edge and the loop exit. A statically-false condition
// removes the back edge (and the now-unneeded stack check).
void HOptimizedGraphBuilder::VisitDoWhileStatement(DoWhileStatement* stmt) {
  DCHECK(!HasStackOverflow());
  DCHECK(current_block() != NULL);
  DCHECK(current_block()->HasPredecessor());
  DCHECK(current_block() != NULL);
  HBasicBlock* loop_entry = BuildLoopEntry(stmt);

  BreakAndContinueInfo break_info(stmt, scope());
  {
    BreakAndContinueScope push(&break_info, this);
    CHECK_BAILOUT(VisitLoopBody(stmt, loop_entry));
  }
  // Merge normal body exit with any continue edges.
  HBasicBlock* body_exit =
      JoinContinue(stmt, current_block(), break_info.continue_block());
  HBasicBlock* loop_successor = NULL;
  if (body_exit != NULL) {
    set_current_block(body_exit);
    loop_successor = graph()->CreateBasicBlock();
    if (stmt->cond()->ToBooleanIsFalse()) {
      // The loop can never repeat; drop the backwards stack check.
      loop_entry->loop_information()->stack_check()->Eliminate();
      Goto(loop_successor);
      body_exit = NULL;
    } else {
      // The block for a true condition, the actual predecessor block of the
      // back edge.
      body_exit = graph()->CreateBasicBlock();
      CHECK_BAILOUT(VisitForControl(stmt->cond(), body_exit, loop_successor));
    }
    if (body_exit != NULL && body_exit->HasPredecessor()) {
      body_exit->SetJoinId(stmt->BackEdgeId());
    } else {
      body_exit = NULL;
    }
    if (loop_successor->HasPredecessor()) {
      loop_successor->SetJoinId(stmt->ExitId());
    } else {
      loop_successor = NULL;
    }
  }
  HBasicBlock* loop_exit = CreateLoop(stmt,
                                      loop_entry,
                                      body_exit,
                                      loop_successor,
                                      break_info.break_block());
  set_current_block(loop_exit);
}
5323
5324
5325void HOptimizedGraphBuilder::VisitWhileStatement(WhileStatement* stmt) {
5326 DCHECK(!HasStackOverflow());
5327 DCHECK(current_block() != NULL);
5328 DCHECK(current_block()->HasPredecessor());
5329 DCHECK(current_block() != NULL);
5330 HBasicBlock* loop_entry = BuildLoopEntry(stmt);
5331
5332 // If the condition is constant true, do not generate a branch.
5333 HBasicBlock* loop_successor = NULL;
Ben Murdochda12d292016-06-02 14:46:10 +01005334 HBasicBlock* body_entry = graph()->CreateBasicBlock();
5335 loop_successor = graph()->CreateBasicBlock();
5336 CHECK_BAILOUT(VisitForControl(stmt->cond(), body_entry, loop_successor));
5337 if (body_entry->HasPredecessor()) {
5338 body_entry->SetJoinId(stmt->BodyId());
5339 set_current_block(body_entry);
5340 }
5341 if (loop_successor->HasPredecessor()) {
5342 loop_successor->SetJoinId(stmt->ExitId());
5343 } else {
5344 loop_successor = NULL;
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00005345 }
5346
5347 BreakAndContinueInfo break_info(stmt, scope());
5348 if (current_block() != NULL) {
5349 BreakAndContinueScope push(&break_info, this);
5350 CHECK_BAILOUT(VisitLoopBody(stmt, loop_entry));
5351 }
5352 HBasicBlock* body_exit =
5353 JoinContinue(stmt, current_block(), break_info.continue_block());
5354 HBasicBlock* loop_exit = CreateLoop(stmt,
5355 loop_entry,
5356 body_exit,
5357 loop_successor,
5358 break_info.break_block());
5359 set_current_block(loop_exit);
5360}
5361
5362
// Builds graph for:  for (init; cond; next) body
// The init clause runs before the loop header; cond (if present) is tested
// at the header; next (if present) runs on the back-edge path.
void HOptimizedGraphBuilder::VisitForStatement(ForStatement* stmt) {
  DCHECK(!HasStackOverflow());
  DCHECK(current_block() != NULL);
  DCHECK(current_block()->HasPredecessor());
  if (stmt->init() != NULL) {
    CHECK_ALIVE(Visit(stmt->init()));
  }
  DCHECK(current_block() != NULL);
  HBasicBlock* loop_entry = BuildLoopEntry(stmt);

  HBasicBlock* loop_successor = graph()->CreateBasicBlock();
  HBasicBlock* body_entry = graph()->CreateBasicBlock();
  if (stmt->cond() != NULL) {
    CHECK_BAILOUT(VisitForControl(stmt->cond(), body_entry, loop_successor));
    if (body_entry->HasPredecessor()) {
      body_entry->SetJoinId(stmt->BodyId());
      set_current_block(body_entry);
    }
    if (loop_successor->HasPredecessor()) {
      loop_successor->SetJoinId(stmt->ExitId());
    } else {
      // Condition never reaches the exit block; drop the successor.
      loop_successor = NULL;
    }
  } else {
    // Create dummy control flow so that variable liveness analysis
    // produces the correct result.
    HControlInstruction* branch = New<HBranch>(graph()->GetConstantTrue());
    branch->SetSuccessorAt(0, body_entry);
    branch->SetSuccessorAt(1, loop_successor);
    FinishCurrentBlock(branch);
    set_current_block(body_entry);
  }

  BreakAndContinueInfo break_info(stmt, scope());
  if (current_block() != NULL) {
    BreakAndContinueScope push(&break_info, this);
    CHECK_BAILOUT(VisitLoopBody(stmt, loop_entry));
  }
  // Merge the normal body exit with any 'continue's before running 'next'.
  HBasicBlock* body_exit =
      JoinContinue(stmt, current_block(), break_info.continue_block());

  if (stmt->next() != NULL && body_exit != NULL) {
    set_current_block(body_exit);
    CHECK_BAILOUT(Visit(stmt->next()));
    body_exit = current_block();
  }

  HBasicBlock* loop_exit = CreateLoop(stmt,
                                      loop_entry,
                                      body_exit,
                                      loop_successor,
                                      break_info.break_block());
  set_current_block(loop_exit);
}
5417
5418
// Builds graph for:  for (each in enumerable) body
// Only the case where |each| is a stack-allocated local is optimized;
// anything else bails out to full codegen.
void HOptimizedGraphBuilder::VisitForInStatement(ForInStatement* stmt) {
  DCHECK(!HasStackOverflow());
  DCHECK(current_block() != NULL);
  DCHECK(current_block()->HasPredecessor());

  if (!stmt->each()->IsVariableProxy() ||
      !stmt->each()->AsVariableProxy()->var()->IsStackLocal()) {
    return Bailout(kForInStatementWithNonLocalEachVariable);
  }

  Variable* each_var = stmt->each()->AsVariableProxy()->var();

  CHECK_ALIVE(VisitForValue(stmt->enumerable()));
  HValue* enumerable = Top();  // Leave enumerable at the top.

  // Deoptimize when the enumerable is undefined or null; that case is left
  // to full codegen (the loop body is never entered for those values).
  IfBuilder if_undefined_or_null(this);
  if_undefined_or_null.If<HCompareObjectEqAndBranch>(
      enumerable, graph()->GetConstantUndefined());
  if_undefined_or_null.Or();
  if_undefined_or_null.If<HCompareObjectEqAndBranch>(
      enumerable, graph()->GetConstantNull());
  if_undefined_or_null.ThenDeopt(Deoptimizer::kUndefinedOrNullInForIn);
  if_undefined_or_null.End();
  BuildForInBody(stmt, each_var, enumerable);
}
5444
5445
// Builds the loop portion of a for-in statement. Maintains five values on
// the expression stack across the loop, bottom to top:
//   [enumerable, type, array, limit, index]
// where |type| is the enumerable's map on the fast path (constant 1 on the
// generic slow path), |array| holds the keys to enumerate, |limit| the
// number of keys, and |index| the current position.
void HOptimizedGraphBuilder::BuildForInBody(ForInStatement* stmt,
                                            Variable* each_var,
                                            HValue* enumerable) {
  Handle<Map> meta_map = isolate()->factory()->meta_map();
  bool fast = stmt->for_in_type() == ForInStatement::FAST_FOR_IN;
  // The enumerable must be a heap object implementing JSReceiver.
  BuildCheckHeapObject(enumerable);
  Add<HCheckInstanceType>(enumerable, HCheckInstanceType::IS_JS_RECEIVER);
  Add<HSimulate>(stmt->ToObjectId());
  if (fast) {
    // Fast path: read keys directly from the enum cache of the map.
    HForInPrepareMap* map = Add<HForInPrepareMap>(enumerable);
    Push(map);
    Add<HSimulate>(stmt->EnumId());
    Drop(1);
    Add<HCheckMaps>(map, meta_map);

    HForInCacheArray* array = Add<HForInCacheArray>(
        enumerable, map, DescriptorArray::kEnumCacheBridgeCacheIndex);
    HValue* enum_length = BuildEnumLength(map);

    HForInCacheArray* index_cache = Add<HForInCacheArray>(
        enumerable, map, DescriptorArray::kEnumCacheBridgeIndicesCacheIndex);
    array->set_index_cache(index_cache);

    Push(map);
    Push(array);
    Push(enum_length);
    Add<HSimulate>(stmt->PrepareId());
  } else {
    // Slow path: ask the runtime to enumerate, then check at run time
    // whether it handed back an enum-cache map (fast layout) or a plain
    // fixed array of keys.
    Runtime::FunctionId function_id = Runtime::kForInEnumerate;
    Add<HPushArguments>(enumerable);
    HCallRuntime* array =
        Add<HCallRuntime>(Runtime::FunctionForId(function_id), 1);
    Push(array);
    Add<HSimulate>(stmt->EnumId());
    Drop(1);

    IfBuilder if_fast(this);
    if_fast.If<HCompareMap>(array, meta_map);
    if_fast.Then();
    {
      // Runtime returned a map: use its enum cache.
      HValue* cache_map = array;
      HForInCacheArray* cache = Add<HForInCacheArray>(
          enumerable, cache_map, DescriptorArray::kEnumCacheBridgeCacheIndex);
      HValue* enum_length = BuildEnumLength(cache_map);
      Push(cache_map);
      Push(cache);
      Push(enum_length);
      Add<HSimulate>(stmt->PrepareId(), FIXED_SIMULATE);
    }
    if_fast.Else();
    {
      // Runtime returned a fixed array of keys; constant 1 marks this
      // layout in the |type| stack slot.
      Push(graph()->GetConstant1());
      Push(array);
      Push(AddLoadFixedArrayLength(array));
      Add<HSimulate>(stmt->PrepareId(), FIXED_SIMULATE);
    }
  }

  // Initial index.
  Push(graph()->GetConstant0());

  HBasicBlock* loop_entry = BuildLoopEntry(stmt);

  // Reload the values to ensure we have up-to-date values inside of the loop.
  // This is relevant especially for OSR where the values don't come from the
  // computation above, but from the OSR entry block.
  HValue* index = environment()->ExpressionStackAt(0);
  HValue* limit = environment()->ExpressionStackAt(1);
  HValue* array = environment()->ExpressionStackAt(2);
  HValue* type = environment()->ExpressionStackAt(3);
  enumerable = environment()->ExpressionStackAt(4);

  // Check that we still have more keys.
  HCompareNumericAndBranch* compare_index =
      New<HCompareNumericAndBranch>(index, limit, Token::LT);
  compare_index->set_observed_input_representation(
      Representation::Smi(), Representation::Smi());

  HBasicBlock* loop_body = graph()->CreateBasicBlock();
  HBasicBlock* loop_successor = graph()->CreateBasicBlock();

  compare_index->SetSuccessorAt(0, loop_body);
  compare_index->SetSuccessorAt(1, loop_successor);
  FinishCurrentBlock(compare_index);

  // On exit, drop the five loop-carried stack values.
  set_current_block(loop_successor);
  Drop(5);

  set_current_block(loop_body);

  // Compute the next enumerated value.
  HValue* key = Add<HLoadKeyed>(array, index, index, nullptr, FAST_ELEMENTS);

  HBasicBlock* continue_block = nullptr;
  if (fast) {
    // Check if expected map still matches that of the enumerable.
    Add<HCheckMapValue>(enumerable, type);
    Add<HSimulate>(stmt->FilterId());
  } else {
    // We need the continue block here to be able to skip over invalidated keys.
    continue_block = graph()->CreateBasicBlock();

    // We cannot use the IfBuilder here, since we need to be able to jump
    // over the loop body in case of undefined result from %ForInFilter,
    // and the poor soul that is the IfBuilder get's really confused about
    // such "advanced control flow requirements".
    HBasicBlock* if_fast = graph()->CreateBasicBlock();
    HBasicBlock* if_slow = graph()->CreateBasicBlock();
    HBasicBlock* if_slow_pass = graph()->CreateBasicBlock();
    HBasicBlock* if_slow_skip = graph()->CreateBasicBlock();
    HBasicBlock* if_join = graph()->CreateBasicBlock();

    // Check if expected map still matches that of the enumerable.
    HValue* enumerable_map =
        Add<HLoadNamedField>(enumerable, nullptr, HObjectAccess::ForMap());
    FinishCurrentBlock(
        New<HCompareObjectEqAndBranch>(enumerable_map, type, if_fast, if_slow));
    set_current_block(if_fast);
    {
      // The enum cache for enumerable is still valid, no need to check key.
      Push(key);
      Goto(if_join);
    }
    set_current_block(if_slow);
    {
      // Check if key is still valid for enumerable.
      Add<HPushArguments>(enumerable, key);
      Runtime::FunctionId function_id = Runtime::kForInFilter;
      Push(Add<HCallRuntime>(Runtime::FunctionForId(function_id), 2));
      Add<HSimulate>(stmt->FilterId());
      FinishCurrentBlock(New<HCompareObjectEqAndBranch>(
          Top(), graph()->GetConstantUndefined(), if_slow_skip, if_slow_pass));
    }
    set_current_block(if_slow_pass);
    { Goto(if_join); }
    set_current_block(if_slow_skip);
    {
      // The key is no longer valid for enumerable, skip it.
      Drop(1);
      Goto(continue_block);
    }
    if_join->SetJoinId(stmt->FilterId());
    set_current_block(if_join);
    key = Pop();
  }

  // Assign the current key to the loop variable.
  Bind(each_var, key);
  Add<HSimulate>(stmt->AssignmentId());

  BreakAndContinueInfo break_info(stmt, scope(), 5);
  break_info.set_continue_block(continue_block);
  {
    BreakAndContinueScope push(&break_info, this);
    CHECK_BAILOUT(VisitLoopBody(stmt, loop_entry));
  }

  HBasicBlock* body_exit =
      JoinContinue(stmt, current_block(), break_info.continue_block());

  if (body_exit != NULL) {
    set_current_block(body_exit);

    // Advance the index; it can never overflow since it is bounded by the
    // Smi-represented limit checked above.
    HValue* current_index = Pop();
    HValue* increment =
        AddUncasted<HAdd>(current_index, graph()->GetConstant1());
    increment->ClearFlag(HValue::kCanOverflow);
    Push(increment);
    body_exit = current_block();
  }

  HBasicBlock* loop_exit = CreateLoop(stmt,
                                      loop_entry,
                                      body_exit,
                                      loop_successor,
                                      break_info.break_block());

  set_current_block(loop_exit);
}
5623
5624
// for-of is not supported by this optimizing compiler; bail out so the
// function stays in full codegen.
void HOptimizedGraphBuilder::VisitForOfStatement(ForOfStatement* stmt) {
  DCHECK(!HasStackOverflow());
  DCHECK(current_block() != NULL);
  DCHECK(current_block()->HasPredecessor());
  return Bailout(kForOfStatement);
}
5631
5632
// try/catch is not supported by this optimizing compiler; bail out.
void HOptimizedGraphBuilder::VisitTryCatchStatement(TryCatchStatement* stmt) {
  DCHECK(!HasStackOverflow());
  DCHECK(current_block() != NULL);
  DCHECK(current_block()->HasPredecessor());
  return Bailout(kTryCatchStatement);
}
5639
5640
// try/finally is not supported by this optimizing compiler; bail out.
void HOptimizedGraphBuilder::VisitTryFinallyStatement(
    TryFinallyStatement* stmt) {
  DCHECK(!HasStackOverflow());
  DCHECK(current_block() != NULL);
  DCHECK(current_block()->HasPredecessor());
  return Bailout(kTryFinallyStatement);
}
5648
5649
// 'debugger' statements are not supported by this optimizing compiler;
// bail out so the statement is handled by full codegen.
void HOptimizedGraphBuilder::VisitDebuggerStatement(DebuggerStatement* stmt) {
  DCHECK(!HasStackOverflow());
  DCHECK(current_block() != NULL);
  DCHECK(current_block()->HasPredecessor());
  return Bailout(kDebuggerStatement);
}
5656
5657
// Case clauses are handled inline by the switch-statement visitor (which
// calls VisitStatements on each clause), so this visitor must never run.
void HOptimizedGraphBuilder::VisitCaseClause(CaseClause* clause) {
  UNREACHABLE();
}
5661
5662
// Materializes a closure for a function literal: compiles the shared
// function info, then allocates the closure either via the fast stub or,
// for pretenured functions / functions with literals, via the runtime.
void HOptimizedGraphBuilder::VisitFunctionLiteral(FunctionLiteral* expr) {
  DCHECK(!HasStackOverflow());
  DCHECK(current_block() != NULL);
  DCHECK(current_block()->HasPredecessor());
  Handle<SharedFunctionInfo> shared_info = Compiler::GetSharedFunctionInfo(
      expr, current_info()->script(), top_info());
  // We also have a stack overflow if the recursive compilation did.
  if (HasStackOverflow()) return;
  // Use the fast case closure allocation code that allocates in new
  // space for nested functions that don't need literals cloning.
  HConstant* shared_info_value = Add<HConstant>(shared_info);
  HInstruction* instr;
  if (!expr->pretenure() && shared_info->num_literals() == 0) {
    FastNewClosureStub stub(isolate(), shared_info->language_mode(),
                            shared_info->kind());
    FastNewClosureDescriptor descriptor(isolate());
    HValue* values[] = {context(), shared_info_value};
    HConstant* stub_value = Add<HConstant>(stub.GetCode());
    instr = New<HCallWithDescriptor>(stub_value, 0, descriptor,
                                     ArrayVector(values));
  } else {
    // Slow path: let the runtime allocate (tenured if requested).
    Add<HPushArguments>(shared_info_value);
    Runtime::FunctionId function_id =
        expr->pretenure() ? Runtime::kNewClosure_Tenured : Runtime::kNewClosure;
    instr = New<HCallRuntime>(Runtime::FunctionForId(function_id), 1);
  }
  return ast_context()->ReturnInstruction(instr, expr->id());
}
5691
5692
// Class literals are not supported by this optimizing compiler; bail out.
void HOptimizedGraphBuilder::VisitClassLiteral(ClassLiteral* lit) {
  DCHECK(!HasStackOverflow());
  DCHECK(current_block() != NULL);
  DCHECK(current_block()->HasPredecessor());
  return Bailout(kClassLiteral);
}
5699
5700
// Native function literals are not supported; bail out to full codegen.
void HOptimizedGraphBuilder::VisitNativeFunctionLiteral(
    NativeFunctionLiteral* expr) {
  DCHECK(!HasStackOverflow());
  DCHECK(current_block() != NULL);
  DCHECK(current_block()->HasPredecessor());
  return Bailout(kNativeFunctionLiteral);
}
5708
5709
// Builds graph for a do-expression: visit the statement block, then the
// result expression in the surrounding AST context.
void HOptimizedGraphBuilder::VisitDoExpression(DoExpression* expr) {
  DoExpressionScope scope(this);
  DCHECK(!HasStackOverflow());
  DCHECK(current_block() != NULL);
  DCHECK(current_block()->HasPredecessor());
  CHECK_ALIVE(VisitBlock(expr->block()));
  // Last action in this function, so a bare Visit (no CHECK_ALIVE) is fine.
  Visit(expr->result());
}
5718
5719
// Builds graph for:  cond ? then_expr : else_expr
// Both arms are visited in the same AST context as the whole expression;
// in a non-test context their results are joined and the value returned.
void HOptimizedGraphBuilder::VisitConditional(Conditional* expr) {
  DCHECK(!HasStackOverflow());
  DCHECK(current_block() != NULL);
  DCHECK(current_block()->HasPredecessor());
  HBasicBlock* cond_true = graph()->CreateBasicBlock();
  HBasicBlock* cond_false = graph()->CreateBasicBlock();
  CHECK_BAILOUT(VisitForControl(expr->condition(), cond_true, cond_false));

  // Visit the true and false subexpressions in the same AST context as the
  // whole expression.
  if (cond_true->HasPredecessor()) {
    cond_true->SetJoinId(expr->ThenId());
    set_current_block(cond_true);
    CHECK_BAILOUT(Visit(expr->then_expression()));
    cond_true = current_block();
  } else {
    // Unreachable arm (e.g. constant condition).
    cond_true = NULL;
  }

  if (cond_false->HasPredecessor()) {
    cond_false->SetJoinId(expr->ElseId());
    set_current_block(cond_false);
    CHECK_BAILOUT(Visit(expr->else_expression()));
    cond_false = current_block();
  } else {
    cond_false = NULL;
  }

  if (!ast_context()->IsTest()) {
    HBasicBlock* join = CreateJoin(cond_true, cond_false, expr->id());
    set_current_block(join);
    if (join != NULL && !ast_context()->IsEffect()) {
      return ast_context()->ReturnValue(Pop());
    }
  }
}
5756
5757
// Classifies a global variable access: kUseCell when the property is a
// plain data slot directly on the JSGlobalObject (and writable, for
// stores), so the access can go through a property cell; kUseGeneric for
// everything else (accessors, interceptors, proxies, unknown, etc.).
HOptimizedGraphBuilder::GlobalPropertyAccess
HOptimizedGraphBuilder::LookupGlobalProperty(Variable* var, LookupIterator* it,
                                             PropertyAccessType access_type) {
  if (var->is_this() || !current_info()->has_global_object()) {
    return kUseGeneric;
  }

  switch (it->state()) {
    case LookupIterator::ACCESSOR:
    case LookupIterator::ACCESS_CHECK:
    case LookupIterator::INTERCEPTOR:
    case LookupIterator::INTEGER_INDEXED_EXOTIC:
    case LookupIterator::NOT_FOUND:
      return kUseGeneric;
    case LookupIterator::DATA:
      if (access_type == STORE && it->IsReadOnly()) return kUseGeneric;
      if (!it->GetHolder<JSObject>()->IsJSGlobalObject()) return kUseGeneric;
      return kUseCell;
    case LookupIterator::JSPROXY:
    case LookupIterator::TRANSITION:
      UNREACHABLE();
  }
  UNREACHABLE();
  return kUseGeneric;
}
5783
5784
5785HValue* HOptimizedGraphBuilder::BuildContextChainWalk(Variable* var) {
5786 DCHECK(var->IsContextSlot());
5787 HValue* context = environment()->context();
5788 int length = scope()->ContextChainLength(var->scope());
5789 while (length-- > 0) {
5790 context = Add<HLoadNamedField>(
5791 context, nullptr,
5792 HObjectAccess::ForContextSlot(Context::PREVIOUS_INDEX));
5793 }
5794 return context;
5795}
5796
5797
// Emits a load for a variable reference, dispatching on where the variable
// lives: global object, script context, stack slot, or context chain.
void HOptimizedGraphBuilder::VisitVariableProxy(VariableProxy* expr) {
  DCHECK(!HasStackOverflow());
  DCHECK(current_block() != NULL);
  DCHECK(current_block()->HasPredecessor());
  Variable* variable = expr->var();
  switch (variable->location()) {
    case VariableLocation::GLOBAL:
    case VariableLocation::UNALLOCATED: {
      if (IsLexicalVariableMode(variable->mode())) {
        // TODO(rossberg): should this be an DCHECK?
        return Bailout(kReferenceToGlobalLexicalVariable);
      }
      // Handle known global constants like 'undefined' specially to avoid a
      // load from a global cell for them.
      Handle<Object> constant_value =
          isolate()->factory()->GlobalConstantFor(variable->name());
      if (!constant_value.is_null()) {
        HConstant* instr = New<HConstant>(constant_value);
        return ast_context()->ReturnInstruction(instr, expr->id());
      }

      Handle<JSGlobalObject> global(current_info()->global_object());

      // Lookup in script contexts.
      {
        Handle<ScriptContextTable> script_contexts(
            global->native_context()->script_context_table());
        ScriptContextTable::LookupResult lookup;
        if (ScriptContextTable::Lookup(script_contexts, variable->name(),
                                       &lookup)) {
          Handle<Context> script_context = ScriptContextTable::GetContext(
              script_contexts, lookup.context_index);
          Handle<Object> current_value =
              FixedArray::get(*script_context, lookup.slot_index, isolate());

          // If the values is not the hole, it will stay initialized,
          // so no need to generate a check.
          if (*current_value == *isolate()->factory()->the_hole_value()) {
            return Bailout(kReferenceToUninitializedVariable);
          }
          HInstruction* result = New<HLoadNamedField>(
              Add<HConstant>(script_context), nullptr,
              HObjectAccess::ForContextSlot(lookup.slot_index));
          return ast_context()->ReturnInstruction(result, expr->id());
        }
      }

      LookupIterator it(global, variable->name(), LookupIterator::OWN);
      GlobalPropertyAccess type = LookupGlobalProperty(variable, &it, LOAD);

      if (type == kUseCell) {
        // Register a dependency so the optimized code is invalidated if
        // the property cell changes.
        Handle<PropertyCell> cell = it.GetPropertyCell();
        top_info()->dependencies()->AssumePropertyCell(cell);
        auto cell_type = it.property_details().cell_type();
        if (cell_type == PropertyCellType::kConstant ||
            cell_type == PropertyCellType::kUndefined) {
          // Constant cell: embed the value directly as an HConstant.
          Handle<Object> constant_object(cell->value(), isolate());
          if (constant_object->IsConsString()) {
            constant_object =
                String::Flatten(Handle<String>::cast(constant_object));
          }
          HConstant* constant = New<HConstant>(constant_object);
          return ast_context()->ReturnInstruction(constant, expr->id());
        } else {
          // Non-constant cell: emit a load of the cell's value slot, with
          // the best representation the cell type allows.
          auto access = HObjectAccess::ForPropertyCellValue();
          UniqueSet<Map>* field_maps = nullptr;
          if (cell_type == PropertyCellType::kConstantType) {
            switch (cell->GetConstantType()) {
              case PropertyCellConstantType::kSmi:
                access = access.WithRepresentation(Representation::Smi());
                break;
              case PropertyCellConstantType::kStableMap: {
                // Check that the map really is stable. The heap object could
                // have mutated without the cell updating state. In that case,
                // make no promises about the loaded value except that it's a
                // heap object.
                access =
                    access.WithRepresentation(Representation::HeapObject());
                Handle<Map> map(HeapObject::cast(cell->value())->map());
                if (map->is_stable()) {
                  field_maps = new (zone())
                      UniqueSet<Map>(Unique<Map>::CreateImmovable(map), zone());
                }
                break;
              }
            }
          }
          HConstant* cell_constant = Add<HConstant>(cell);
          HLoadNamedField* instr;
          if (field_maps == nullptr) {
            instr = New<HLoadNamedField>(cell_constant, nullptr, access);
          } else {
            instr = New<HLoadNamedField>(cell_constant, nullptr, access,
                                         field_maps, HType::HeapObject());
          }
          instr->ClearDependsOnFlag(kInobjectFields);
          instr->SetDependsOnFlag(kGlobalVars);
          return ast_context()->ReturnInstruction(instr, expr->id());
        }
      } else {
        // Generic case: load from the global object via the generic IC.
        HValue* global_object = Add<HLoadNamedField>(
            BuildGetNativeContext(), nullptr,
            HObjectAccess::ForContextSlot(Context::EXTENSION_INDEX));
        HLoadGlobalGeneric* instr = New<HLoadGlobalGeneric>(
            global_object, variable->name(), ast_context()->typeof_mode());
        instr->SetVectorAndSlot(handle(current_feedback_vector(), isolate()),
                                expr->VariableFeedbackSlot());
        return ast_context()->ReturnInstruction(instr, expr->id());
      }
    }

    case VariableLocation::PARAMETER:
    case VariableLocation::LOCAL: {
      // Stack-allocated variable: read it out of the HEnvironment. The hole
      // marks an uninitialized lexical binding.
      HValue* value = LookupAndMakeLive(variable);
      if (value == graph()->GetConstantHole()) {
        DCHECK(IsDeclaredVariableMode(variable->mode()) &&
               variable->mode() != VAR);
        return Bailout(kReferenceToUninitializedVariable);
      }
      return ast_context()->ReturnValue(value);
    }

    case VariableLocation::CONTEXT: {
      HValue* context = BuildContextChainWalk(variable);
      HLoadContextSlot::Mode mode;
      switch (variable->mode()) {
        case LET:
        case CONST:
          // let/const slots may be uninitialized; kCheckDeoptimize makes
          // the load deoptimize in that case.
          mode = HLoadContextSlot::kCheckDeoptimize;
          break;
        default:
          mode = HLoadContextSlot::kNoCheck;
          break;
      }
      HLoadContextSlot* instr =
          new(zone()) HLoadContextSlot(context, variable->index(), mode);
      return ast_context()->ReturnInstruction(instr, expr->id());
    }

    case VariableLocation::LOOKUP:
      // Variables requiring dynamic (with/eval) lookup are not supported.
      return Bailout(kReferenceToAVariableWhichRequiresDynamicLookup);
  }
}
5941
5942
5943void HOptimizedGraphBuilder::VisitLiteral(Literal* expr) {
5944 DCHECK(!HasStackOverflow());
5945 DCHECK(current_block() != NULL);
5946 DCHECK(current_block()->HasPredecessor());
5947 HConstant* instr = New<HConstant>(expr->value());
5948 return ast_context()->ReturnInstruction(instr, expr->id());
5949}
5950
5951
// Builds graph for a regexp literal: calls the FastCloneRegExp stub with
// the closure, literal index, pattern and flags as embedded constants.
void HOptimizedGraphBuilder::VisitRegExpLiteral(RegExpLiteral* expr) {
  DCHECK(!HasStackOverflow());
  DCHECK(current_block() != NULL);
  DCHECK(current_block()->HasPredecessor());
  Callable callable = CodeFactory::FastCloneRegExp(isolate());
  HValue* values[] = {
      context(), AddThisFunction(), Add<HConstant>(expr->literal_index()),
      Add<HConstant>(expr->pattern()), Add<HConstant>(expr->flags())};
  HConstant* stub_value = Add<HConstant>(callable.code());
  HInstruction* instr = New<HCallWithDescriptor>(
      stub_value, 0, callable.descriptor(), ArrayVector(values));
  return ast_context()->ReturnInstruction(instr, expr->id());
}
5965
5966
5967static bool CanInlinePropertyAccess(Handle<Map> map) {
5968 if (map->instance_type() == HEAP_NUMBER_TYPE) return true;
5969 if (map->instance_type() < FIRST_NONSTRING_TYPE) return true;
5970 return map->IsJSObjectMap() && !map->is_dictionary_map() &&
5971 !map->has_named_interceptor() &&
5972 // TODO(verwaest): Whitelist contexts to which we have access.
5973 !map->is_access_check_needed();
5974}
5975
5976
// Determines whether the given array or object literal boilerplate satisfies
// all limits to be considered for fast deep-copying and computes the total
// size of all objects that are part of the graph.
//
// Recurses into nested JSObject values up to |max_depth| levels, decrementing
// |*max_properties| for every element/property encountered; returns false as
// soon as either budget is exhausted.
static bool IsFastLiteral(Handle<JSObject> boilerplate,
                          int max_depth,
                          int* max_properties) {
  // A deprecated map must be migrated before the layout can be trusted.
  if (boilerplate->map()->is_deprecated() &&
      !JSObject::TryMigrateInstance(boilerplate)) {
    return false;
  }

  DCHECK(max_depth >= 0 && *max_properties >= 0);
  if (max_depth == 0) return false;

  Isolate* isolate = boilerplate->GetIsolate();
  Handle<FixedArrayBase> elements(boilerplate->elements());
  // Copy-on-write element backing stores are shared, not deep-copied, so
  // they don't count against the budgets.
  if (elements->length() > 0 &&
      elements->map() != isolate->heap()->fixed_cow_array_map()) {
    if (boilerplate->HasFastSmiOrObjectElements()) {
      Handle<FixedArray> fast_elements = Handle<FixedArray>::cast(elements);
      int length = elements->length();
      for (int i = 0; i < length; i++) {
        if ((*max_properties)-- == 0) return false;
        Handle<Object> value(fast_elements->get(i), isolate);
        if (value->IsJSObject()) {
          Handle<JSObject> value_object = Handle<JSObject>::cast(value);
          if (!IsFastLiteral(value_object,
                             max_depth - 1,
                             max_properties)) {
            return false;
          }
        }
      }
    } else if (!boilerplate->HasFastDoubleElements()) {
      // Only fast smi/object and fast double elements are supported.
      return false;
    }
  }

  // Out-of-object (slow) properties are not supported; otherwise walk the
  // in-object descriptors.
  Handle<FixedArray> properties(boilerplate->properties());
  if (properties->length() > 0) {
    return false;
  } else {
    Handle<DescriptorArray> descriptors(
        boilerplate->map()->instance_descriptors());
    int limit = boilerplate->map()->NumberOfOwnDescriptors();
    for (int i = 0; i < limit; i++) {
      PropertyDetails details = descriptors->GetDetails(i);
      if (details.type() != DATA) continue;
      if ((*max_properties)-- == 0) return false;
      FieldIndex field_index = FieldIndex::ForDescriptor(boilerplate->map(), i);
      // Unboxed doubles hold no heap object to recurse into.
      if (boilerplate->IsUnboxedDoubleField(field_index)) continue;
      Handle<Object> value(boilerplate->RawFastPropertyAt(field_index),
                           isolate);
      if (value->IsJSObject()) {
        Handle<JSObject> value_object = Handle<JSObject>::cast(value);
        if (!IsFastLiteral(value_object,
                           max_depth - 1,
                           max_properties)) {
          return false;
        }
      }
    }
  }
  return true;
}
6042
6043
6044void HOptimizedGraphBuilder::VisitObjectLiteral(ObjectLiteral* expr) {
6045 DCHECK(!HasStackOverflow());
6046 DCHECK(current_block() != NULL);
6047 DCHECK(current_block()->HasPredecessor());
6048
6049 Handle<JSFunction> closure = function_state()->compilation_info()->closure();
6050 HInstruction* literal;
6051
6052 // Check whether to use fast or slow deep-copying for boilerplate.
6053 int max_properties = kMaxFastLiteralProperties;
6054 Handle<Object> literals_cell(
6055 closure->literals()->literal(expr->literal_index()), isolate());
6056 Handle<AllocationSite> site;
6057 Handle<JSObject> boilerplate;
6058 if (!literals_cell->IsUndefined()) {
6059 // Retrieve the boilerplate
6060 site = Handle<AllocationSite>::cast(literals_cell);
6061 boilerplate = Handle<JSObject>(JSObject::cast(site->transition_info()),
6062 isolate());
6063 }
6064
6065 if (!boilerplate.is_null() &&
6066 IsFastLiteral(boilerplate, kMaxFastLiteralDepth, &max_properties)) {
6067 AllocationSiteUsageContext site_context(isolate(), site, false);
6068 site_context.EnterNewScope();
6069 literal = BuildFastLiteral(boilerplate, &site_context);
6070 site_context.ExitScope(site, boilerplate);
6071 } else {
6072 NoObservableSideEffectsScope no_effects(this);
6073 Handle<FixedArray> constant_properties = expr->constant_properties();
6074 int literal_index = expr->literal_index();
6075 int flags = expr->ComputeFlags(true);
6076
6077 Add<HPushArguments>(AddThisFunction(), Add<HConstant>(literal_index),
6078 Add<HConstant>(constant_properties),
6079 Add<HConstant>(flags));
6080
6081 Runtime::FunctionId function_id = Runtime::kCreateObjectLiteral;
6082 literal = Add<HCallRuntime>(Runtime::FunctionForId(function_id), 4);
6083 }
6084
6085 // The object is expected in the bailout environment during computation
6086 // of the property values and is the value of the entire expression.
6087 Push(literal);
6088 for (int i = 0; i < expr->properties()->length(); i++) {
6089 ObjectLiteral::Property* property = expr->properties()->at(i);
6090 if (property->is_computed_name()) return Bailout(kComputedPropertyName);
6091 if (property->IsCompileTimeValue()) continue;
6092
6093 Literal* key = property->key()->AsLiteral();
6094 Expression* value = property->value();
6095
6096 switch (property->kind()) {
6097 case ObjectLiteral::Property::MATERIALIZED_LITERAL:
6098 DCHECK(!CompileTimeValue::IsCompileTimeValue(value));
6099 // Fall through.
6100 case ObjectLiteral::Property::COMPUTED:
6101 // It is safe to use [[Put]] here because the boilerplate already
6102 // contains computed properties with an uninitialized value.
6103 if (key->value()->IsInternalizedString()) {
6104 if (property->emit_store()) {
6105 CHECK_ALIVE(VisitForValue(value));
6106 HValue* value = Pop();
6107
6108 Handle<Map> map = property->GetReceiverType();
6109 Handle<String> name = key->AsPropertyName();
6110 HValue* store;
6111 FeedbackVectorSlot slot = property->GetSlot();
6112 if (map.is_null()) {
6113 // If we don't know the monomorphic type, do a generic store.
6114 CHECK_ALIVE(store = BuildNamedGeneric(STORE, NULL, slot, literal,
6115 name, value));
6116 } else {
6117 PropertyAccessInfo info(this, STORE, map, name);
6118 if (info.CanAccessMonomorphic()) {
6119 HValue* checked_literal = Add<HCheckMaps>(literal, map);
6120 DCHECK(!info.IsAccessorConstant());
6121 store = BuildMonomorphicAccess(
6122 &info, literal, checked_literal, value,
6123 BailoutId::None(), BailoutId::None());
6124 } else {
6125 CHECK_ALIVE(store = BuildNamedGeneric(STORE, NULL, slot,
6126 literal, name, value));
6127 }
6128 }
6129 if (store->IsInstruction()) {
6130 AddInstruction(HInstruction::cast(store));
6131 }
6132 DCHECK(store->HasObservableSideEffects());
6133 Add<HSimulate>(key->id(), REMOVABLE_SIMULATE);
6134
6135 // Add [[HomeObject]] to function literals.
6136 if (FunctionLiteral::NeedsHomeObject(property->value())) {
6137 Handle<Symbol> sym = isolate()->factory()->home_object_symbol();
6138 HInstruction* store_home = BuildNamedGeneric(
6139 STORE, NULL, property->GetSlot(1), value, sym, literal);
6140 AddInstruction(store_home);
6141 DCHECK(store_home->HasObservableSideEffects());
6142 Add<HSimulate>(property->value()->id(), REMOVABLE_SIMULATE);
6143 }
6144 } else {
6145 CHECK_ALIVE(VisitForEffect(value));
6146 }
6147 break;
6148 }
6149 // Fall through.
6150 case ObjectLiteral::Property::PROTOTYPE:
6151 case ObjectLiteral::Property::SETTER:
6152 case ObjectLiteral::Property::GETTER:
6153 return Bailout(kObjectLiteralWithComplexProperty);
6154 default: UNREACHABLE();
6155 }
6156 }
6157
Ben Murdochda12d292016-06-02 14:46:10 +01006158 return ast_context()->ReturnValue(Pop());
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00006159}
6160
6161
// Builds the Hydrogen IR for an array literal. Deep-copies an existing
// boilerplate inline when it qualifies as a "fast literal"; otherwise calls
// into the runtime. Non-compile-time element values are then stored one by
// one into the freshly created array.
void HOptimizedGraphBuilder::VisitArrayLiteral(ArrayLiteral* expr) {
  DCHECK(!HasStackOverflow());
  DCHECK(current_block() != NULL);
  DCHECK(current_block()->HasPredecessor());
  ZoneList<Expression*>* subexprs = expr->values();
  int length = subexprs->length();
  HInstruction* literal;

  // Look up this literal's cell in the closure's literals array. An
  // undefined cell means no boilerplate has been created yet.
  Handle<AllocationSite> site;
  Handle<LiteralsArray> literals(environment()->closure()->literals(),
                                 isolate());
  Handle<Object> literals_cell(literals->literal(expr->literal_index()),
                               isolate());
  Handle<JSObject> boilerplate_object;
  if (!literals_cell->IsUndefined()) {
    DCHECK(literals_cell->IsAllocationSite());
    site = Handle<AllocationSite>::cast(literals_cell);
    boilerplate_object = Handle<JSObject>(
        JSObject::cast(site->transition_info()), isolate());
  }

  // Check whether to use fast or slow deep-copying for boilerplate.
  int max_properties = kMaxFastLiteralProperties;
  if (!boilerplate_object.is_null() &&
      IsFastLiteral(boilerplate_object, kMaxFastLiteralDepth,
                    &max_properties)) {
    DCHECK(site->SitePointsToLiteral());
    // Fast path: deep-copy the boilerplate inline while tracking
    // allocation-site usage.
    AllocationSiteUsageContext site_context(isolate(), site, false);
    site_context.EnterNewScope();
    literal = BuildFastLiteral(boilerplate_object, &site_context);
    site_context.ExitScope(site, boilerplate_object);
  } else {
    // Slow path: create the array literal via a runtime call.
    NoObservableSideEffectsScope no_effects(this);
    Handle<FixedArray> constants = expr->constant_elements();
    int literal_index = expr->literal_index();
    int flags = expr->ComputeFlags(true);

    Add<HPushArguments>(AddThisFunction(), Add<HConstant>(literal_index),
                        Add<HConstant>(constants), Add<HConstant>(flags));

    Runtime::FunctionId function_id = Runtime::kCreateArrayLiteral;
    literal = Add<HCallRuntime>(Runtime::FunctionForId(function_id), 4);

    // Register to deopt if the boilerplate ElementsKind changes.
    if (!site.is_null()) {
      top_info()->dependencies()->AssumeTransitionStable(site);
    }
  }

  // The array is expected in the bailout environment during computation
  // of the property values and is the value of the entire expression.
  Push(literal);

  HInstruction* elements = NULL;

  for (int i = 0; i < length; i++) {
    Expression* subexpr = subexprs->at(i);
    DCHECK(!subexpr->IsSpread());

    // If the subexpression is a literal or a simple materialized literal it
    // is already set in the cloned array.
    if (CompileTimeValue::IsCompileTimeValue(subexpr)) continue;

    CHECK_ALIVE(VisitForValue(subexpr));
    HValue* value = Pop();
    if (!Smi::IsValid(i)) return Bailout(kNonSmiKeyInArrayLiteral);

    // Re-load the elements backing store each iteration; the subexpression
    // evaluation above may have had arbitrary side effects.
    elements = AddLoadElements(literal);

    HValue* key = Add<HConstant>(i);

    if (!boilerplate_object.is_null()) {
      // Known boilerplate: store directly using its elements kind.
      ElementsKind boilerplate_elements_kind =
          boilerplate_object->GetElementsKind();
      switch (boilerplate_elements_kind) {
        case FAST_SMI_ELEMENTS:
        case FAST_HOLEY_SMI_ELEMENTS:
        case FAST_ELEMENTS:
        case FAST_HOLEY_ELEMENTS:
        case FAST_DOUBLE_ELEMENTS:
        case FAST_HOLEY_DOUBLE_ELEMENTS: {
          Add<HStoreKeyed>(elements, key, value, nullptr,
                           boilerplate_elements_kind);
          break;
        }
        default:
          UNREACHABLE();
          break;
      }
    } else {
      // No boilerplate: fall back to a generic keyed store.
      HInstruction* instr = BuildKeyedGeneric(
          STORE, expr, expr->LiteralFeedbackSlot(), literal, key, value);
      AddInstruction(instr);
    }

    Add<HSimulate>(expr->GetIdForElement(i));
  }

  return ast_context()->ReturnValue(Pop());
}
6262
6263
// Emits a heap-object (non-smi) check on |object| followed by a map check
// against |map|, and returns the map-check instruction.
HCheckMaps* HOptimizedGraphBuilder::AddCheckMap(HValue* object,
                                                Handle<Map> map) {
  BuildCheckHeapObject(object);
  return Add<HCheckMaps>(object, map);
}
6269
6270
// Builds a load of the data field described by |info| from |checked_object|.
// Loads of read-only, non-configurable properties on constant receivers are
// folded to constants; boxed double fields are loaded via their HeapNumber.
HInstruction* HOptimizedGraphBuilder::BuildLoadNamedField(
    PropertyAccessInfo* info,
    HValue* checked_object) {
  // See if this is a load for an immutable property
  if (checked_object->ActualValue()->IsConstant()) {
    Handle<Object> object(
        HConstant::cast(checked_object->ActualValue())->handle(isolate()));

    if (object->IsJSObject()) {
      LookupIterator it(object, info->name(),
                        LookupIterator::OWN_SKIP_INTERCEPTOR);
      Handle<Object> value = JSReceiver::GetDataProperty(&it);
      if (it.IsFound() && it.IsReadOnly() && !it.IsConfigurable()) {
        // Read-only and non-configurable: the value cannot change, so the
        // load folds to a constant.
        return New<HConstant>(value);
      }
    }
  }

  HObjectAccess access = info->access();
  if (access.representation().IsDouble() &&
      (!FLAG_unbox_double_fields || !access.IsInobject())) {
    // The double is stored boxed in a HeapNumber.
    // Load the heap number.
    checked_object = Add<HLoadNamedField>(
        checked_object, nullptr,
        access.WithRepresentation(Representation::Tagged()));
    // Load the double value from it.
    access = HObjectAccess::ForHeapNumberValue();
  }

  SmallMapList* map_list = info->field_maps();
  if (map_list->length() == 0) {
    // No known field maps: plain load.
    return New<HLoadNamedField>(checked_object, checked_object, access);
  }

  // Attach the set of known (stable) field maps so the load carries the
  // possible maps of the resulting value.
  UniqueSet<Map>* maps = new(zone()) UniqueSet<Map>(map_list->length(), zone());
  for (int i = 0; i < map_list->length(); ++i) {
    maps->Add(Unique<Map>::CreateImmovable(map_list->at(i)), zone());
  }
  return New<HLoadNamedField>(
      checked_object, checked_object, access, maps, info->field_type());
}
6312
6313
// Builds a store of |value| into the field described by |info| on
// |checked_object|. Boxed double fields either allocate a fresh mutable
// HeapNumber (on a map transition) or write into the existing box; when the
// store transitions the receiver's map, the transition is attached to the
// store instruction.
HInstruction* HOptimizedGraphBuilder::BuildStoreNamedField(
    PropertyAccessInfo* info,
    HValue* checked_object,
    HValue* value) {
  bool transition_to_field = info->IsTransition();
  // TODO(verwaest): Move this logic into PropertyAccessInfo.
  HObjectAccess field_access = info->access();

  HStoreNamedField *instr;
  if (field_access.representation().IsDouble() &&
      (!FLAG_unbox_double_fields || !field_access.IsInobject())) {
    // The double field is stored boxed in a HeapNumber.
    HObjectAccess heap_number_access =
        field_access.WithRepresentation(Representation::Tagged());
    if (transition_to_field) {
      // The store requires a mutable HeapNumber to be allocated.
      NoObservableSideEffectsScope no_side_effects(this);
      HInstruction* heap_number_size = Add<HConstant>(HeapNumber::kSize);

      // TODO(hpayer): Allocation site pretenuring support.
      HInstruction* heap_number =
          Add<HAllocate>(heap_number_size, HType::HeapObject(), NOT_TENURED,
                         MUTABLE_HEAP_NUMBER_TYPE, graph()->GetConstant0());
      AddStoreMapConstant(
          heap_number, isolate()->factory()->mutable_heap_number_map());
      Add<HStoreNamedField>(heap_number, HObjectAccess::ForHeapNumberValue(),
                            value);
      // Store the freshly allocated box into the (tagged) field.
      instr = New<HStoreNamedField>(checked_object->ActualValue(),
                                    heap_number_access,
                                    heap_number);
    } else {
      // Already holds a HeapNumber; load the box and write its value field.
      HInstruction* heap_number =
          Add<HLoadNamedField>(checked_object, nullptr, heap_number_access);
      instr = New<HStoreNamedField>(heap_number,
                                    HObjectAccess::ForHeapNumberValue(),
                                    value, STORE_TO_INITIALIZED_ENTRY);
    }
  } else {
    if (field_access.representation().IsHeapObject()) {
      BuildCheckHeapObject(value);
    }

    if (!info->field_maps()->is_empty()) {
      DCHECK(field_access.representation().IsHeapObject());
      // Guard the stored value against the known field maps.
      value = Add<HCheckMaps>(value, info->field_maps());
    }

    // This is a normal store.
    instr = New<HStoreNamedField>(
        checked_object->ActualValue(), field_access, value,
        transition_to_field ? INITIALIZING_STORE : STORE_TO_INITIALIZED_ENTRY);
  }

  if (transition_to_field) {
    // Attach the target map so the store also updates the receiver's map.
    Handle<Map> transition(info->transition());
    DCHECK(!transition->is_deprecated());
    instr->SetTransition(Add<HConstant>(transition));
  }
  return instr;
}
6374
Ben Murdoch097c5b22016-05-18 11:27:45 +01006375Handle<FieldType>
6376HOptimizedGraphBuilder::PropertyAccessInfo::GetFieldTypeFromMap(
6377 Handle<Map> map) const {
6378 DCHECK(IsFound());
6379 DCHECK(number_ < map->NumberOfOwnDescriptors());
6380 return handle(map->instance_descriptors()->GetFieldType(number_), isolate());
6381}
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00006382
// Checks whether |info| (an access on another receiver map) can share one
// generated access with this one. On success the two infos are unified in
// place: the representation is generalized, field maps are merged (loads)
// or required to match exactly (stores), and field types are combined.
bool HOptimizedGraphBuilder::PropertyAccessInfo::IsCompatible(
    PropertyAccessInfo* info) {
  if (!CanInlinePropertyAccess(map_)) return false;

  // Currently only handle Type::Number as a polymorphic case.
  // TODO(verwaest): Support monomorphic handling of numbers with a HCheckNumber
  // instruction.
  if (IsNumberType()) return false;

  // Values are only compatible for monomorphic load if they all behave the same
  // regarding value wrappers.
  if (IsValueWrapped() != info->IsValueWrapped()) return false;

  if (!LookupDescriptor()) return false;

  if (!IsFound()) {
    // Both miss locally: compatible only if the other also misses (or finds
    // it on a holder) and both share the same prototype.
    return (!info->IsFound() || info->has_holder()) &&
        map()->prototype() == info->map()->prototype();
  }

  // Mismatch if the other access info found the property in the prototype
  // chain.
  if (info->has_holder()) return false;

  if (IsAccessorConstant()) {
    // Constant accessors must be the exact same accessor and API holder.
    return accessor_.is_identical_to(info->accessor_) &&
        api_holder_.is_identical_to(info->api_holder_);
  }

  if (IsDataConstant()) {
    return constant_.is_identical_to(info->constant_);
  }

  DCHECK(IsData());
  if (!info->IsData()) return false;

  // Field accesses must agree on representation (direction-sensitive),
  // offset, and in-object-ness.
  Representation r = access_.representation();
  if (IsLoad()) {
    if (!info->access_.representation().IsCompatibleForLoad(r)) return false;
  } else {
    if (!info->access_.representation().IsCompatibleForStore(r)) return false;
  }
  if (info->access_.offset() != access_.offset()) return false;
  if (info->access_.IsInobject() != access_.IsInobject()) return false;
  if (IsLoad()) {
    if (field_maps_.is_empty()) {
      // This side has no field-map knowledge; the merged access has none.
      info->field_maps_.Clear();
    } else if (!info->field_maps_.is_empty()) {
      // Union our field maps into the other info's set (kept sorted).
      for (int i = 0; i < field_maps_.length(); ++i) {
        info->field_maps_.AddMapIfMissing(field_maps_.at(i), info->zone());
      }
      info->field_maps_.Sort();
    }
  } else {
    // We can only merge stores that agree on their field maps. The comparison
    // below is safe, since we keep the field maps sorted.
    if (field_maps_.length() != info->field_maps_.length()) return false;
    for (int i = 0; i < field_maps_.length(); ++i) {
      if (!field_maps_.at(i).is_identical_to(info->field_maps_.at(i))) {
        return false;
      }
    }
  }
  info->GeneralizeRepresentation(r);
  info->field_type_ = info->field_type_.Combine(field_type_);
  return true;
}
6450
6451
// Looks up name_ in map_'s own descriptors and loads the result into this
// info. Non-JSObject maps trivially succeed (there is nothing to look up).
bool HOptimizedGraphBuilder::PropertyAccessInfo::LookupDescriptor() {
  if (!map_->IsJSObjectMap()) return true;
  LookupDescriptor(*map_, *name_);
  return LoadResult(map_);
}
6457
6458
// Populates this info from the descriptor found on |map|: field access for
// data properties, accessor_/api_holder_ for constant accessors, constant_
// for data constants. Returns false when the access cannot be handled
// (store to a read-only property, non-AccessorPair accessors, or accessors
// that are neither JSFunction nor FunctionTemplateInfo).
bool HOptimizedGraphBuilder::PropertyAccessInfo::LoadResult(Handle<Map> map) {
  if (!IsLoad() && IsProperty() && IsReadOnly()) {
    // Cannot store to a read-only property.
    return false;
  }

  if (IsData()) {
    // Construct the object field access.
    int index = GetLocalFieldIndexFromMap(map);
    access_ = HObjectAccess::ForField(map, index, representation(), name_);

    // Load field map for heap objects.
    return LoadFieldMaps(map);
  } else if (IsAccessorConstant()) {
    Handle<Object> accessors = GetAccessorsFromMap(map);
    if (!accessors->IsAccessorPair()) return false;
    // Pick getter or setter depending on the access direction.
    Object* raw_accessor =
        IsLoad() ? Handle<AccessorPair>::cast(accessors)->getter()
                 : Handle<AccessorPair>::cast(accessors)->setter();
    if (!raw_accessor->IsJSFunction() &&
        !raw_accessor->IsFunctionTemplateInfo())
      return false;
    Handle<Object> accessor = handle(HeapObject::cast(raw_accessor));
    CallOptimization call_optimization(accessor);
    if (call_optimization.is_simple_api_call()) {
      // Simple API calls record the expected-type holder for later use.
      CallOptimization::HolderLookup holder_lookup;
      api_holder_ =
          call_optimization.LookupHolderOfExpectedType(map_, &holder_lookup);
    }
    accessor_ = accessor;
  } else if (IsDataConstant()) {
    constant_ = GetConstantFromMap(map);
  }

  return true;
}
6494
6495
// Derives field_maps_ and field_type_ from the field type recorded on |map|.
// Returns false only for a store to a field whose recorded type is None
// (the field map was cleared), which cannot be handled safely.
bool HOptimizedGraphBuilder::PropertyAccessInfo::LoadFieldMaps(
    Handle<Map> map) {
  // Clear any previously collected field maps/type.
  field_maps_.Clear();
  field_type_ = HType::Tagged();

  // Figure out the field type from the accessor map.
  Handle<FieldType> field_type = GetFieldTypeFromMap(map);

  // Collect the (stable) maps from the field type.
  if (field_type->IsClass()) {
    DCHECK(access_.representation().IsHeapObject());
    Handle<Map> field_map = field_type->AsClass();
    if (field_map->is_stable()) {
      // Only stable maps are worth recording; unstable ones may change.
      field_maps_.Add(field_map, zone());
    }
  }

  if (field_maps_.is_empty()) {
    // Store is not safe if the field map was cleared.
    return IsLoad() || !field_type->IsNone();
  }

  // Determine field HType from field type.
  field_type_ = HType::FromFieldType(field_type, zone());
  DCHECK(field_type_.IsHeapObject());

  // Add dependency on the map that introduced the field.
  top_info()->dependencies()->AssumeFieldType(GetFieldOwnerFromMap(map));
  return true;
}
6527
6528
// Walks the prototype chain of map() looking for name_. Private names do not
// traverse the chain. Returns false when a prototype map cannot be inlined,
// or — on a complete miss — when the terminal prototype is still a
// JSReceiver (so the miss is not definitive).
bool HOptimizedGraphBuilder::PropertyAccessInfo::LookupInPrototypes() {
  Handle<Map> map = this->map();
  if (name_->IsPrivate()) {
    NotFound();
    // Private names are not looked up on the chain, but a hidden prototype
    // still makes the miss unreliable.
    return !map->has_hidden_prototype();
  }

  while (map->prototype()->IsJSObject()) {
    holder_ = handle(JSObject::cast(map->prototype()));
    if (holder_->map()->is_deprecated()) {
      // Bring the holder up to date before inspecting its map.
      JSObject::TryMigrateInstance(holder_);
    }
    map = Handle<Map>(holder_->map());
    if (!CanInlinePropertyAccess(map)) {
      NotFound();
      return false;
    }
    LookupDescriptor(*map, *name_);
    if (IsFound()) return LoadResult(map);
  }

  NotFound();
  return !map->prototype()->IsJSReceiver();
}
6553
6554
6555bool HOptimizedGraphBuilder::PropertyAccessInfo::IsIntegerIndexedExotic() {
6556 InstanceType instance_type = map_->instance_type();
6557 return instance_type == JS_TYPED_ARRAY_TYPE && name_->IsString() &&
6558 IsSpecialIndex(isolate()->unicode_cache(), String::cast(*name_));
6559}
6560
6561
// Determines whether an inline (monomorphic) access can be generated for
// map_/name_. Loads may target special internal-field accessors, a
// constructor's "prototype", own properties, or the prototype chain; stores
// may additionally target a map transition that introduces the field.
bool HOptimizedGraphBuilder::PropertyAccessInfo::CanAccessMonomorphic() {
  if (!CanInlinePropertyAccess(map_)) return false;
  // JSObject / JSArrayBufferView internal-field accessors are only
  // inlinable as loads.
  if (IsJSObjectFieldAccessor()) return IsLoad();
  if (IsJSArrayBufferViewFieldAccessor()) return IsLoad();
  // "prototype" on a constructor function map (with an instance prototype)
  // is handled specially, loads only.
  if (map_->IsJSFunctionMap() && map_->is_constructor() &&
      !map_->has_non_instance_prototype() &&
      name_.is_identical_to(isolate()->factory()->prototype_string())) {
    return IsLoad();
  }
  if (!LookupDescriptor()) return false;
  if (IsFound()) return IsLoad() || !IsReadOnly();
  if (IsIntegerIndexedExotic()) return false;
  if (!LookupInPrototypes()) return false;
  if (IsLoad()) return true;

  if (IsAccessorConstant()) return true;
  // Store to a missing property: look for a map transition adding it.
  LookupTransition(*map_, *name_, NONE);
  if (IsTransitionToData() && map_->unused_property_fields() > 0) {
    // Construct the object field access.
    int descriptor = transition()->LastAdded();
    int index =
        transition()->instance_descriptors()->GetFieldIndex(descriptor) -
        map_->GetInObjectProperties();
    PropertyDetails details =
        transition()->instance_descriptors()->GetDetails(descriptor);
    Representation representation = details.representation();
    access_ = HObjectAccess::ForField(map_, index, representation, name_);

    // Load field map for heap objects.
    return LoadFieldMaps(transition());
  }
  return false;
}
6595
6596
// Checks whether the polymorphic map set |maps| (whose first entry is map_)
// can be handled by a single monomorphic access: either all maps agree on a
// special internal-field access, or each map's access is pairwise
// IsCompatible with this one.
bool HOptimizedGraphBuilder::PropertyAccessInfo::CanAccessAsMonomorphic(
    SmallMapList* maps) {
  DCHECK(map_.is_identical_to(maps->first()));
  if (!CanAccessMonomorphic()) return false;
  STATIC_ASSERT(kMaxLoadPolymorphism == kMaxStorePolymorphism);
  if (maps->length() > kMaxLoadPolymorphism) return false;
  HObjectAccess access = HObjectAccess::ForMap();  // bogus default
  if (GetJSObjectFieldAccess(&access)) {
    // Every map must resolve to the same JSObject internal-field access.
    for (int i = 1; i < maps->length(); ++i) {
      PropertyAccessInfo test_info(builder_, access_type_, maps->at(i), name_);
      HObjectAccess test_access = HObjectAccess::ForMap();  // bogus default
      if (!test_info.GetJSObjectFieldAccess(&test_access)) return false;
      if (!access.Equals(test_access)) return false;
    }
    return true;
  }
  if (GetJSArrayBufferViewFieldAccess(&access)) {
    // Same agreement requirement for ArrayBufferView internal fields.
    for (int i = 1; i < maps->length(); ++i) {
      PropertyAccessInfo test_info(builder_, access_type_, maps->at(i), name_);
      HObjectAccess test_access = HObjectAccess::ForMap();  // bogus default
      if (!test_info.GetJSArrayBufferViewFieldAccess(&test_access)) {
        return false;
      }
      if (!access.Equals(test_access)) return false;
    }
    return true;
  }

  // Currently only handle numbers as a polymorphic case.
  // TODO(verwaest): Support monomorphic handling of numbers with a HCheckNumber
  // instruction.
  if (IsNumberType()) return false;

  // Multiple maps cannot transition to the same target map.
  DCHECK(!IsLoad() || !IsTransition());
  if (IsTransition() && maps->length() > 1) return false;

  for (int i = 1; i < maps->length(); ++i) {
    PropertyAccessInfo test_info(builder_, access_type_, maps->at(i), name_);
    if (!test_info.IsCompatible(this)) return false;
  }

  return true;
}
6641
6642
6643Handle<Map> HOptimizedGraphBuilder::PropertyAccessInfo::map() {
6644 Handle<JSFunction> ctor;
6645 if (Map::GetConstructorFunction(
6646 map_, handle(current_info()->closure()->context()->native_context()))
6647 .ToHandle(&ctor)) {
6648 return handle(ctor->initial_map());
6649 }
6650 return map_;
6651}
6652
6653
6654static bool NeedsWrapping(Handle<Map> map, Handle<JSFunction> target) {
6655 return !map->IsJSObjectMap() &&
6656 is_sloppy(target->shared()->language_mode()) &&
6657 !target->shared()->native();
6658}
6659
6660
// Convenience wrapper: checks receiver wrapping for this info's map_ when
// calling |target|.
bool HOptimizedGraphBuilder::PropertyAccessInfo::NeedsWrappingFor(
    Handle<JSFunction> target) const {
  return NeedsWrapping(map_, target);
}
6665
6666
// Emits the IR for one monomorphic named access described by |info|.
// |object| is the receiver, |checked_object| its map-checked version, and
// |value| is used only for stores. Returns the access result; returns NULL
// when an accessor was inlined instead (or the builder's stack overflowed),
// and nullptr after a bailout.
HValue* HOptimizedGraphBuilder::BuildMonomorphicAccess(
    PropertyAccessInfo* info, HValue* object, HValue* checked_object,
    HValue* value, BailoutId ast_id, BailoutId return_id,
    bool can_inline_accessor) {
  HObjectAccess access = HObjectAccess::ForMap();  // bogus default
  if (info->GetJSObjectFieldAccess(&access)) {
    // Internal JSObject field: a direct load.
    DCHECK(info->IsLoad());
    return New<HLoadNamedField>(object, checked_object, access);
  }

  if (info->GetJSArrayBufferViewFieldAccess(&access)) {
    DCHECK(info->IsLoad());
    // A neutered buffer view must deoptimize before the field is read.
    checked_object = Add<HCheckArrayBufferNotNeutered>(checked_object);
    return New<HLoadNamedField>(object, checked_object, access);
  }

  // "prototype" on a constructor function has a dedicated instruction.
  if (info->name().is_identical_to(isolate()->factory()->prototype_string()) &&
      info->map()->IsJSFunctionMap() && info->map()->is_constructor()) {
    DCHECK(!info->map()->has_non_instance_prototype());
    return New<HLoadFunctionPrototype>(checked_object);
  }

  HValue* checked_holder = checked_object;
  if (info->has_holder()) {
    // Property lives up the prototype chain: guard the maps to the holder.
    Handle<JSObject> prototype(JSObject::cast(info->map()->prototype()));
    checked_holder = BuildCheckPrototypeMaps(prototype, info->holder());
  }

  if (!info->IsFound()) {
    DCHECK(info->IsLoad());
    // A definitive miss loads undefined.
    return graph()->GetConstantUndefined();
  }

  if (info->IsData()) {
    if (info->IsLoad()) {
      return BuildLoadNamedField(info, checked_holder);
    } else {
      return BuildStoreNamedField(info, checked_object, value);
    }
  }

  if (info->IsTransition()) {
    DCHECK(!info->IsLoad());
    return BuildStoreNamedField(info, checked_object, value);
  }

  if (info->IsAccessorConstant()) {
    // Accessor call: the receiver (and, for setters, the value) become the
    // call's arguments.
    Push(checked_object);
    int argument_count = 1;
    if (!info->IsLoad()) {
      argument_count = 2;
      Push(value);
    }

    if (info->accessor()->IsJSFunction() &&
        info->NeedsWrappingFor(Handle<JSFunction>::cast(info->accessor()))) {
      // Receiver needs wrapping: go through the generic function-call path.
      HValue* function = Add<HConstant>(info->accessor());
      PushArgumentsFromEnvironment(argument_count);
      return NewCallFunction(function, argument_count, TailCallMode::kDisallow,
                             ConvertReceiverMode::kNotNullOrUndefined,
                             TailCallMode::kDisallow);
    } else if (FLAG_inline_accessors && can_inline_accessor) {
      bool success = info->IsLoad()
          ? TryInlineGetter(info->accessor(), info->map(), ast_id, return_id)
          : TryInlineSetter(
              info->accessor(), info->map(), ast_id, return_id, value);
      // NULL signals "inlined" (or stack overflow) to the caller.
      if (success || HasStackOverflow()) return NULL;
    }

    PushArgumentsFromEnvironment(argument_count);
    if (!info->accessor()->IsJSFunction()) {
      // Non-JSFunction accessors (FunctionTemplateInfo) are not callable
      // here; bail out of the optimized compilation.
      Bailout(kInliningBailedOut);
      return nullptr;
    }
    return NewCallConstantFunction(Handle<JSFunction>::cast(info->accessor()),
                                   argument_count, TailCallMode::kDisallow,
                                   TailCallMode::kDisallow);
  }

  DCHECK(info->IsDataConstant());
  if (info->IsLoad()) {
    return New<HConstant>(info->constant());
  } else {
    // Storing over a data constant: deopt unless the same value is stored.
    return New<HCheckValue>(value, Handle<JSFunction>::cast(info->constant()));
  }
}
6753
6754
// Emits a polymorphic named load/store over |maps|: a chain of map-compare
// branches, each leading to a monomorphic access, joined at the end.
// Numbers get a combined smi/heap-number path, strings are handled once via
// an is-string branch. Unhandled maps fall back to a generic IC, or to a
// hard deoptimization when every known map was covered.
void HOptimizedGraphBuilder::HandlePolymorphicNamedFieldAccess(
    PropertyAccessType access_type, Expression* expr, FeedbackVectorSlot slot,
    BailoutId ast_id, BailoutId return_id, HValue* object, HValue* value,
    SmallMapList* maps, Handle<Name> name) {
  // Something did not match; must use a polymorphic load.
  int count = 0;
  HBasicBlock* join = NULL;
  HBasicBlock* number_block = NULL;
  bool handled_string = false;

  // First pass: count the monomorphically-accessible maps and detect
  // whether a number map is present (which requires an up-front smi check).
  bool handle_smi = false;
  STATIC_ASSERT(kMaxLoadPolymorphism == kMaxStorePolymorphism);
  int i;
  for (i = 0; i < maps->length() && count < kMaxLoadPolymorphism; ++i) {
    PropertyAccessInfo info(this, access_type, maps->at(i), name);
    if (info.IsStringType()) {
      if (handled_string) continue;
      handled_string = true;
    }
    if (info.CanAccessMonomorphic()) {
      count++;
      if (info.IsNumberType()) {
        handle_smi = true;
        break;
      }
    }
  }

  if (i < maps->length()) {
    // Hit the polymorphism limit: drop all maps and go fully generic.
    count = -1;
    maps->Clear();
  } else {
    count = 0;
  }
  HControlInstruction* smi_check = NULL;
  handled_string = false;

  // Second pass: emit one compare-and-access arm per handled map.
  for (i = 0; i < maps->length() && count < kMaxLoadPolymorphism; ++i) {
    PropertyAccessInfo info(this, access_type, maps->at(i), name);
    if (info.IsStringType()) {
      if (handled_string) continue;
      handled_string = true;
    }
    if (!info.CanAccessMonomorphic()) continue;

    if (count == 0) {
      join = graph()->CreateBasicBlock();
      if (handle_smi) {
        // Smis go straight to the number path; non-smis continue into the
        // map-compare chain.
        HBasicBlock* empty_smi_block = graph()->CreateBasicBlock();
        HBasicBlock* not_smi_block = graph()->CreateBasicBlock();
        number_block = graph()->CreateBasicBlock();
        smi_check = New<HIsSmiAndBranch>(
            object, empty_smi_block, not_smi_block);
        FinishCurrentBlock(smi_check);
        GotoNoSimulate(empty_smi_block, number_block);
        set_current_block(not_smi_block);
      } else {
        BuildCheckHeapObject(object);
      }
    }
    ++count;
    HBasicBlock* if_true = graph()->CreateBasicBlock();
    HBasicBlock* if_false = graph()->CreateBasicBlock();
    HUnaryControlInstruction* compare;

    HValue* dependency;
    if (info.IsNumberType()) {
      Handle<Map> heap_number_map = isolate()->factory()->heap_number_map();
      compare = New<HCompareMap>(object, heap_number_map, if_true, if_false);
      dependency = smi_check;
    } else if (info.IsStringType()) {
      compare = New<HIsStringAndBranch>(object, if_true, if_false);
      dependency = compare;
    } else {
      compare = New<HCompareMap>(object, info.map(), if_true, if_false);
      dependency = compare;
    }
    FinishCurrentBlock(compare);

    if (info.IsNumberType()) {
      // Heap numbers merge into the smi path before the access is emitted.
      GotoNoSimulate(if_true, number_block);
      if_true = number_block;
    }

    set_current_block(if_true);

    HValue* access =
        BuildMonomorphicAccess(&info, object, dependency, value, ast_id,
                               return_id, FLAG_polymorphic_inlining);

    // Loads evaluate to the loaded value; stores to the stored value.
    HValue* result = NULL;
    switch (access_type) {
      case LOAD:
        result = access;
        break;
      case STORE:
        result = value;
        break;
    }

    if (access == NULL) {
      // NULL means an accessor was inlined, or the builder overflowed.
      if (HasStackOverflow()) return;
    } else {
      if (access->IsInstruction()) {
        HInstruction* instr = HInstruction::cast(access);
        if (!instr->IsLinked()) AddInstruction(instr);
      }
      if (!ast_context()->IsEffect()) Push(result);
    }

    if (current_block() != NULL) Goto(join);
    set_current_block(if_false);
  }

  // Finish up. Unconditionally deoptimize if we've handled all the maps we
  // know about and do not want to handle ones we've never seen. Otherwise
  // use a generic IC.
  if (count == maps->length() && FLAG_deoptimize_uncommon_cases) {
    FinishExitWithHardDeoptimization(
        Deoptimizer::kUnknownMapInPolymorphicAccess);
  } else {
    HInstruction* instr =
        BuildNamedGeneric(access_type, expr, slot, object, name, value);
    AddInstruction(instr);
    if (!ast_context()->IsEffect()) Push(access_type == LOAD ? instr : value);

    if (join != NULL) {
      Goto(join);
    } else {
      // No arms were emitted: the generic access is the whole result.
      Add<HSimulate>(ast_id, REMOVABLE_SIMULATE);
      if (!ast_context()->IsEffect()) ast_context()->ReturnValue(Pop());
      return;
    }
  }

  DCHECK(join != NULL);
  if (join->HasPredecessor()) {
    join->SetJoinId(ast_id);
    set_current_block(join);
    if (!ast_context()->IsEffect()) ast_context()->ReturnValue(Pop());
  } else {
    // Nothing reaches the join (all arms deoptimized or returned).
    set_current_block(NULL);
  }
}
6899
// Collects the feedback-recorded receiver maps for |expr| into |*t| and
// returns true when the access is effectively monomorphic and inlinable.
// With feedback maps and a known receiver map, the set is filtered to
// possible transitions; with empty feedback, the receiver's known map is
// adopted if no map-changing instruction intervenes in the current block.
static bool ComputeReceiverTypes(Expression* expr, HValue* receiver,
                                 SmallMapList** t,
                                 HOptimizedGraphBuilder* builder) {
  Zone* zone = builder->zone();
  SmallMapList* maps = expr->GetReceiverTypes();
  *t = maps;
  bool monomorphic = expr->IsMonomorphic();
  if (maps != NULL && receiver->HasMonomorphicJSObjectType()) {
    if (maps->length() > 0) {
      // Keep only feedback maps reachable by transition from the receiver's
      // root map.
      Map* root_map = receiver->GetMonomorphicJSObjectMap()->FindRootMap();
      maps->FilterForPossibleTransitions(root_map);
      monomorphic = maps->length() == 1;
    } else {
      // No type feedback, see if we can infer the type. This is safely
      // possible if the receiver had a known map at some point, and no
      // map-changing stores have happened to it since.
      Handle<Map> candidate_map = receiver->GetMonomorphicJSObjectMap();
      // Walk backwards through the current block's instruction list.
      for (HInstruction* current = builder->current_block()->last();
           current != nullptr; current = current->previous()) {
        if (current->IsBlockEntry()) break;
        if (current->CheckChangesFlag(kMaps)) {
          // Only allow map changes that store the candidate map. We don't
          // need to care which object the map is being written into.
          if (!current->IsStoreNamedField()) break;
          HStoreNamedField* map_change = HStoreNamedField::cast(current);
          if (!map_change->value()->IsConstant()) break;
          HConstant* map_constant = HConstant::cast(map_change->value());
          if (!map_constant->representation().IsTagged()) break;
          Handle<Object> map = map_constant->handle(builder->isolate());
          if (!map.is_identical_to(candidate_map)) break;
        }
        if (current == receiver) {
          // We made it all the way back to the receiver without encountering
          // a map change! So we can assume that the receiver still has the
          // candidate_map we know about.
          maps->Add(candidate_map, zone);
          monomorphic = true;
          break;
        }
      }
    }
  }
  return monomorphic && CanInlinePropertyAccess(maps->first());
}
6944
6945
6946static bool AreStringTypes(SmallMapList* maps) {
6947 for (int i = 0; i < maps->length(); i++) {
6948 if (maps->at(i)->instance_type() >= FIRST_NONSTRING_TYPE) return false;
6949 }
6950 return true;
6951}
6952
6953
// Emits the store for a property assignment. Expects the operands already
// pushed on the environment stack: value (and key, object below it for
// keyed stores; object below value for named stores). Pops them, builds
// the access, adds a simulate after observable side effects, and returns
// the stored value through the current AST context.
void HOptimizedGraphBuilder::BuildStore(Expression* expr, Property* prop,
                                        FeedbackVectorSlot slot,
                                        BailoutId ast_id, BailoutId return_id,
                                        bool is_uninitialized) {
  if (!prop->key()->IsPropertyName()) {
    // Keyed store.
    HValue* value = Pop();
    HValue* key = Pop();
    HValue* object = Pop();
    bool has_side_effects = false;
    HValue* result =
        HandleKeyedElementAccess(object, key, value, expr, slot, ast_id,
                                 return_id, STORE, &has_side_effects);
    if (has_side_effects) {
      // Keep the value visible to the deoptimizer across the simulate when
      // the surrounding context expects a value.
      if (!ast_context()->IsEffect()) Push(value);
      Add<HSimulate>(ast_id, REMOVABLE_SIMULATE);
      if (!ast_context()->IsEffect()) Drop(1);
    }
    // A NULL result means the access was split across blocks and already
    // returned through the context.
    if (result == NULL) return;
    return ast_context()->ReturnValue(value);
  }

  // Named store.
  HValue* value = Pop();
  HValue* object = Pop();

  Literal* key = prop->key()->AsLiteral();
  Handle<String> name = Handle<String>::cast(key->value());
  DCHECK(!name.is_null());

  HValue* access = BuildNamedAccess(STORE, ast_id, return_id, expr, slot,
                                    object, name, value, is_uninitialized);
  if (access == NULL) return;

  if (!ast_context()->IsEffect()) Push(value);
  if (access->IsInstruction()) AddInstruction(HInstruction::cast(access));
  if (access->HasObservableSideEffects()) {
    Add<HSimulate>(ast_id, REMOVABLE_SIMULATE);
  }
  if (!ast_context()->IsEffect()) Drop(1);
  return ast_context()->ReturnValue(value);
}
6996
6997
// Visits the target object (and, for keyed stores, the key) and the RHS
// value in evaluation order, leaving them on the environment stack, then
// delegates to BuildStore which consumes them.
void HOptimizedGraphBuilder::HandlePropertyAssignment(Assignment* expr) {
  Property* prop = expr->target()->AsProperty();
  DCHECK(prop != NULL);
  CHECK_ALIVE(VisitForValue(prop->obj()));
  if (!prop->key()->IsPropertyName()) {
    CHECK_ALIVE(VisitForValue(prop->key()));
  }
  CHECK_ALIVE(VisitForValue(expr->value()));
  BuildStore(expr, prop, expr->AssignmentSlot(), expr->id(),
             expr->AssignmentId(), expr->IsUninitialized());
}
7009
7010
// Because not every expression has a position and there is not common
// superclass of Assignment and CountOperation, we cannot just pass the
// owning expression instead of position and ast_id separately.
//
// Emits a store of |value| into the global variable |var|. Tries, in
// order: a script-context slot store, a property-cell store (with
// constant/type specializations), and finally a generic named store on
// the global object.
void HOptimizedGraphBuilder::HandleGlobalVariableAssignment(
    Variable* var, HValue* value, FeedbackVectorSlot slot, BailoutId ast_id) {
  Handle<JSGlobalObject> global(current_info()->global_object());

  // Lookup in script contexts.
  {
    Handle<ScriptContextTable> script_contexts(
        global->native_context()->script_context_table());
    ScriptContextTable::LookupResult lookup;
    if (ScriptContextTable::Lookup(script_contexts, var->name(), &lookup)) {
      if (lookup.mode == CONST) {
        return Bailout(kNonInitializerAssignmentToConst);
      }
      Handle<Context> script_context =
          ScriptContextTable::GetContext(script_contexts, lookup.context_index);

      Handle<Object> current_value =
          FixedArray::get(*script_context, lookup.slot_index, isolate());

      // If the value is not the hole, it will stay initialized,
      // so no need to generate a check.
      if (*current_value == *isolate()->factory()->the_hole_value()) {
        return Bailout(kReferenceToUninitializedVariable);
      }

      HStoreNamedField* instr = Add<HStoreNamedField>(
          Add<HConstant>(script_context),
          HObjectAccess::ForContextSlot(lookup.slot_index), value);
      USE(instr);
      DCHECK(instr->HasObservableSideEffects());
      Add<HSimulate>(ast_id, REMOVABLE_SIMULATE);
      return;
    }
  }

  LookupIterator it(global, var->name(), LookupIterator::OWN);
  GlobalPropertyAccess type = LookupGlobalProperty(var, &it, STORE);
  if (type == kUseCell) {
    Handle<PropertyCell> cell = it.GetPropertyCell();
    // Register a dependency so this code deopts if the cell changes state.
    top_info()->dependencies()->AssumePropertyCell(cell);
    auto cell_type = it.property_details().cell_type();
    if (cell_type == PropertyCellType::kConstant ||
        cell_type == PropertyCellType::kUndefined) {
      // The cell holds a (believed-)constant value: deopt on any store of a
      // different value, either statically (constant operand) or with a
      // runtime equality check.
      Handle<Object> constant(cell->value(), isolate());
      if (value->IsConstant()) {
        HConstant* c_value = HConstant::cast(value);
        if (!constant.is_identical_to(c_value->handle(isolate()))) {
          Add<HDeoptimize>(Deoptimizer::kConstantGlobalVariableAssignment,
                           Deoptimizer::EAGER);
        }
      } else {
        HValue* c_constant = Add<HConstant>(constant);
        IfBuilder builder(this);
        if (constant->IsNumber()) {
          builder.If<HCompareNumericAndBranch>(value, c_constant, Token::EQ);
        } else {
          builder.If<HCompareObjectEqAndBranch>(value, c_constant);
        }
        builder.Then();
        builder.Else();
        Add<HDeoptimize>(Deoptimizer::kConstantGlobalVariableAssignment,
                         Deoptimizer::EAGER);
        builder.End();
      }
    }
    HConstant* cell_constant = Add<HConstant>(cell);
    auto access = HObjectAccess::ForPropertyCellValue();
    if (cell_type == PropertyCellType::kConstantType) {
      // Specialize the store representation to what the cell tracks.
      switch (cell->GetConstantType()) {
        case PropertyCellConstantType::kSmi:
          access = access.WithRepresentation(Representation::Smi());
          break;
        case PropertyCellConstantType::kStableMap: {
          // The map may no longer be stable, deopt if it's ever different from
          // what is currently there, which will allow for restablization.
          Handle<Map> map(HeapObject::cast(cell->value())->map());
          Add<HCheckHeapObject>(value);
          value = Add<HCheckMaps>(value, map);
          access = access.WithRepresentation(Representation::HeapObject());
          break;
        }
      }
    }
    HInstruction* instr = Add<HStoreNamedField>(cell_constant, access, value);
    // A cell store changes global variables, not in-object fields.
    instr->ClearChangesFlag(kInobjectFields);
    instr->SetChangesFlag(kGlobalVars);
    if (instr->HasObservableSideEffects()) {
      Add<HSimulate>(ast_id, REMOVABLE_SIMULATE);
    }
  } else {
    // Fall back to a generic named store on the global object.
    HValue* global_object = Add<HLoadNamedField>(
        BuildGetNativeContext(), nullptr,
        HObjectAccess::ForContextSlot(Context::EXTENSION_INDEX));
    HStoreNamedGeneric* instr =
        Add<HStoreNamedGeneric>(global_object, var->name(), value,
                                function_language_mode(), PREMONOMORPHIC);
    Handle<TypeFeedbackVector> vector =
        handle(current_feedback_vector(), isolate());
    instr->SetVectorAndSlot(vector, slot);
    USE(instr);
    DCHECK(instr->HasObservableSideEffects());
    Add<HSimulate>(ast_id, REMOVABLE_SIMULATE);
  }
}
7118
7119
7120void HOptimizedGraphBuilder::HandleCompoundAssignment(Assignment* expr) {
7121 Expression* target = expr->target();
7122 VariableProxy* proxy = target->AsVariableProxy();
7123 Property* prop = target->AsProperty();
7124 DCHECK(proxy == NULL || prop == NULL);
7125
7126 // We have a second position recorded in the FullCodeGenerator to have
7127 // type feedback for the binary operation.
7128 BinaryOperation* operation = expr->binary_operation();
7129
7130 if (proxy != NULL) {
7131 Variable* var = proxy->var();
7132 if (var->mode() == LET) {
7133 return Bailout(kUnsupportedLetCompoundAssignment);
7134 }
7135
7136 CHECK_ALIVE(VisitForValue(operation));
7137
7138 switch (var->location()) {
7139 case VariableLocation::GLOBAL:
7140 case VariableLocation::UNALLOCATED:
7141 HandleGlobalVariableAssignment(var, Top(), expr->AssignmentSlot(),
7142 expr->AssignmentId());
7143 break;
7144
7145 case VariableLocation::PARAMETER:
7146 case VariableLocation::LOCAL:
7147 if (var->mode() == CONST_LEGACY) {
7148 return Bailout(kUnsupportedConstCompoundAssignment);
7149 }
7150 if (var->mode() == CONST) {
7151 return Bailout(kNonInitializerAssignmentToConst);
7152 }
7153 BindIfLive(var, Top());
7154 break;
7155
7156 case VariableLocation::CONTEXT: {
7157 // Bail out if we try to mutate a parameter value in a function
7158 // using the arguments object. We do not (yet) correctly handle the
7159 // arguments property of the function.
7160 if (current_info()->scope()->arguments() != NULL) {
7161 // Parameters will be allocated to context slots. We have no
7162 // direct way to detect that the variable is a parameter so we do
7163 // a linear search of the parameter variables.
7164 int count = current_info()->scope()->num_parameters();
7165 for (int i = 0; i < count; ++i) {
7166 if (var == current_info()->scope()->parameter(i)) {
7167 Bailout(kAssignmentToParameterFunctionUsesArgumentsObject);
7168 }
7169 }
7170 }
7171
7172 HStoreContextSlot::Mode mode;
7173
7174 switch (var->mode()) {
7175 case LET:
7176 mode = HStoreContextSlot::kCheckDeoptimize;
7177 break;
7178 case CONST:
7179 return Bailout(kNonInitializerAssignmentToConst);
7180 case CONST_LEGACY:
Ben Murdochc5610432016-08-08 18:44:38 +01007181 if (is_strict(function_language_mode())) {
7182 return Bailout(kNonInitializerAssignmentToConst);
7183 } else {
7184 return ast_context()->ReturnValue(Pop());
7185 }
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00007186 default:
7187 mode = HStoreContextSlot::kNoCheck;
7188 }
7189
7190 HValue* context = BuildContextChainWalk(var);
7191 HStoreContextSlot* instr = Add<HStoreContextSlot>(
7192 context, var->index(), mode, Top());
7193 if (instr->HasObservableSideEffects()) {
7194 Add<HSimulate>(expr->AssignmentId(), REMOVABLE_SIMULATE);
7195 }
7196 break;
7197 }
7198
7199 case VariableLocation::LOOKUP:
7200 return Bailout(kCompoundAssignmentToLookupSlot);
7201 }
7202 return ast_context()->ReturnValue(Pop());
7203
7204 } else if (prop != NULL) {
7205 CHECK_ALIVE(VisitForValue(prop->obj()));
7206 HValue* object = Top();
7207 HValue* key = NULL;
7208 if (!prop->key()->IsPropertyName() || prop->IsStringAccess()) {
7209 CHECK_ALIVE(VisitForValue(prop->key()));
7210 key = Top();
7211 }
7212
7213 CHECK_ALIVE(PushLoad(prop, object, key));
7214
7215 CHECK_ALIVE(VisitForValue(expr->value()));
7216 HValue* right = Pop();
7217 HValue* left = Pop();
7218
7219 Push(BuildBinaryOperation(operation, left, right, PUSH_BEFORE_SIMULATE));
7220
7221 BuildStore(expr, prop, expr->AssignmentSlot(), expr->id(),
7222 expr->AssignmentId(), expr->IsUninitialized());
7223 } else {
7224 return Bailout(kInvalidLhsInCompoundAssignment);
7225 }
7226}
7227
7228
// Visitor for (non-compound) assignment expressions. Dispatches to the
// property-store path or, for variables, emits the store appropriate to
// the variable's location (global, stack, context slot), bailing out of
// optimization for unsupported cases.
void HOptimizedGraphBuilder::VisitAssignment(Assignment* expr) {
  DCHECK(!HasStackOverflow());
  DCHECK(current_block() != NULL);
  DCHECK(current_block()->HasPredecessor());

  VariableProxy* proxy = expr->target()->AsVariableProxy();
  Property* prop = expr->target()->AsProperty();
  DCHECK(proxy == NULL || prop == NULL);

  if (expr->is_compound()) {
    HandleCompoundAssignment(expr);
    return;
  }

  if (prop != NULL) {
    HandlePropertyAssignment(expr);
  } else if (proxy != NULL) {
    Variable* var = proxy->var();

    // Const semantics: only initializing stores are allowed; in sloppy
    // mode a non-initializing legacy-const store evaluates the RHS but
    // does not assign.
    if (var->mode() == CONST) {
      if (expr->op() != Token::INIT) {
        return Bailout(kNonInitializerAssignmentToConst);
      }
    } else if (var->mode() == CONST_LEGACY) {
      if (expr->op() != Token::INIT) {
        if (is_strict(function_language_mode())) {
          return Bailout(kNonInitializerAssignmentToConst);
        } else {
          CHECK_ALIVE(VisitForValue(expr->value()));
          return ast_context()->ReturnValue(Pop());
        }
      }

      // TODO(adamk): Is this required? Legacy const variables are always
      // initialized before use.
      if (var->IsStackAllocated()) {
        // We insert a use of the old value to detect unsupported uses of const
        // variables (e.g. initialization inside a loop).
        HValue* old_value = environment()->Lookup(var);
        Add<HUseConst>(old_value);
      }
    }

    if (proxy->IsArguments()) return Bailout(kAssignmentToArguments);

    // Handle the assignment.
    switch (var->location()) {
      case VariableLocation::GLOBAL:
      case VariableLocation::UNALLOCATED:
        CHECK_ALIVE(VisitForValue(expr->value()));
        HandleGlobalVariableAssignment(var, Top(), expr->AssignmentSlot(),
                                       expr->AssignmentId());
        return ast_context()->ReturnValue(Pop());

      case VariableLocation::PARAMETER:
      case VariableLocation::LOCAL: {
        // Perform an initialization check for let declared variables
        // or parameters.
        if (var->mode() == LET && expr->op() == Token::ASSIGN) {
          HValue* env_value = environment()->Lookup(var);
          if (env_value == graph()->GetConstantHole()) {
            return Bailout(kAssignmentToLetVariableBeforeInitialization);
          }
        }
        // We do not allow the arguments object to occur in a context where it
        // may escape, but assignments to stack-allocated locals are
        // permitted.
        CHECK_ALIVE(VisitForValue(expr->value(), ARGUMENTS_ALLOWED));
        HValue* value = Pop();
        BindIfLive(var, value);
        return ast_context()->ReturnValue(value);
      }

      case VariableLocation::CONTEXT: {
        // Bail out if we try to mutate a parameter value in a function using
        // the arguments object. We do not (yet) correctly handle the
        // arguments property of the function.
        if (current_info()->scope()->arguments() != NULL) {
          // Parameters will rewrite to context slots. We have no direct way
          // to detect that the variable is a parameter.
          int count = current_info()->scope()->num_parameters();
          for (int i = 0; i < count; ++i) {
            if (var == current_info()->scope()->parameter(i)) {
              return Bailout(kAssignmentToParameterInArgumentsObject);
            }
          }
        }

        CHECK_ALIVE(VisitForValue(expr->value()));
        HStoreContextSlot::Mode mode;
        if (expr->op() == Token::ASSIGN) {
          switch (var->mode()) {
            case LET:
              // Let slots may still hold the hole; check at store time.
              mode = HStoreContextSlot::kCheckDeoptimize;
              break;
            case CONST:
              // This case is checked statically so no need to
              // perform checks here
              UNREACHABLE();
            case CONST_LEGACY:
              // Sloppy-mode legacy-const store: silent no-op.
              return ast_context()->ReturnValue(Pop());
            default:
              mode = HStoreContextSlot::kNoCheck;
          }
        } else {
          DCHECK_EQ(Token::INIT, expr->op());
          mode = HStoreContextSlot::kNoCheck;
        }

        HValue* context = BuildContextChainWalk(var);
        HStoreContextSlot* instr = Add<HStoreContextSlot>(
            context, var->index(), mode, Top());
        if (instr->HasObservableSideEffects()) {
          Add<HSimulate>(expr->AssignmentId(), REMOVABLE_SIMULATE);
        }
        return ast_context()->ReturnValue(Pop());
      }

      case VariableLocation::LOOKUP:
        return Bailout(kAssignmentToLOOKUPVariable);
    }
  } else {
    return Bailout(kInvalidLeftHandSideInAssignment);
  }
}
7354
7355
7356void HOptimizedGraphBuilder::VisitYield(Yield* expr) {
7357 // Generators are not optimized, so we should never get here.
7358 UNREACHABLE();
7359}
7360
7361
// Visitor for throw expressions: evaluates the exception value, calls
// Runtime::kThrow, and (outside inlined functions) terminates the block
// with an abnormal exit since control never returns.
void HOptimizedGraphBuilder::VisitThrow(Throw* expr) {
  DCHECK(!HasStackOverflow());
  DCHECK(current_block() != NULL);
  DCHECK(current_block()->HasPredecessor());
  if (!ast_context()->IsEffect()) {
    // The parser turns invalid left-hand sides in assignments into throw
    // statements, which may not be in effect contexts. We might still try
    // to optimize such functions; bail out now if we do.
    return Bailout(kInvalidLeftHandSideInAssignment);
  }
  CHECK_ALIVE(VisitForValue(expr->exception()));

  HValue* value = environment()->Pop();
  if (!top_info()->is_tracking_positions()) SetSourcePosition(expr->position());
  // Pass the exception as the single argument to the Throw runtime call.
  Add<HPushArguments>(value);
  Add<HCallRuntime>(Runtime::FunctionForId(Runtime::kThrow), 1);
  Add<HSimulate>(expr->id());

  // If the throw definitely exits the function, we can finish with a dummy
  // control flow at this point. This is not the case if the throw is inside
  // an inlined function which may be replaced.
  if (call_context() == NULL) {
    FinishExitCurrentBlock(New<HAbnormalExit>());
  }
}
7387
7388
7389HInstruction* HGraphBuilder::AddLoadStringInstanceType(HValue* string) {
7390 if (string->IsConstant()) {
7391 HConstant* c_string = HConstant::cast(string);
7392 if (c_string->HasStringValue()) {
7393 return Add<HConstant>(c_string->StringValue()->map()->instance_type());
7394 }
7395 }
7396 return Add<HLoadNamedField>(
7397 Add<HLoadNamedField>(string, nullptr, HObjectAccess::ForMap()), nullptr,
7398 HObjectAccess::ForMapInstanceType());
7399}
7400
7401
// Builds a string-length load and adds it to the current block.
HInstruction* HGraphBuilder::AddLoadStringLength(HValue* string) {
  return AddInstruction(BuildLoadStringLength(string));
}
7405
7406
7407HInstruction* HGraphBuilder::BuildLoadStringLength(HValue* string) {
7408 if (string->IsConstant()) {
7409 HConstant* c_string = HConstant::cast(string);
7410 if (c_string->HasStringValue()) {
7411 return New<HConstant>(c_string->StringValue()->length());
7412 }
7413 }
7414 return New<HLoadNamedField>(string, nullptr,
7415 HObjectAccess::ForStringLength());
7416}
7417
7418
// Creates a generic (IC-based) named load or store, attaching the type
// feedback vector and slot. Falls back to a *keyed* generic access when
// the feedback slot was recorded as keyed (a keyed access on a constant
// name converted to a named one), so the IC keeps sharing feedback with
// full code.
HInstruction* HOptimizedGraphBuilder::BuildNamedGeneric(
    PropertyAccessType access_type, Expression* expr, FeedbackVectorSlot slot,
    HValue* object, Handle<Name> name, HValue* value, bool is_uninitialized) {
  if (is_uninitialized) {
    // No feedback at all: soft-deopt so the IC can collect some.
    Add<HDeoptimize>(
        Deoptimizer::kInsufficientTypeFeedbackForGenericNamedAccess,
        Deoptimizer::SOFT);
  }
  if (access_type == LOAD) {
    Handle<TypeFeedbackVector> vector =
        handle(current_feedback_vector(), isolate());

    if (!expr->AsProperty()->key()->IsPropertyName()) {
      // It's possible that a keyed load of a constant string was converted
      // to a named load. Here, at the last minute, we need to make sure to
      // use a generic Keyed Load if we are using the type vector, because
      // it has to share information with full code.
      HConstant* key = Add<HConstant>(name);
      HLoadKeyedGeneric* result =
          New<HLoadKeyedGeneric>(object, key, PREMONOMORPHIC);
      result->SetVectorAndSlot(vector, slot);
      return result;
    }

    HLoadNamedGeneric* result =
        New<HLoadNamedGeneric>(object, name, PREMONOMORPHIC);
    result->SetVectorAndSlot(vector, slot);
    return result;
  } else {
    if (current_feedback_vector()->GetKind(slot) ==
        FeedbackVectorSlotKind::KEYED_STORE_IC) {
      // It's possible that a keyed store of a constant string was converted
      // to a named store. Here, at the last minute, we need to make sure to
      // use a generic Keyed Store if we are using the type vector, because
      // it has to share information with full code.
      HConstant* key = Add<HConstant>(name);
      HStoreKeyedGeneric* result = New<HStoreKeyedGeneric>(
          object, key, value, function_language_mode(), PREMONOMORPHIC);
      Handle<TypeFeedbackVector> vector =
          handle(current_feedback_vector(), isolate());
      result->SetVectorAndSlot(vector, slot);
      return result;
    }

    HStoreNamedGeneric* result = New<HStoreNamedGeneric>(
        object, name, value, function_language_mode(), PREMONOMORPHIC);
    Handle<TypeFeedbackVector> vector =
        handle(current_feedback_vector(), isolate());
    result->SetVectorAndSlot(vector, slot);
    return result;
  }
}
7471
7472
7473HInstruction* HOptimizedGraphBuilder::BuildKeyedGeneric(
7474 PropertyAccessType access_type, Expression* expr, FeedbackVectorSlot slot,
7475 HValue* object, HValue* key, HValue* value) {
7476 if (access_type == LOAD) {
7477 InlineCacheState initial_state = expr->AsProperty()->GetInlineCacheState();
Ben Murdoch097c5b22016-05-18 11:27:45 +01007478 HLoadKeyedGeneric* result =
7479 New<HLoadKeyedGeneric>(object, key, initial_state);
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00007480 // HLoadKeyedGeneric with vector ics benefits from being encoded as
7481 // MEGAMORPHIC because the vector/slot combo becomes unnecessary.
7482 if (initial_state != MEGAMORPHIC) {
7483 // We need to pass vector information.
7484 Handle<TypeFeedbackVector> vector =
7485 handle(current_feedback_vector(), isolate());
7486 result->SetVectorAndSlot(vector, slot);
7487 }
7488 return result;
7489 } else {
7490 HStoreKeyedGeneric* result = New<HStoreKeyedGeneric>(
7491 object, key, value, function_language_mode(), PREMONOMORPHIC);
7492 Handle<TypeFeedbackVector> vector =
7493 handle(current_feedback_vector(), isolate());
7494 result->SetVectorAndSlot(vector, slot);
7495 return result;
7496 }
7497}
7498
7499
7500LoadKeyedHoleMode HOptimizedGraphBuilder::BuildKeyedHoleMode(Handle<Map> map) {
7501 // Loads from a "stock" fast holey double arrays can elide the hole check.
7502 // Loads from a "stock" fast holey array can convert the hole to undefined
7503 // with impunity.
7504 LoadKeyedHoleMode load_mode = NEVER_RETURN_HOLE;
7505 bool holey_double_elements =
7506 *map == isolate()->get_initial_js_array_map(FAST_HOLEY_DOUBLE_ELEMENTS);
7507 bool holey_elements =
7508 *map == isolate()->get_initial_js_array_map(FAST_HOLEY_ELEMENTS);
7509 if ((holey_double_elements || holey_elements) &&
7510 isolate()->IsFastArrayConstructorPrototypeChainIntact()) {
7511 load_mode =
7512 holey_double_elements ? ALLOW_RETURN_HOLE : CONVERT_HOLE_TO_UNDEFINED;
7513
7514 Handle<JSObject> prototype(JSObject::cast(map->prototype()), isolate());
7515 Handle<JSObject> object_prototype = isolate()->initial_object_prototype();
7516 BuildCheckPrototypeMaps(prototype, object_prototype);
7517 graph()->MarkDependsOnEmptyArrayProtoElements();
7518 }
7519 return load_mode;
7520}
7521
7522
// Builds an element access (load or store) for a receiver known to have a
// single map: checks the map (reusing |dependency| if given), adds the
// prototype-chain checks stores require, and emits the unchecked access.
HInstruction* HOptimizedGraphBuilder::BuildMonomorphicElementAccess(
    HValue* object,
    HValue* key,
    HValue* val,
    HValue* dependency,
    Handle<Map> map,
    PropertyAccessType access_type,
    KeyedAccessStoreMode store_mode) {
  HCheckMaps* checked_object = Add<HCheckMaps>(object, map, dependency);

  if (access_type == STORE && map->prototype()->IsJSObject()) {
    // monomorphic stores need a prototype chain check because shape
    // changes could allow callbacks on elements in the chain that
    // aren't compatible with monomorphic keyed stores.
    PrototypeIterator iter(map);
    JSObject* holder = NULL;
    // Walk to the end of the prototype chain to find its last holder.
    while (!iter.IsAtEnd()) {
      // JSProxies can't occur here because we wouldn't have installed a
      // non-generic IC if there were any.
      holder = *PrototypeIterator::GetCurrent<JSObject>(iter);
      iter.Advance();
    }
    DCHECK(holder && holder->IsJSObject());

    BuildCheckPrototypeMaps(handle(JSObject::cast(map->prototype())),
                            Handle<JSObject>(holder));
  }

  LoadKeyedHoleMode load_mode = BuildKeyedHoleMode(map);
  return BuildUncheckedMonomorphicElementAccess(
      checked_object, key, val,
      map->instance_type() == JS_ARRAY_TYPE,
      map->elements_kind(), access_type,
      load_mode, store_mode);
}
7558
7559
7560static bool CanInlineElementAccess(Handle<Map> map) {
Ben Murdoch097c5b22016-05-18 11:27:45 +01007561 return map->IsJSObjectMap() &&
7562 (map->has_fast_elements() || map->has_fixed_typed_array_elements()) &&
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00007563 !map->has_indexed_interceptor() && !map->is_access_check_needed();
7564}
7565
7566
// Attempts to collapse a polymorphic element load over |maps| into a
// single access using the most general compatible elements kind. Returns
// NULL when the maps are too heterogeneous to consolidate.
HInstruction* HOptimizedGraphBuilder::TryBuildConsolidatedElementLoad(
    HValue* object,
    HValue* key,
    HValue* val,
    SmallMapList* maps) {
  // For polymorphic loads of similar elements kinds (i.e. all tagged or all
  // double), always use the "worst case" code without a transition. This is
  // much faster than transitioning the elements to the worst case, trading a
  // HTransitionElements for a HCheckMaps, and avoiding mutation of the array.
  bool has_double_maps = false;
  bool has_smi_or_object_maps = false;
  bool has_js_array_access = false;
  bool has_non_js_array_access = false;
  bool has_seen_holey_elements = false;
  Handle<Map> most_general_consolidated_map;
  for (int i = 0; i < maps->length(); ++i) {
    Handle<Map> map = maps->at(i);
    if (!CanInlineElementAccess(map)) return NULL;
    // Don't allow mixing of JSArrays with JSObjects.
    if (map->instance_type() == JS_ARRAY_TYPE) {
      if (has_non_js_array_access) return NULL;
      has_js_array_access = true;
    } else if (has_js_array_access) {
      return NULL;
    } else {
      has_non_js_array_access = true;
    }
    // Don't allow mixed, incompatible elements kinds.
    if (map->has_fast_double_elements()) {
      if (has_smi_or_object_maps) return NULL;
      has_double_maps = true;
    } else if (map->has_fast_smi_or_object_elements()) {
      if (has_double_maps) return NULL;
      has_smi_or_object_maps = true;
    } else {
      return NULL;
    }
    // Remember if we've ever seen holey elements.
    if (IsHoleyElementsKind(map->elements_kind())) {
      has_seen_holey_elements = true;
    }
    // Remember the most general elements kind, the code for its load will
    // properly handle all of the more specific cases.
    if ((i == 0) || IsMoreGeneralElementsKindTransition(
            most_general_consolidated_map->elements_kind(),
            map->elements_kind())) {
      most_general_consolidated_map = map;
    }
  }
  // An empty |maps| list sets neither flag; nothing to consolidate.
  if (!has_double_maps && !has_smi_or_object_maps) return NULL;

  HCheckMaps* checked_object = Add<HCheckMaps>(object, maps);
  // FAST_ELEMENTS is considered more general than FAST_HOLEY_SMI_ELEMENTS.
  // If we've seen both, the consolidated load must use FAST_HOLEY_ELEMENTS.
  ElementsKind consolidated_elements_kind = has_seen_holey_elements
      ? GetHoleyElementsKind(most_general_consolidated_map->elements_kind())
      : most_general_consolidated_map->elements_kind();
  LoadKeyedHoleMode load_mode = NEVER_RETURN_HOLE;
  if (has_seen_holey_elements) {
    // Make sure that all of the maps we are handling have the initial array
    // prototype.
    bool saw_non_array_prototype = false;
    for (int i = 0; i < maps->length(); ++i) {
      Handle<Map> map = maps->at(i);
      if (map->prototype() != *isolate()->initial_array_prototype()) {
        // We can't guarantee that loading the hole is safe. The prototype may
        // have an element at this position.
        saw_non_array_prototype = true;
        break;
      }
    }

    if (!saw_non_array_prototype) {
      Handle<Map> holey_map = handle(
          isolate()->get_initial_js_array_map(consolidated_elements_kind));
      load_mode = BuildKeyedHoleMode(holey_map);
      if (load_mode != NEVER_RETURN_HOLE) {
        for (int i = 0; i < maps->length(); ++i) {
          Handle<Map> map = maps->at(i);
          // The prototype check was already done for the holey map in
          // BuildKeyedHoleMode.
          if (!map.is_identical_to(holey_map)) {
            Handle<JSObject> prototype(JSObject::cast(map->prototype()),
                                       isolate());
            Handle<JSObject> object_prototype =
                isolate()->initial_object_prototype();
            BuildCheckPrototypeMaps(prototype, object_prototype);
          }
        }
      }
    }
  }
  HInstruction* instr = BuildUncheckedMonomorphicElementAccess(
      checked_object, key, val,
      most_general_consolidated_map->instance_type() == JS_ARRAY_TYPE,
      consolidated_elements_kind, LOAD, load_mode, STANDARD_STORE);
  return instr;
}
7665
7666
// Builds a polymorphic element access over the feedback |maps|: tries a
// consolidated load first, otherwise applies elements-kind transitions,
// then emits either a single monomorphic access (one map left) or a
// map-dispatch chain of per-map accesses joined in a common block, with a
// hard deopt if no map matches. Sets *has_side_effects so the caller can
// add the correct Simulate.
HValue* HOptimizedGraphBuilder::HandlePolymorphicElementAccess(
    Expression* expr, FeedbackVectorSlot slot, HValue* object, HValue* key,
    HValue* val, SmallMapList* maps, PropertyAccessType access_type,
    KeyedAccessStoreMode store_mode, bool* has_side_effects) {
  *has_side_effects = false;
  BuildCheckHeapObject(object);

  if (access_type == LOAD) {
    HInstruction* consolidated_load =
        TryBuildConsolidatedElementLoad(object, key, val, maps);
    if (consolidated_load != NULL) {
      *has_side_effects |= consolidated_load->HasObservableSideEffects();
      return consolidated_load;
    }
  }

  // Elements_kind transition support.
  MapHandleList transition_target(maps->length());
  // Collect possible transition targets.
  MapHandleList possible_transitioned_maps(maps->length());
  for (int i = 0; i < maps->length(); ++i) {
    Handle<Map> map = maps->at(i);
    // Loads from strings or loads with a mix of string and non-string maps
    // shouldn't be handled polymorphically.
    DCHECK(access_type != LOAD || !map->IsStringMap());
    ElementsKind elements_kind = map->elements_kind();
    if (CanInlineElementAccess(map) && IsFastElementsKind(elements_kind) &&
        elements_kind != GetInitialFastElementsKind()) {
      possible_transitioned_maps.Add(map);
    }
    if (IsSloppyArgumentsElements(elements_kind)) {
      // Sloppy-arguments elements force the fully generic path.
      HInstruction* result =
          BuildKeyedGeneric(access_type, expr, slot, object, key, val);
      *has_side_effects = result->HasObservableSideEffects();
      return AddInstruction(result);
    }
  }
  // Get transition target for each map (NULL == no transition).
  for (int i = 0; i < maps->length(); ++i) {
    Handle<Map> map = maps->at(i);
    Map* transitioned_map =
        map->FindElementsKindTransitionedMap(&possible_transitioned_maps);
    if (transitioned_map != nullptr) {
      transition_target.Add(handle(transitioned_map));
    } else {
      transition_target.Add(Handle<Map>());
    }
  }

  // Emit the transitions; maps without a transition target are handled
  // directly below.
  MapHandleList untransitionable_maps(maps->length());
  HTransitionElementsKind* transition = NULL;
  for (int i = 0; i < maps->length(); ++i) {
    Handle<Map> map = maps->at(i);
    DCHECK(map->IsMap());
    if (!transition_target.at(i).is_null()) {
      DCHECK(Map::IsValidElementsTransition(
          map->elements_kind(),
          transition_target.at(i)->elements_kind()));
      transition = Add<HTransitionElementsKind>(object, map,
                                                transition_target.at(i));
    } else {
      untransitionable_maps.Add(map);
    }
  }

  // If only one map is left after transitioning, handle this case
  // monomorphically.
  DCHECK(untransitionable_maps.length() >= 1);
  if (untransitionable_maps.length() == 1) {
    Handle<Map> untransitionable_map = untransitionable_maps[0];
    HInstruction* instr = NULL;
    if (!CanInlineElementAccess(untransitionable_map)) {
      instr = AddInstruction(
          BuildKeyedGeneric(access_type, expr, slot, object, key, val));
    } else {
      instr = BuildMonomorphicElementAccess(
          object, key, val, transition, untransitionable_map, access_type,
          store_mode);
    }
    *has_side_effects |= instr->HasObservableSideEffects();
    return access_type == STORE ? val : instr;
  }

  // Polymorphic dispatch: compare the object's map against each candidate
  // and branch to a per-map access block; all cases flow into |join|.
  HBasicBlock* join = graph()->CreateBasicBlock();

  for (int i = 0; i < untransitionable_maps.length(); ++i) {
    Handle<Map> map = untransitionable_maps[i];
    ElementsKind elements_kind = map->elements_kind();
    HBasicBlock* this_map = graph()->CreateBasicBlock();
    HBasicBlock* other_map = graph()->CreateBasicBlock();
    HCompareMap* mapcompare =
        New<HCompareMap>(object, map, this_map, other_map);
    FinishCurrentBlock(mapcompare);

    set_current_block(this_map);
    HInstruction* access = NULL;
    if (!CanInlineElementAccess(map)) {
      access = AddInstruction(
          BuildKeyedGeneric(access_type, expr, slot, object, key, val));
    } else {
      DCHECK(IsFastElementsKind(elements_kind) ||
             IsFixedTypedArrayElementsKind(elements_kind));
      LoadKeyedHoleMode load_mode = BuildKeyedHoleMode(map);
      // Happily, mapcompare is a checked object.
      access = BuildUncheckedMonomorphicElementAccess(
          mapcompare, key, val,
          map->instance_type() == JS_ARRAY_TYPE,
          elements_kind, access_type,
          load_mode,
          store_mode);
    }
    *has_side_effects |= access->HasObservableSideEffects();
    // The caller will use has_side_effects and add a correct Simulate.
    access->SetFlag(HValue::kHasNoObservableSideEffects);
    if (access_type == LOAD) {
      Push(access);
    }
    NoObservableSideEffectsScope scope(this);
    GotoNoSimulate(join);
    set_current_block(other_map);
  }

  // Ensure that we visited at least one map above that goes to join. This is
  // necessary because FinishExitWithHardDeoptimization does an AbnormalExit
  // rather than joining the join block. If this becomes an issue, insert a
  // generic access in the case length() == 0.
  DCHECK(join->predecessors()->length() > 0);
  // Deopt if none of the cases matched.
  NoObservableSideEffectsScope scope(this);
  FinishExitWithHardDeoptimization(
      Deoptimizer::kUnknownMapInPolymorphicElementAccess);
  set_current_block(join);
  return access_type == STORE ? val : Pop();
}
7801
7802
// Builds the graph for a keyed property access (load or store). Strategy, in
// order:
//   (1) If the key is (or feedback says it will be) a constant name-like
//       value (non-index string or symbol), compile it as a *named* access.
//   (2) With usable receiver-map feedback, emit a monomorphic or polymorphic
//       element access.
//   (3) Otherwise fall back to the generic keyed IC (with a soft deopt when
//       there was no type feedback at all).
// |val| is only used for stores; |*has_side_effects| tells the caller whether
// a simulate must be added after the access.
HValue* HOptimizedGraphBuilder::HandleKeyedElementAccess(
    HValue* obj, HValue* key, HValue* val, Expression* expr,
    FeedbackVectorSlot slot, BailoutId ast_id, BailoutId return_id,
    PropertyAccessType access_type, bool* has_side_effects) {
  // A keyed name access with type feedback may contain the name.
  Handle<TypeFeedbackVector> vector =
      handle(current_feedback_vector(), isolate());
  HValue* expected_key = key;
  if (!key->ActualValue()->IsConstant()) {
    // The key is not statically known; ask the IC's feedback whether the
    // access always used one particular name.
    Name* name = nullptr;
    if (access_type == LOAD) {
      KeyedLoadICNexus nexus(vector, slot);
      name = nexus.FindFirstName();
    } else {
      KeyedStoreICNexus nexus(vector, slot);
      name = nexus.FindFirstName();
    }
    if (name != nullptr) {
      Handle<Name> handle_name(name);
      expected_key = Add<HConstant>(handle_name);
      // We need a check against the key.
      bool in_new_space = isolate()->heap()->InNewSpace(*handle_name);
      Unique<Name> unique_name = Unique<Name>::CreateUninitialized(handle_name);
      Add<HCheckValue>(key, unique_name, in_new_space);
    }
  }
  if (expected_key->ActualValue()->IsConstant()) {
    Handle<Object> constant =
        HConstant::cast(expected_key->ActualValue())->handle(isolate());
    uint32_t array_index;
    // Only treat the access as named if the constant cannot be an array
    // index: a non-index string, or a symbol.
    if ((constant->IsString() &&
         !Handle<String>::cast(constant)->AsArrayIndex(&array_index)) ||
        constant->IsSymbol()) {
      if (!constant->IsUniqueName()) {
        constant = isolate()->factory()->InternalizeString(
            Handle<String>::cast(constant));
      }
      HValue* access =
          BuildNamedAccess(access_type, ast_id, return_id, expr, slot, obj,
                           Handle<Name>::cast(constant), val, false);
      // BuildNamedAccess may return NULL (polymorphic path already handled
      // the continuation), a phi, or an already-linked instruction; in those
      // cases nothing new with side effects was appended here.
      if (access == NULL || access->IsPhi() ||
          HInstruction::cast(access)->IsLinked()) {
        *has_side_effects = false;
      } else {
        HInstruction* instr = HInstruction::cast(access);
        AddInstruction(instr);
        *has_side_effects = instr->HasObservableSideEffects();
      }
      return access;
    }
  }

  DCHECK(!expr->IsPropertyName());
  HInstruction* instr = NULL;

  SmallMapList* maps;
  bool monomorphic = ComputeReceiverTypes(expr, obj, &maps, this);

  bool force_generic = false;
  if (expr->GetKeyType() == PROPERTY) {
    // Non-Generic accesses assume that elements are being accessed, and will
    // deopt for non-index keys, which the IC knows will occur.
    // TODO(jkummerow): Consider adding proper support for property accesses.
    force_generic = true;
    monomorphic = false;
  } else if (access_type == STORE &&
             (monomorphic || (maps != NULL && !maps->is_empty()))) {
    // Stores can't be mono/polymorphic if their prototype chain has dictionary
    // elements. However a receiver map that has dictionary elements itself
    // should be left to normal mono/poly behavior (the other maps may benefit
    // from highly optimized stores).
    for (int i = 0; i < maps->length(); i++) {
      Handle<Map> current_map = maps->at(i);
      if (current_map->DictionaryElementsInPrototypeChainOnly()) {
        force_generic = true;
        monomorphic = false;
        break;
      }
    }
  } else if (access_type == LOAD && !monomorphic &&
             (maps != NULL && !maps->is_empty())) {
    // Polymorphic loads have to go generic if any of the maps are strings.
    // If some, but not all of the maps are strings, we should go generic
    // because polymorphic access wants to key on ElementsKind and isn't
    // compatible with strings.
    for (int i = 0; i < maps->length(); i++) {
      Handle<Map> current_map = maps->at(i);
      if (current_map->IsStringMap()) {
        force_generic = true;
        break;
      }
    }
  }

  if (monomorphic) {
    Handle<Map> map = maps->first();
    if (!CanInlineElementAccess(map)) {
      instr = AddInstruction(
          BuildKeyedGeneric(access_type, expr, slot, obj, key, val));
    } else {
      BuildCheckHeapObject(obj);
      instr = BuildMonomorphicElementAccess(
          obj, key, val, NULL, map, access_type, expr->GetStoreMode());
    }
  } else if (!force_generic && (maps != NULL && !maps->is_empty())) {
    return HandlePolymorphicElementAccess(expr, slot, obj, key, val, maps,
                                          access_type, expr->GetStoreMode(),
                                          has_side_effects);
  } else {
    // Generic fallback. If there is no type feedback at all, deopt softly so
    // the function can be re-optimized once feedback exists.
    if (access_type == STORE) {
      if (expr->IsAssignment() &&
          expr->AsAssignment()->HasNoTypeInformation()) {
        Add<HDeoptimize>(Deoptimizer::kInsufficientTypeFeedbackForKeyedStore,
                         Deoptimizer::SOFT);
      }
    } else {
      if (expr->AsProperty()->HasNoTypeInformation()) {
        Add<HDeoptimize>(Deoptimizer::kInsufficientTypeFeedbackForKeyedLoad,
                         Deoptimizer::SOFT);
      }
    }
    instr = AddInstruction(
        BuildKeyedGeneric(access_type, expr, slot, obj, key, val));
  }
  *has_side_effects = instr->HasObservableSideEffects();
  return instr;
}
7930
7931
7932void HOptimizedGraphBuilder::EnsureArgumentsArePushedForAccess() {
7933 // Outermost function already has arguments on the stack.
7934 if (function_state()->outer() == NULL) return;
7935
7936 if (function_state()->arguments_pushed()) return;
7937
7938 // Push arguments when entering inlined function.
7939 HEnterInlined* entry = function_state()->entry();
7940 entry->set_arguments_pushed();
7941
7942 HArgumentsObject* arguments = entry->arguments_object();
7943 const ZoneList<HValue*>* arguments_values = arguments->arguments_values();
7944
7945 HInstruction* insert_after = entry;
7946 for (int i = 0; i < arguments_values->length(); i++) {
7947 HValue* argument = arguments_values->at(i);
7948 HInstruction* push_argument = New<HPushArguments>(argument);
7949 push_argument->InsertAfter(insert_after);
7950 insert_after = push_argument;
7951 }
7952
7953 HArgumentsElements* arguments_elements = New<HArgumentsElements>(true);
7954 arguments_elements->ClearFlag(HValue::kUseGVN);
7955 arguments_elements->InsertAfter(insert_after);
7956 function_state()->set_arguments_elements(arguments_elements);
7957}
7958
7959
// Tries to compile a property access whose receiver is the (stack-allocated)
// arguments object without materializing it. Handles exactly two patterns:
//   - arguments.length: an HArgumentsLength in the outermost frame, or a
//     constant (the inlined frame's known parameter count minus receiver);
//   - arguments[key]: a bounds-checked HAccessArgumentsAt.
// Returns false (emitting nothing) when the receiver is not the arguments
// object or the pattern is unsupported, leaving the access to the normal
// path. On success the result has already been handed to the ast context.
bool HOptimizedGraphBuilder::TryArgumentsAccess(Property* expr) {
  VariableProxy* proxy = expr->obj()->AsVariableProxy();
  if (proxy == NULL) return false;
  if (!proxy->var()->IsStackAllocated()) return false;
  if (!environment()->Lookup(proxy->var())->CheckFlag(HValue::kIsArguments)) {
    return false;
  }

  HInstruction* result = NULL;
  if (expr->key()->IsPropertyName()) {
    // Named access: only "length" is supported.
    Handle<String> name = expr->key()->AsLiteral()->AsPropertyName();
    if (!String::Equals(name, isolate()->factory()->length_string())) {
      return false;
    }

    if (function_state()->outer() == NULL) {
      HInstruction* elements = Add<HArgumentsElements>(false);
      result = New<HArgumentsLength>(elements);
    } else {
      // Number of arguments without receiver.
      int argument_count = environment()->
          arguments_environment()->parameter_count() - 1;
      result = New<HConstant>(argument_count);
    }
  } else {
    // Keyed access. NOTE: returns true even on bailout — visiting the
    // sub-expressions may have set the stack-overflow flag, and the access
    // must not additionally be retried through the generic path.
    CHECK_ALIVE_OR_RETURN(VisitForValue(expr->obj(), ARGUMENTS_ALLOWED), true);
    CHECK_ALIVE_OR_RETURN(VisitForValue(expr->key()), true);
    HValue* key = Pop();
    Drop(1);  // Arguments object.
    if (function_state()->outer() == NULL) {
      HInstruction* elements = Add<HArgumentsElements>(false);
      HInstruction* length = Add<HArgumentsLength>(elements);
      HInstruction* checked_key = Add<HBoundsCheck>(key, length);
      result = New<HAccessArgumentsAt>(elements, length, checked_key);
    } else {
      EnsureArgumentsArePushedForAccess();

      // Number of arguments without receiver.
      HInstruction* elements = function_state()->arguments_elements();
      int argument_count = environment()->
          arguments_environment()->parameter_count() - 1;
      HInstruction* length = Add<HConstant>(argument_count);
      HInstruction* checked_key = Add<HBoundsCheck>(key, length);
      result = New<HAccessArgumentsAt>(elements, length, checked_key);
    }
  }
  ast_context()->ReturnInstruction(result, expr->id());
  return true;
}
8009
8010
// Builds a named property access (load or store) on |object|. With receiver
// map feedback: if all maps agree on a single access, emits a monomorphic
// fast access guarded by a heap-object check plus either an instance-type
// check (all-string maps) or a map check; otherwise delegates to the
// polymorphic handler and returns NULL to signal that the continuation has
// already been emitted. With no maps, falls back to the generic named IC.
// |value| is only meaningful for stores.
HValue* HOptimizedGraphBuilder::BuildNamedAccess(
    PropertyAccessType access, BailoutId ast_id, BailoutId return_id,
    Expression* expr, FeedbackVectorSlot slot, HValue* object,
    Handle<Name> name, HValue* value, bool is_uninitialized) {
  SmallMapList* maps;
  ComputeReceiverTypes(expr, object, &maps, this);
  DCHECK(maps != NULL);

  if (maps->length() > 0) {
    PropertyAccessInfo info(this, access, maps->first(), name);
    if (!info.CanAccessAsMonomorphic(maps)) {
      // Polymorphic path handles the result/continuation itself; NULL tells
      // the caller not to add or return anything further.
      HandlePolymorphicNamedFieldAccess(access, expr, slot, ast_id, return_id,
                                        object, value, maps, name);
      return NULL;
    }

    HValue* checked_object;
    // Type::Number() is only supported by polymorphic load/call handling.
    DCHECK(!info.IsNumberType());
    BuildCheckHeapObject(object);
    if (AreStringTypes(maps)) {
      checked_object =
          Add<HCheckInstanceType>(object, HCheckInstanceType::IS_STRING);
    } else {
      checked_object = Add<HCheckMaps>(object, maps);
    }
    return BuildMonomorphicAccess(
        &info, object, checked_object, value, ast_id, return_id);
  }

  return BuildNamedGeneric(access, expr, slot, object, name, value,
                           is_uninitialized);
}
8044
8045
8046void HOptimizedGraphBuilder::PushLoad(Property* expr,
8047 HValue* object,
8048 HValue* key) {
8049 ValueContext for_value(this, ARGUMENTS_NOT_ALLOWED);
8050 Push(object);
8051 if (key != NULL) Push(key);
8052 BuildLoad(expr, expr->LoadId());
8053}
8054
8055
// Emits the load for a property expression whose operands are already on the
// environment stack (pushed by the caller):
//   - string[index] becomes charCodeAt + StringCharFromCode;
//   - named accesses go through BuildNamedAccess;
//   - keyed accesses go through HandleKeyedElementAccess, adding a removable
//     simulate when the access had observable side effects.
// The result is delivered to the current ast context.
void HOptimizedGraphBuilder::BuildLoad(Property* expr,
                                       BailoutId ast_id) {
  HInstruction* instr = NULL;
  if (expr->IsStringAccess() && expr->GetKeyType() == ELEMENT) {
    HValue* index = Pop();
    HValue* string = Pop();
    HInstruction* char_code = BuildStringCharCodeAt(string, index);
    AddInstruction(char_code);
    instr = NewUncasted<HStringCharFromCode>(char_code);

  } else if (expr->key()->IsPropertyName()) {
    Handle<String> name = expr->key()->AsLiteral()->AsPropertyName();
    HValue* object = Pop();

    HValue* value = BuildNamedAccess(LOAD, ast_id, expr->LoadId(), expr,
                                     expr->PropertyFeedbackSlot(), object, name,
                                     NULL, expr->IsUninitialized());
    // NULL: the polymorphic handler already returned a value to the context.
    if (value == NULL) return;
    if (value->IsPhi()) return ast_context()->ReturnValue(value);
    instr = HInstruction::cast(value);
    // Already-linked instructions must not be added a second time below.
    if (instr->IsLinked()) return ast_context()->ReturnValue(instr);

  } else {
    HValue* key = Pop();
    HValue* obj = Pop();

    bool has_side_effects = false;
    HValue* load = HandleKeyedElementAccess(
        obj, key, NULL, expr, expr->PropertyFeedbackSlot(), ast_id,
        expr->LoadId(), LOAD, &has_side_effects);
    if (has_side_effects) {
      if (ast_context()->IsEffect()) {
        Add<HSimulate>(ast_id, REMOVABLE_SIMULATE);
      } else {
        // Keep the loaded value live across the simulate.
        Push(load);
        Add<HSimulate>(ast_id, REMOVABLE_SIMULATE);
        Drop(1);
      }
    }
    if (load == NULL) return;
    return ast_context()->ReturnValue(load);
  }
  return ast_context()->ReturnInstruction(instr, ast_id);
}
8100
8101
8102void HOptimizedGraphBuilder::VisitProperty(Property* expr) {
8103 DCHECK(!HasStackOverflow());
8104 DCHECK(current_block() != NULL);
8105 DCHECK(current_block()->HasPredecessor());
8106
8107 if (TryArgumentsAccess(expr)) return;
8108
8109 CHECK_ALIVE(VisitForValue(expr->obj()));
8110 if (!expr->key()->IsPropertyName() || expr->IsStringAccess()) {
8111 CHECK_ALIVE(VisitForValue(expr->key()));
8112 }
8113
8114 BuildLoad(expr, expr->id());
8115}
8116
8117
8118HInstruction* HGraphBuilder::BuildConstantMapCheck(Handle<JSObject> constant) {
8119 HCheckMaps* check = Add<HCheckMaps>(
8120 Add<HConstant>(constant), handle(constant->map()));
8121 check->ClearDependsOnFlag(kElementsKind);
8122 return check;
8123}
8124
8125
// Walks the prototype chain starting at |prototype| (inclusive) and emits a
// constant map check for each object visited, up to and including |holder|.
// If |holder| is null, the whole chain is checked and NULL is returned when
// the chain ends; otherwise the check for |holder| itself is the last one
// emitted and is returned.
HInstruction* HGraphBuilder::BuildCheckPrototypeMaps(Handle<JSObject> prototype,
                                                     Handle<JSObject> holder) {
  PrototypeIterator iter(isolate(), prototype,
                         PrototypeIterator::START_AT_RECEIVER);
  while (holder.is_null() ||
         !PrototypeIterator::GetCurrent(iter).is_identical_to(holder)) {
    BuildConstantMapCheck(PrototypeIterator::GetCurrent<JSObject>(iter));
    iter.Advance();
    if (iter.IsAtEnd()) {
      // Chain exhausted (only reachable when holder is null).
      return NULL;
    }
  }
  // Current object is |holder|; check it too and hand the check back.
  return BuildConstantMapCheck(PrototypeIterator::GetCurrent<JSObject>(iter));
}
8140
8141
8142void HOptimizedGraphBuilder::AddCheckPrototypeMaps(Handle<JSObject> holder,
8143 Handle<Map> receiver_map) {
8144 if (!holder.is_null()) {
8145 Handle<JSObject> prototype(JSObject::cast(receiver_map->prototype()));
8146 BuildCheckPrototypeMaps(prototype, holder);
8147 }
8148}
8149
// Emits a runtime check that |object| is callable: if it is a Smi, or the
// kIsCallable bit in its map's bit field is clear, the
// Runtime::kThrowCalledNonCallable runtime function is invoked. Used before
// emitting (tail) calls whose target is not statically known to be callable.
void HOptimizedGraphBuilder::BuildEnsureCallable(HValue* object) {
  NoObservableSideEffectsScope scope(this);
  const Runtime::Function* throw_called_non_callable =
      Runtime::FunctionForId(Runtime::kThrowCalledNonCallable);

  // not-callable <=> is-Smi OR map bit field lacks the kIsCallable bit.
  IfBuilder is_not_function(this);
  HValue* smi_check = is_not_function.If<HIsSmiAndBranch>(object);
  is_not_function.Or();
  HValue* map = AddLoadMap(object, smi_check);
  HValue* bit_field =
      Add<HLoadNamedField>(map, nullptr, HObjectAccess::ForMapBitField());
  // Mask down to the single "is callable" bit before comparing.
  HValue* bit_field_masked = AddUncasted<HBitwise>(
      Token::BIT_AND, bit_field, Add<HConstant>(1 << Map::kIsCallable));
  is_not_function.IfNot<HCompareNumericAndBranch>(
      bit_field_masked, Add<HConstant>(1 << Map::kIsCallable), Token::EQ);
  is_not_function.Then();
  {
    Add<HPushArguments>(object);
    Add<HCallRuntime>(throw_called_non_callable, 1);
  }
  is_not_function.End();
}
8172
Ben Murdochda12d292016-06-02 14:46:10 +01008173HInstruction* HOptimizedGraphBuilder::NewCallFunction(
8174 HValue* function, int argument_count, TailCallMode syntactic_tail_call_mode,
8175 ConvertReceiverMode convert_mode, TailCallMode tail_call_mode) {
8176 if (syntactic_tail_call_mode == TailCallMode::kAllow) {
8177 BuildEnsureCallable(function);
8178 } else {
8179 DCHECK_EQ(TailCallMode::kDisallow, tail_call_mode);
8180 }
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00008181 HValue* arity = Add<HConstant>(argument_count - 1);
8182
Ben Murdochda12d292016-06-02 14:46:10 +01008183 HValue* op_vals[] = {context(), function, arity};
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00008184
Ben Murdochda12d292016-06-02 14:46:10 +01008185 Callable callable =
8186 CodeFactory::Call(isolate(), convert_mode, tail_call_mode);
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00008187 HConstant* stub = Add<HConstant>(callable.code());
8188
8189 return New<HCallWithDescriptor>(stub, argument_count, callable.descriptor(),
Ben Murdochc5610432016-08-08 18:44:38 +01008190 ArrayVector(op_vals),
Ben Murdochda12d292016-06-02 14:46:10 +01008191 syntactic_tail_call_mode);
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00008192}
8193
Ben Murdochda12d292016-06-02 14:46:10 +01008194HInstruction* HOptimizedGraphBuilder::NewCallFunctionViaIC(
8195 HValue* function, int argument_count, TailCallMode syntactic_tail_call_mode,
8196 ConvertReceiverMode convert_mode, TailCallMode tail_call_mode,
8197 FeedbackVectorSlot slot) {
8198 if (syntactic_tail_call_mode == TailCallMode::kAllow) {
8199 BuildEnsureCallable(function);
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00008200 } else {
Ben Murdochda12d292016-06-02 14:46:10 +01008201 DCHECK_EQ(TailCallMode::kDisallow, tail_call_mode);
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00008202 }
Ben Murdochda12d292016-06-02 14:46:10 +01008203 int arity = argument_count - 1;
8204 Handle<TypeFeedbackVector> vector(current_feedback_vector(), isolate());
8205 HValue* index_val = Add<HConstant>(vector->GetIndex(slot));
8206 HValue* vector_val = Add<HConstant>(vector);
8207
8208 HValue* op_vals[] = {context(), function, index_val, vector_val};
8209
8210 Callable callable = CodeFactory::CallICInOptimizedCode(
8211 isolate(), arity, convert_mode, tail_call_mode);
8212 HConstant* stub = Add<HConstant>(callable.code());
8213
8214 return New<HCallWithDescriptor>(stub, argument_count, callable.descriptor(),
Ben Murdochc5610432016-08-08 18:44:38 +01008215 ArrayVector(op_vals),
Ben Murdochda12d292016-06-02 14:46:10 +01008216 syntactic_tail_call_mode);
8217}
8218
8219HInstruction* HOptimizedGraphBuilder::NewCallConstantFunction(
8220 Handle<JSFunction> function, int argument_count,
8221 TailCallMode syntactic_tail_call_mode, TailCallMode tail_call_mode) {
8222 HValue* target = Add<HConstant>(function);
8223 return New<HInvokeFunction>(target, function, argument_count,
8224 syntactic_tail_call_mode, tail_call_mode);
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00008225}
8226
8227
// Sort key for polymorphic call targets: candidates are ordered hottest
// first (more profiler ticks), with smaller AST size breaking ties. |index|
// remembers the candidate's position in the original map list.
class FunctionSorter {
 public:
  explicit FunctionSorter(int index = 0, int ticks = 0, int size = 0)
      : index_(index), ticks_(ticks), size_(size) {}

  int index() const { return index_; }
  int ticks() const { return ticks_; }
  int size() const { return size_; }

 private:
  int index_;  // Position in the original candidate list.
  int ticks_;  // Profiler tick count of the target function.
  int size_;   // Estimated AST size (inlining cost) of the target.
};


// Strict weak ordering: higher tick counts sort first; among equally hot
// candidates the smaller function wins.
inline bool operator<(const FunctionSorter& lhs, const FunctionSorter& rhs) {
  if (lhs.ticks() != rhs.ticks()) return lhs.ticks() > rhs.ticks();
  return lhs.size() < rhs.size();
}
8249
8250
// Compiles a polymorphic method call receiver.name(...). Collects up to
// kMaxCallPolymorphism constant-function targets from the feedback maps,
// sorts them hottest-first, and emits a chain of receiver type checks, each
// guarding either an inlined body or a direct/builtin call. The fallthrough
// is either a hard deoptimization (all known maps handled) or a generic
// named load + call. All arms merge in a single join block whose value is
// returned to the ast context.
void HOptimizedGraphBuilder::HandlePolymorphicCallNamed(Call* expr,
                                                        HValue* receiver,
                                                        SmallMapList* maps,
                                                        Handle<String> name) {
  int argument_count = expr->arguments()->length() + 1;  // Includes receiver.
  FunctionSorter order[kMaxCallPolymorphism];

  bool handle_smi = false;
  bool handled_string = false;
  int ordered_functions = 0;

  TailCallMode syntactic_tail_call_mode = expr->tail_call_mode();
  TailCallMode tail_call_mode =
      function_state()->ComputeTailCallMode(syntactic_tail_call_mode);

  // Pass 1: collect candidates whose property lookup yields a constant
  // JSFunction. String receivers are collapsed into one case; a Number
  // receiver forces an up-front Smi split below.
  int i;
  for (i = 0; i < maps->length() && ordered_functions < kMaxCallPolymorphism;
       ++i) {
    PropertyAccessInfo info(this, LOAD, maps->at(i), name);
    if (info.CanAccessMonomorphic() && info.IsDataConstant() &&
        info.constant()->IsJSFunction()) {
      if (info.IsStringType()) {
        if (handled_string) continue;
        handled_string = true;
      }
      Handle<JSFunction> target = Handle<JSFunction>::cast(info.constant());
      if (info.IsNumberType()) {
        handle_smi = true;
      }
      expr->set_target(target);
      order[ordered_functions++] = FunctionSorter(
          i, target->shared()->profiler_ticks(), InliningAstSize(target));
    }
  }

  // Hottest (and, on ties, smallest) candidates are checked first.
  std::sort(order, order + ordered_functions);

  // If any map could not be handled, drop all specialized cases and go
  // fully generic below (ordered_functions == -1 skips pass 2).
  if (i < maps->length()) {
    maps->Clear();
    ordered_functions = -1;
  }

  HBasicBlock* number_block = NULL;
  HBasicBlock* join = NULL;
  handled_string = false;
  int count = 0;

  // Pass 2: emit one type-check arm per candidate.
  for (int fn = 0; fn < ordered_functions; ++fn) {
    int i = order[fn].index();
    PropertyAccessInfo info(this, LOAD, maps->at(i), name);
    if (info.IsStringType()) {
      if (handled_string) continue;
      handled_string = true;
    }
    // Reloads the target.
    info.CanAccessMonomorphic();
    Handle<JSFunction> target = Handle<JSFunction>::cast(info.constant());

    expr->set_target(target);
    if (count == 0) {
      // Only needed once.
      join = graph()->CreateBasicBlock();
      if (handle_smi) {
        // Route Smis into the shared number block; all other arms run on
        // the not-Smi path.
        HBasicBlock* empty_smi_block = graph()->CreateBasicBlock();
        HBasicBlock* not_smi_block = graph()->CreateBasicBlock();
        number_block = graph()->CreateBasicBlock();
        FinishCurrentBlock(New<HIsSmiAndBranch>(
            receiver, empty_smi_block, not_smi_block));
        GotoNoSimulate(empty_smi_block, number_block);
        set_current_block(not_smi_block);
      } else {
        BuildCheckHeapObject(receiver);
      }
    }
    ++count;
    HBasicBlock* if_true = graph()->CreateBasicBlock();
    HBasicBlock* if_false = graph()->CreateBasicBlock();
    HUnaryControlInstruction* compare;

    // Pick the receiver test: heap-number map for Number, instance type for
    // String, exact map otherwise.
    Handle<Map> map = info.map();
    if (info.IsNumberType()) {
      Handle<Map> heap_number_map = isolate()->factory()->heap_number_map();
      compare = New<HCompareMap>(receiver, heap_number_map, if_true, if_false);
    } else if (info.IsStringType()) {
      compare = New<HIsStringAndBranch>(receiver, if_true, if_false);
    } else {
      compare = New<HCompareMap>(receiver, map, if_true, if_false);
    }
    FinishCurrentBlock(compare);

    if (info.IsNumberType()) {
      // Heap numbers join the Smi path in the shared number block.
      GotoNoSimulate(if_true, number_block);
      if_true = number_block;
    }

    set_current_block(if_true);

    AddCheckPrototypeMaps(info.holder(), map);

    HValue* function = Add<HConstant>(expr->target());
    environment()->SetExpressionStackAt(0, function);
    Push(receiver);
    CHECK_ALIVE(VisitExpressions(expr->arguments()));
    bool needs_wrapping = info.NeedsWrappingFor(target);
    bool try_inline = FLAG_polymorphic_inlining && !needs_wrapping;
    if (FLAG_trace_inlining && try_inline) {
      Handle<JSFunction> caller = current_info()->closure();
      base::SmartArrayPointer<char> caller_name =
          caller->shared()->DebugName()->ToCString();
      PrintF("Trying to inline the polymorphic call to %s from %s\n",
             name->ToCString().get(),
             caller_name.get());
    }
    if (try_inline && TryInlineCall(expr)) {
      // Trying to inline will signal that we should bailout from the
      // entire compilation by setting stack overflow on the visitor.
      if (HasStackOverflow()) return;
    } else {
      // Since HWrapReceiver currently cannot actually wrap numbers and strings,
      // use the regular call builtin for method calls to wrap the receiver.
      // TODO(verwaest): Support creation of value wrappers directly in
      // HWrapReceiver.
      HInstruction* call =
          needs_wrapping
              ? NewCallFunction(
                    function, argument_count, syntactic_tail_call_mode,
                    ConvertReceiverMode::kNotNullOrUndefined, tail_call_mode)
              : NewCallConstantFunction(target, argument_count,
                                        syntactic_tail_call_mode,
                                        tail_call_mode);
      PushArgumentsFromEnvironment(argument_count);
      AddInstruction(call);
      Drop(1);  // Drop the function.
      if (!ast_context()->IsEffect()) Push(call);
    }

    if (current_block() != NULL) Goto(join);
    set_current_block(if_false);
  }

  // Finish up. Unconditionally deoptimize if we've handled all the maps we
  // know about and do not want to handle ones we've never seen. Otherwise
  // use a generic IC.
  if (ordered_functions == maps->length() && FLAG_deoptimize_uncommon_cases) {
    FinishExitWithHardDeoptimization(Deoptimizer::kUnknownMapInPolymorphicCall);
  } else {
    Property* prop = expr->expression()->AsProperty();
    HInstruction* function =
        BuildNamedGeneric(LOAD, prop, prop->PropertyFeedbackSlot(), receiver,
                          name, NULL, prop->IsUninitialized());
    AddInstruction(function);
    Push(function);
    AddSimulate(prop->LoadId(), REMOVABLE_SIMULATE);

    environment()->SetExpressionStackAt(1, function);
    environment()->SetExpressionStackAt(0, receiver);
    CHECK_ALIVE(VisitExpressions(expr->arguments()));

    HInstruction* call = NewCallFunction(
        function, argument_count, syntactic_tail_call_mode,
        ConvertReceiverMode::kNotNullOrUndefined, tail_call_mode);

    PushArgumentsFromEnvironment(argument_count);

    Drop(1);  // Function.

    if (join != NULL) {
      AddInstruction(call);
      if (!ast_context()->IsEffect()) Push(call);
      Goto(join);
    } else {
      // No specialized arms were emitted: the generic call is the result.
      return ast_context()->ReturnInstruction(call, expr->id());
    }
  }

  // We assume that control flow is always live after an expression. So
  // even without predecessors to the join block, we set it as the exit
  // block and continue by adding instructions there.
  DCHECK(join != NULL);
  if (join->HasPredecessor()) {
    set_current_block(join);
    join->SetJoinId(expr->id());
    if (!ast_context()->IsEffect()) return ast_context()->ReturnValue(Pop());
  } else {
    set_current_block(NULL);
  }
}
8438
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00008439void HOptimizedGraphBuilder::TraceInline(Handle<JSFunction> target,
8440 Handle<JSFunction> caller,
Ben Murdochda12d292016-06-02 14:46:10 +01008441 const char* reason,
8442 TailCallMode tail_call_mode) {
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00008443 if (FLAG_trace_inlining) {
8444 base::SmartArrayPointer<char> target_name =
8445 target->shared()->DebugName()->ToCString();
8446 base::SmartArrayPointer<char> caller_name =
8447 caller->shared()->DebugName()->ToCString();
8448 if (reason == NULL) {
Ben Murdochda12d292016-06-02 14:46:10 +01008449 const char* call_mode =
8450 tail_call_mode == TailCallMode::kAllow ? "tail called" : "called";
8451 PrintF("Inlined %s %s from %s.\n", target_name.get(), call_mode,
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00008452 caller_name.get());
8453 } else {
8454 PrintF("Did not inline %s called from %s (%s).\n",
8455 target_name.get(), caller_name.get(), reason);
8456 }
8457 }
8458}
8459
8460
// Sentinel cost returned by InliningAstSize() for targets that must never
// be inlined; chosen to exceed any plausible AST node count.
static const int kNotInlinable = 1000000000;
8462
8463
// Returns the inlining cost of |target| as its AST node count, or
// kNotInlinable when the target must not be inlined (inlining disabled,
// builtin, API function, source too large, or optimization disabled on the
// target). Targets marked force_inline report a cost of zero. Refusals are
// traced via TraceInline.
int HOptimizedGraphBuilder::InliningAstSize(Handle<JSFunction> target) {
  if (!FLAG_use_inlining) return kNotInlinable;

  // Precondition: call is monomorphic and we have found a target with the
  // appropriate arity.
  Handle<JSFunction> caller = current_info()->closure();
  Handle<SharedFunctionInfo> target_shared(target->shared());

  // Always inline functions that force inlining.
  if (target_shared->force_inline()) {
    return 0;
  }
  if (target->shared()->IsBuiltin()) {
    return kNotInlinable;
  }

  if (target_shared->IsApiFunction()) {
    TraceInline(target, caller, "target is api function");
    return kNotInlinable;
  }

  // Do a quick check on source code length to avoid parsing large
  // inlining candidates.
  if (target_shared->SourceSize() >
      Min(FLAG_max_inlined_source_size, kUnlimitedMaxInlinedSourceSize)) {
    TraceInline(target, caller, "target text too big");
    return kNotInlinable;
  }

  // Target must be inlineable.
  // kHydrogenFilter is exempted so filtered functions can still be inlined.
  BailoutReason noopt_reason = target_shared->disable_optimization_reason();
  if (!target_shared->IsInlineable() && noopt_reason != kHydrogenFilter) {
    TraceInline(target, caller, "target not inlineable");
    return kNotInlinable;
  }
  if (noopt_reason != kNoReason && noopt_reason != kHydrogenFilter) {
    TraceInline(target, caller, "target contains unsupported syntax [early]");
    return kNotInlinable;
  }

  int nodes_added = target_shared->ast_node_count();
  return nodes_added;
}
8507
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00008508bool HOptimizedGraphBuilder::TryInline(Handle<JSFunction> target,
8509 int arguments_count,
8510 HValue* implicit_return_value,
8511 BailoutId ast_id, BailoutId return_id,
Ben Murdochda12d292016-06-02 14:46:10 +01008512 InliningKind inlining_kind,
8513 TailCallMode syntactic_tail_call_mode) {
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00008514 if (target->context()->native_context() !=
8515 top_info()->closure()->context()->native_context()) {
8516 return false;
8517 }
8518 int nodes_added = InliningAstSize(target);
8519 if (nodes_added == kNotInlinable) return false;
8520
8521 Handle<JSFunction> caller = current_info()->closure();
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00008522 if (nodes_added > Min(FLAG_max_inlined_nodes, kUnlimitedMaxInlinedNodes)) {
8523 TraceInline(target, caller, "target AST is too large [early]");
8524 return false;
8525 }
8526
8527 // Don't inline deeper than the maximum number of inlining levels.
8528 HEnvironment* env = environment();
8529 int current_level = 1;
8530 while (env->outer() != NULL) {
8531 if (current_level == FLAG_max_inlining_levels) {
8532 TraceInline(target, caller, "inline depth limit reached");
8533 return false;
8534 }
8535 if (env->outer()->frame_type() == JS_FUNCTION) {
8536 current_level++;
8537 }
8538 env = env->outer();
8539 }
8540
8541 // Don't inline recursive functions.
8542 for (FunctionState* state = function_state();
8543 state != NULL;
8544 state = state->outer()) {
8545 if (*state->compilation_info()->closure() == *target) {
8546 TraceInline(target, caller, "target is recursive");
8547 return false;
8548 }
8549 }
8550
8551 // We don't want to add more than a certain number of nodes from inlining.
8552 // Always inline small methods (<= 10 nodes).
8553 if (inlined_count_ > Min(FLAG_max_inlined_nodes_cumulative,
8554 kUnlimitedMaxInlinedNodesCumulative)) {
8555 TraceInline(target, caller, "cumulative AST node limit reached");
8556 return false;
8557 }
8558
8559 // Parse and allocate variables.
8560 // Use the same AstValueFactory for creating strings in the sub-compilation
8561 // step, but don't transfer ownership to target_info.
8562 ParseInfo parse_info(zone(), target);
8563 parse_info.set_ast_value_factory(
8564 top_info()->parse_info()->ast_value_factory());
8565 parse_info.set_ast_value_factory_owned(false);
8566
Ben Murdochc5610432016-08-08 18:44:38 +01008567 CompilationInfo target_info(&parse_info, target);
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00008568 Handle<SharedFunctionInfo> target_shared(target->shared());
8569
Ben Murdoch097c5b22016-05-18 11:27:45 +01008570 if (inlining_kind != CONSTRUCT_CALL_RETURN &&
8571 IsClassConstructor(target_shared->kind())) {
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00008572 TraceInline(target, caller, "target is classConstructor");
8573 return false;
8574 }
Ben Murdoch097c5b22016-05-18 11:27:45 +01008575
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00008576 if (target_shared->HasDebugInfo()) {
8577 TraceInline(target, caller, "target is being debugged");
8578 return false;
8579 }
8580 if (!Compiler::ParseAndAnalyze(target_info.parse_info())) {
8581 if (target_info.isolate()->has_pending_exception()) {
8582 // Parse or scope error, never optimize this function.
8583 SetStackOverflow();
8584 target_shared->DisableOptimization(kParseScopeError);
8585 }
8586 TraceInline(target, caller, "parse failure");
8587 return false;
8588 }
Ben Murdochc5610432016-08-08 18:44:38 +01008589 if (target_shared->dont_crankshaft()) {
8590 TraceInline(target, caller, "ParseAndAnalyze found incompatibility");
8591 return false;
8592 }
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00008593
8594 if (target_info.scope()->num_heap_slots() > 0) {
8595 TraceInline(target, caller, "target has context-allocated variables");
8596 return false;
8597 }
8598
8599 int rest_index;
8600 Variable* rest = target_info.scope()->rest_parameter(&rest_index);
8601 if (rest) {
8602 TraceInline(target, caller, "target uses rest parameters");
8603 return false;
8604 }
8605
8606 FunctionLiteral* function = target_info.literal();
8607
8608 // The following conditions must be checked again after re-parsing, because
8609 // earlier the information might not have been complete due to lazy parsing.
8610 nodes_added = function->ast_node_count();
8611 if (nodes_added > Min(FLAG_max_inlined_nodes, kUnlimitedMaxInlinedNodes)) {
8612 TraceInline(target, caller, "target AST is too large [late]");
8613 return false;
8614 }
8615 if (function->dont_optimize()) {
8616 TraceInline(target, caller, "target contains unsupported syntax [late]");
8617 return false;
8618 }
8619
8620 // If the function uses the arguments object check that inlining of functions
8621 // with arguments object is enabled and the arguments-variable is
8622 // stack allocated.
8623 if (function->scope()->arguments() != NULL) {
8624 if (!FLAG_inline_arguments) {
8625 TraceInline(target, caller, "target uses arguments object");
8626 return false;
8627 }
8628 }
8629
8630 // Unsupported variable references present.
8631 if (function->scope()->this_function_var() != nullptr ||
8632 function->scope()->new_target_var() != nullptr) {
8633 TraceInline(target, caller, "target uses new target or this function");
8634 return false;
8635 }
8636
8637 // All declarations must be inlineable.
8638 ZoneList<Declaration*>* decls = target_info.scope()->declarations();
8639 int decl_count = decls->length();
8640 for (int i = 0; i < decl_count; ++i) {
8641 if (!decls->at(i)->IsInlineable()) {
8642 TraceInline(target, caller, "target has non-trivial declaration");
8643 return false;
8644 }
8645 }
8646
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00008647 // Generate the deoptimization data for the unoptimized version of
8648 // the target function if we don't already have it.
8649 if (!Compiler::EnsureDeoptimizationSupport(&target_info)) {
8650 TraceInline(target, caller, "could not generate deoptimization info");
8651 return false;
8652 }
8653 // Remember that we inlined this function. This needs to be called right
8654 // after the EnsureDeoptimizationSupport call so that the code flusher
8655 // does not remove the code with the deoptimization support.
8656 top_info()->AddInlinedFunction(target_info.shared_info());
8657
8658 // ----------------------------------------------------------------
8659 // After this point, we've made a decision to inline this function (so
8660 // TryInline should always return true).
8661
8662 // Type-check the inlined function.
8663 DCHECK(target_shared->has_deoptimization_support());
8664 AstTyper(target_info.isolate(), target_info.zone(), target_info.closure(),
Ben Murdochc5610432016-08-08 18:44:38 +01008665 target_info.scope(), target_info.osr_ast_id(), target_info.literal(),
8666 &bounds_)
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00008667 .Run();
8668
8669 int inlining_id = 0;
8670 if (top_info()->is_tracking_positions()) {
Ben Murdochc5610432016-08-08 18:44:38 +01008671 inlining_id = TraceInlinedFunction(target_shared, source_position());
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00008672 }
8673
8674 // Save the pending call context. Set up new one for the inlined function.
8675 // The function state is new-allocated because we need to delete it
8676 // in two different places.
Ben Murdochda12d292016-06-02 14:46:10 +01008677 FunctionState* target_state = new FunctionState(
8678 this, &target_info, inlining_kind, inlining_id,
8679 function_state()->ComputeTailCallMode(syntactic_tail_call_mode));
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00008680
8681 HConstant* undefined = graph()->GetConstantUndefined();
8682
Ben Murdochda12d292016-06-02 14:46:10 +01008683 HEnvironment* inner_env = environment()->CopyForInlining(
8684 target, arguments_count, function, undefined,
8685 function_state()->inlining_kind(), syntactic_tail_call_mode);
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00008686
8687 HConstant* context = Add<HConstant>(Handle<Context>(target->context()));
8688 inner_env->BindContext(context);
8689
8690 // Create a dematerialized arguments object for the function, also copy the
8691 // current arguments values to use them for materialization.
8692 HEnvironment* arguments_env = inner_env->arguments_environment();
8693 int parameter_count = arguments_env->parameter_count();
8694 HArgumentsObject* arguments_object = Add<HArgumentsObject>(parameter_count);
8695 for (int i = 0; i < parameter_count; i++) {
8696 arguments_object->AddArgument(arguments_env->Lookup(i), zone());
8697 }
8698
  // If the function uses the arguments object, then bind one.
8700 if (function->scope()->arguments() != NULL) {
8701 DCHECK(function->scope()->arguments()->IsStackAllocated());
8702 inner_env->Bind(function->scope()->arguments(), arguments_object);
8703 }
8704
8705 // Capture the state before invoking the inlined function for deopt in the
8706 // inlined function. This simulate has no bailout-id since it's not directly
8707 // reachable for deopt, and is only used to capture the state. If the simulate
8708 // becomes reachable by merging, the ast id of the simulate merged into it is
8709 // adopted.
8710 Add<HSimulate>(BailoutId::None());
8711
8712 current_block()->UpdateEnvironment(inner_env);
8713 Scope* saved_scope = scope();
8714 set_scope(target_info.scope());
Ben Murdochda12d292016-06-02 14:46:10 +01008715 HEnterInlined* enter_inlined = Add<HEnterInlined>(
8716 return_id, target, context, arguments_count, function,
8717 function_state()->inlining_kind(), function->scope()->arguments(),
8718 arguments_object, syntactic_tail_call_mode);
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00008719 if (top_info()->is_tracking_positions()) {
8720 enter_inlined->set_inlining_id(inlining_id);
8721 }
8722 function_state()->set_entry(enter_inlined);
8723
8724 VisitDeclarations(target_info.scope()->declarations());
8725 VisitStatements(function->body());
8726 set_scope(saved_scope);
8727 if (HasStackOverflow()) {
8728 // Bail out if the inline function did, as we cannot residualize a call
8729 // instead, but do not disable optimization for the outer function.
8730 TraceInline(target, caller, "inline graph construction failed");
8731 target_shared->DisableOptimization(kInliningBailedOut);
8732 current_info()->RetryOptimization(kInliningBailedOut);
8733 delete target_state;
8734 return true;
8735 }
8736
8737 // Update inlined nodes count.
8738 inlined_count_ += nodes_added;
8739
8740 Handle<Code> unoptimized_code(target_shared->code());
8741 DCHECK(unoptimized_code->kind() == Code::FUNCTION);
8742 Handle<TypeFeedbackInfo> type_info(
8743 TypeFeedbackInfo::cast(unoptimized_code->type_feedback_info()));
8744 graph()->update_type_change_checksum(type_info->own_type_change_checksum());
8745
Ben Murdochda12d292016-06-02 14:46:10 +01008746 TraceInline(target, caller, NULL, syntactic_tail_call_mode);
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00008747
8748 if (current_block() != NULL) {
8749 FunctionState* state = function_state();
8750 if (state->inlining_kind() == CONSTRUCT_CALL_RETURN) {
8751 // Falling off the end of an inlined construct call. In a test context the
8752 // return value will always evaluate to true, in a value context the
8753 // return value is the newly allocated receiver.
8754 if (call_context()->IsTest()) {
Ben Murdoch097c5b22016-05-18 11:27:45 +01008755 inlined_test_context()->ReturnValue(graph()->GetConstantTrue());
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00008756 } else if (call_context()->IsEffect()) {
8757 Goto(function_return(), state);
8758 } else {
8759 DCHECK(call_context()->IsValue());
8760 AddLeaveInlined(implicit_return_value, state);
8761 }
8762 } else if (state->inlining_kind() == SETTER_CALL_RETURN) {
8763 // Falling off the end of an inlined setter call. The returned value is
8764 // never used, the value of an assignment is always the value of the RHS
8765 // of the assignment.
8766 if (call_context()->IsTest()) {
8767 inlined_test_context()->ReturnValue(implicit_return_value);
8768 } else if (call_context()->IsEffect()) {
8769 Goto(function_return(), state);
8770 } else {
8771 DCHECK(call_context()->IsValue());
8772 AddLeaveInlined(implicit_return_value, state);
8773 }
8774 } else {
8775 // Falling off the end of a normal inlined function. This basically means
8776 // returning undefined.
8777 if (call_context()->IsTest()) {
Ben Murdoch097c5b22016-05-18 11:27:45 +01008778 inlined_test_context()->ReturnValue(graph()->GetConstantFalse());
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00008779 } else if (call_context()->IsEffect()) {
8780 Goto(function_return(), state);
8781 } else {
8782 DCHECK(call_context()->IsValue());
8783 AddLeaveInlined(undefined, state);
8784 }
8785 }
8786 }
8787
8788 // Fix up the function exits.
8789 if (inlined_test_context() != NULL) {
8790 HBasicBlock* if_true = inlined_test_context()->if_true();
8791 HBasicBlock* if_false = inlined_test_context()->if_false();
8792
8793 HEnterInlined* entry = function_state()->entry();
8794
8795 // Pop the return test context from the expression context stack.
8796 DCHECK(ast_context() == inlined_test_context());
8797 ClearInlinedTestContext();
8798 delete target_state;
8799
8800 // Forward to the real test context.
8801 if (if_true->HasPredecessor()) {
8802 entry->RegisterReturnTarget(if_true, zone());
8803 if_true->SetJoinId(ast_id);
8804 HBasicBlock* true_target = TestContext::cast(ast_context())->if_true();
8805 Goto(if_true, true_target, function_state());
8806 }
8807 if (if_false->HasPredecessor()) {
8808 entry->RegisterReturnTarget(if_false, zone());
8809 if_false->SetJoinId(ast_id);
8810 HBasicBlock* false_target = TestContext::cast(ast_context())->if_false();
8811 Goto(if_false, false_target, function_state());
8812 }
8813 set_current_block(NULL);
8814 return true;
8815
8816 } else if (function_return()->HasPredecessor()) {
8817 function_state()->entry()->RegisterReturnTarget(function_return(), zone());
8818 function_return()->SetJoinId(ast_id);
8819 set_current_block(function_return());
8820 } else {
8821 set_current_block(NULL);
8822 }
8823 delete target_state;
8824 return true;
8825}
8826
8827
8828bool HOptimizedGraphBuilder::TryInlineCall(Call* expr) {
8829 return TryInline(expr->target(), expr->arguments()->length(), NULL,
Ben Murdochda12d292016-06-02 14:46:10 +01008830 expr->id(), expr->ReturnId(), NORMAL_RETURN,
8831 expr->tail_call_mode());
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00008832}
8833
8834
8835bool HOptimizedGraphBuilder::TryInlineConstruct(CallNew* expr,
8836 HValue* implicit_return_value) {
8837 return TryInline(expr->target(), expr->arguments()->length(),
8838 implicit_return_value, expr->id(), expr->ReturnId(),
Ben Murdochda12d292016-06-02 14:46:10 +01008839 CONSTRUCT_CALL_RETURN, TailCallMode::kDisallow);
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00008840}
8841
Ben Murdoch097c5b22016-05-18 11:27:45 +01008842bool HOptimizedGraphBuilder::TryInlineGetter(Handle<Object> getter,
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00008843 Handle<Map> receiver_map,
8844 BailoutId ast_id,
8845 BailoutId return_id) {
8846 if (TryInlineApiGetter(getter, receiver_map, ast_id)) return true;
Ben Murdoch097c5b22016-05-18 11:27:45 +01008847 return getter->IsJSFunction() &&
8848 TryInline(Handle<JSFunction>::cast(getter), 0, NULL, ast_id, return_id,
Ben Murdochda12d292016-06-02 14:46:10 +01008849 GETTER_CALL_RETURN, TailCallMode::kDisallow);
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00008850}
8851
Ben Murdoch097c5b22016-05-18 11:27:45 +01008852bool HOptimizedGraphBuilder::TryInlineSetter(Handle<Object> setter,
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00008853 Handle<Map> receiver_map,
8854 BailoutId id,
8855 BailoutId assignment_id,
8856 HValue* implicit_return_value) {
8857 if (TryInlineApiSetter(setter, receiver_map, id)) return true;
Ben Murdoch097c5b22016-05-18 11:27:45 +01008858 return setter->IsJSFunction() &&
8859 TryInline(Handle<JSFunction>::cast(setter), 1, implicit_return_value,
Ben Murdochda12d292016-06-02 14:46:10 +01008860 id, assignment_id, SETTER_CALL_RETURN,
8861 TailCallMode::kDisallow);
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00008862}
8863
8864
8865bool HOptimizedGraphBuilder::TryInlineIndirectCall(Handle<JSFunction> function,
8866 Call* expr,
8867 int arguments_count) {
8868 return TryInline(function, arguments_count, NULL, expr->id(),
Ben Murdochda12d292016-06-02 14:46:10 +01008869 expr->ReturnId(), NORMAL_RETURN, expr->tail_call_mode());
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00008870}
8871
8872
// Tries to replace a call to a recognized builtin function (Math.* here)
// with an equivalent Hydrogen instruction in the calling function, avoiding
// the call altogether. Returns true when handled; on success the consumed
// expression-stack values (arguments, receiver, function) have been dropped
// and the result delivered to the current AST context.
bool HOptimizedGraphBuilder::TryInlineBuiltinFunctionCall(Call* expr) {
  if (!expr->target()->shared()->HasBuiltinFunctionId()) return false;
  BuiltinFunctionId id = expr->target()->shared()->builtin_function_id();
  // We intentionally ignore expr->tail_call_mode() here because builtins
  // we inline here do not observe if they were tail called or not.
  switch (id) {
    case kMathExp:
      if (!FLAG_fast_math) break;
      // Fall through if FLAG_fast_math: Math.exp is then treated like the
      // other unary math operations below.
    case kMathRound:
    case kMathFround:
    case kMathFloor:
    case kMathAbs:
    case kMathSqrt:
    case kMathLog:
    case kMathClz32:
      // Unary Math builtins map directly onto HUnaryMathOperation.
      if (expr->arguments()->length() == 1) {
        HValue* argument = Pop();
        Drop(2);  // Receiver and function.
        HInstruction* op = NewUncasted<HUnaryMathOperation>(argument, id);
        ast_context()->ReturnInstruction(op, expr->id());
        return true;
      }
      break;
    case kMathImul:
      // Math.imul(a, b): 32-bit integer multiplication.
      if (expr->arguments()->length() == 2) {
        HValue* right = Pop();
        HValue* left = Pop();
        Drop(2);  // Receiver and function.
        HInstruction* op =
            HMul::NewImul(isolate(), zone(), context(), left, right);
        ast_context()->ReturnInstruction(op, expr->id());
        return true;
      }
      break;
    default:
      // Not supported for inlining yet.
      break;
  }
  return false;
}
8914
8915
8916// static
8917bool HOptimizedGraphBuilder::IsReadOnlyLengthDescriptor(
8918 Handle<Map> jsarray_map) {
8919 DCHECK(!jsarray_map->is_dictionary_map());
8920 Isolate* isolate = jsarray_map->GetIsolate();
8921 Handle<Name> length_string = isolate->factory()->length_string();
8922 DescriptorArray* descriptors = jsarray_map->instance_descriptors();
Ben Murdoch097c5b22016-05-18 11:27:45 +01008923 int number =
8924 descriptors->SearchWithCache(isolate, *length_string, *jsarray_map);
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00008925 DCHECK_NE(DescriptorArray::kNotFound, number);
8926 return descriptors->GetDetails(number).IsReadOnly();
8927}
8928
8929
8930// static
8931bool HOptimizedGraphBuilder::CanInlineArrayResizeOperation(
8932 Handle<Map> receiver_map) {
8933 return !receiver_map.is_null() && receiver_map->prototype()->IsJSObject() &&
8934 receiver_map->instance_type() == JS_ARRAY_TYPE &&
8935 IsFastElementsKind(receiver_map->elements_kind()) &&
Ben Murdochc5610432016-08-08 18:44:38 +01008936 !receiver_map->is_dictionary_map() && receiver_map->is_extensible() &&
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00008937 (!receiver_map->is_prototype_map() || receiver_map->is_stable()) &&
8938 !IsReadOnlyLengthDescriptor(receiver_map);
8939}
8940
8941
// Tries to replace a monomorphic call to a builtin method (String.prototype
// char ops, Math.*, Array.prototype push/pop/shift/indexOf/lastIndexOf,
// Object.prototype.hasOwnProperty in a for-in body) with inline Hydrogen
// code. |args_count_no_receiver| is the number of explicit arguments; the
// receiver sits below them on the expression stack, the function below that.
// Returns true when the call was fully handled, in which case the consumed
// stack values have been dropped and the result delivered to the AST context.
bool HOptimizedGraphBuilder::TryInlineBuiltinMethodCall(
    Call* expr, Handle<JSFunction> function, Handle<Map> receiver_map,
    int args_count_no_receiver) {
  if (!function->shared()->HasBuiltinFunctionId()) return false;
  BuiltinFunctionId id = function->shared()->builtin_function_id();
  int argument_count = args_count_no_receiver + 1;  // Plus receiver.

  // If no map was supplied, try to recover one from a constant receiver on
  // the expression stack.
  if (receiver_map.is_null()) {
    HValue* receiver = environment()->ExpressionStackAt(args_count_no_receiver);
    if (receiver->IsConstant() &&
        HConstant::cast(receiver)->handle(isolate())->IsHeapObject()) {
      receiver_map =
          handle(Handle<HeapObject>::cast(
                     HConstant::cast(receiver)->handle(isolate()))->map());
    }
  }
  // Try to inline calls like Math.* as operations in the calling function.
  switch (id) {
    case kObjectHasOwnProperty: {
      // It's not safe to look through the phi for elements if we're compiling
      // for osr.
      if (top_info()->is_osr()) return false;
      if (argument_count != 2) return false;
      // Only the pattern receiver.hasOwnProperty(key) inside a for-in over
      // the same receiver is recognized; then the key is known to exist.
      HValue* key = Top();
      if (!key->IsLoadKeyed()) return false;
      HValue* elements = HLoadKeyed::cast(key)->elements();
      if (!elements->IsPhi() || elements->OperandCount() != 1) return false;
      if (!elements->OperandAt(0)->IsForInCacheArray()) return false;
      HForInCacheArray* cache = HForInCacheArray::cast(elements->OperandAt(0));
      HValue* receiver = environment()->ExpressionStackAt(1);
      if (!receiver->IsPhi() || receiver->OperandCount() != 1) return false;
      if (cache->enumerable() != receiver->OperandAt(0)) return false;
      Drop(3);  // key, receiver, function
      Add<HCheckMapValue>(receiver, cache->map());
      ast_context()->ReturnValue(graph()->GetConstantTrue());
      return true;
    }
    case kStringCharCodeAt:
    case kStringCharAt:
      if (argument_count == 2) {
        HValue* index = Pop();
        HValue* string = Pop();
        Drop(1);  // Function.
        HInstruction* char_code =
            BuildStringCharCodeAt(string, index);
        if (id == kStringCharCodeAt) {
          ast_context()->ReturnInstruction(char_code, expr->id());
          return true;
        }
        // charAt additionally converts the char code to a string.
        AddInstruction(char_code);
        HInstruction* result = NewUncasted<HStringCharFromCode>(char_code);
        ast_context()->ReturnInstruction(result, expr->id());
        return true;
      }
      break;
    case kStringFromCharCode:
      if (argument_count == 2) {
        HValue* argument = Pop();
        Drop(2);  // Receiver and function.
        // The argument is truncated to int32 as per the spec's ToUint16-style
        // handling inside HStringCharFromCode.
        argument = AddUncasted<HForceRepresentation>(
            argument, Representation::Integer32());
        argument->SetFlag(HValue::kTruncatingToInt32);
        HInstruction* result = NewUncasted<HStringCharFromCode>(argument);
        ast_context()->ReturnInstruction(result, expr->id());
        return true;
      }
      break;
    case kMathExp:
      if (!FLAG_fast_math) break;
      // Fall through if FLAG_fast_math.
    case kMathRound:
    case kMathFround:
    case kMathFloor:
    case kMathAbs:
    case kMathSqrt:
    case kMathLog:
    case kMathClz32:
      // Unary Math builtins map directly onto HUnaryMathOperation.
      if (argument_count == 2) {
        HValue* argument = Pop();
        Drop(2);  // Receiver and function.
        HInstruction* op = NewUncasted<HUnaryMathOperation>(argument, id);
        ast_context()->ReturnInstruction(op, expr->id());
        return true;
      }
      break;
    case kMathPow:
      if (argument_count == 3) {
        HValue* right = Pop();
        HValue* left = Pop();
        Drop(2);  // Receiver and function.
        HInstruction* result = NULL;
        // Use sqrt() if exponent is 0.5 or -0.5.
        if (right->IsConstant() && HConstant::cast(right)->HasDoubleValue()) {
          double exponent = HConstant::cast(right)->DoubleValue();
          if (exponent == 0.5) {
            result = NewUncasted<HUnaryMathOperation>(left, kMathPowHalf);
          } else if (exponent == -0.5) {
            HValue* one = graph()->GetConstant1();
            HInstruction* sqrt = AddUncasted<HUnaryMathOperation>(
                left, kMathPowHalf);
            // MathPowHalf doesn't have side effects so there's no need for
            // an environment simulation here.
            DCHECK(!sqrt->HasObservableSideEffects());
            result = NewUncasted<HDiv>(one, sqrt);
          } else if (exponent == 2.0) {
            result = NewUncasted<HMul>(left, left);
          }
        }

        if (result == NULL) {
          result = NewUncasted<HPower>(left, right);
        }
        ast_context()->ReturnInstruction(result, expr->id());
        return true;
      }
      break;
    case kMathMax:
    case kMathMin:
      if (argument_count == 3) {
        HValue* right = Pop();
        HValue* left = Pop();
        Drop(2);  // Receiver and function.
        HMathMinMax::Operation op = (id == kMathMin) ? HMathMinMax::kMathMin
                                                     : HMathMinMax::kMathMax;
        HInstruction* result = NewUncasted<HMathMinMax>(left, right, op);
        ast_context()->ReturnInstruction(result, expr->id());
        return true;
      }
      break;
    case kMathImul:
      if (argument_count == 3) {
        HValue* right = Pop();
        HValue* left = Pop();
        Drop(2);  // Receiver and function.
        HInstruction* result =
            HMul::NewImul(isolate(), zone(), context(), left, right);
        ast_context()->ReturnInstruction(result, expr->id());
        return true;
      }
      break;
    case kArrayPop: {
      if (!CanInlineArrayResizeOperation(receiver_map)) return false;
      ElementsKind elements_kind = receiver_map->elements_kind();

      Drop(args_count_no_receiver);
      HValue* result;
      HValue* reduced_length;
      HValue* receiver = Pop();

      HValue* checked_object = AddCheckMap(receiver, receiver_map);
      HValue* length =
          Add<HLoadNamedField>(checked_object, nullptr,
                               HObjectAccess::ForArrayLength(elements_kind));

      Drop(1);  // Function.

      // For an empty array, pop() yields undefined; otherwise read the last
      // element, overwrite it with a hole, and shrink the length by one.
      { NoObservableSideEffectsScope scope(this);
        IfBuilder length_checker(this);

        HValue* bounds_check = length_checker.If<HCompareNumericAndBranch>(
            length, graph()->GetConstant0(), Token::EQ);
        length_checker.Then();

        if (!ast_context()->IsEffect()) Push(graph()->GetConstantUndefined());

        length_checker.Else();
        HValue* elements = AddLoadElements(checked_object);
        // Ensure that we aren't popping from a copy-on-write array.
        if (IsFastSmiOrObjectElementsKind(elements_kind)) {
          elements = BuildCopyElementsOnWrite(checked_object, elements,
                                              elements_kind, length);
        }
        reduced_length = AddUncasted<HSub>(length, graph()->GetConstant1());
        result = AddElementAccess(elements, reduced_length, nullptr,
                                  bounds_check, nullptr, elements_kind, LOAD);
        HValue* hole = IsFastSmiOrObjectElementsKind(elements_kind)
                           ? graph()->GetConstantHole()
                           : Add<HConstant>(HConstant::kHoleNaN);
        // Storing a hole makes the elements kind holey.
        if (IsFastSmiOrObjectElementsKind(elements_kind)) {
          elements_kind = FAST_HOLEY_ELEMENTS;
        }
        AddElementAccess(elements, reduced_length, hole, bounds_check, nullptr,
                         elements_kind, STORE);
        Add<HStoreNamedField>(
            checked_object, HObjectAccess::ForArrayLength(elements_kind),
            reduced_length, STORE_TO_INITIALIZED_ENTRY);

        if (!ast_context()->IsEffect()) Push(result);

        length_checker.End();
      }
      result = ast_context()->IsEffect() ? graph()->GetConstant0() : Top();
      Add<HSimulate>(expr->id(), REMOVABLE_SIMULATE);
      if (!ast_context()->IsEffect()) Drop(1);

      ast_context()->ReturnValue(result);
      return true;
    }
    case kArrayPush: {
      if (!CanInlineArrayResizeOperation(receiver_map)) return false;
      ElementsKind elements_kind = receiver_map->elements_kind();

      // If there may be elements accessors in the prototype chain, the fast
      // inlined version can't be used.
      if (receiver_map->DictionaryElementsInPrototypeChainOnly()) return false;
      // If there currently can be no elements accessors on the prototype
      // chain, it doesn't mean that there won't be any later. Install a full
      // prototype chain check to trap element accessors being installed on
      // the prototype chain, which would cause elements to go to dictionary
      // mode and result in a map change.
      Handle<JSObject> prototype(JSObject::cast(receiver_map->prototype()));
      BuildCheckPrototypeMaps(prototype, Handle<JSObject>());

      // Protect against adding elements to the Array prototype, which needs to
      // route through appropriate bottlenecks.
      if (isolate()->IsFastArrayConstructorPrototypeChainIntact() &&
          !prototype->IsJSArray()) {
        return false;
      }

      const int argc = args_count_no_receiver;
      if (argc != 1) return false;

      HValue* value_to_push = Pop();
      HValue* array = Pop();
      Drop(1);  // Drop function.

      HInstruction* new_size = NULL;
      HValue* length = NULL;

      {
        NoObservableSideEffectsScope scope(this);

        length = Add<HLoadNamedField>(
            array, nullptr, HObjectAccess::ForArrayLength(elements_kind));

        new_size = AddUncasted<HAdd>(length, graph()->GetConstant1());

        // The grow-mode store both writes the element and updates the array
        // length.
        bool is_array = receiver_map->instance_type() == JS_ARRAY_TYPE;
        HValue* checked_array = Add<HCheckMaps>(array, receiver_map);
        BuildUncheckedMonomorphicElementAccess(
            checked_array, length, value_to_push, is_array, elements_kind,
            STORE, NEVER_RETURN_HOLE, STORE_AND_GROW_NO_TRANSITION);

        if (!ast_context()->IsEffect()) Push(new_size);
        Add<HSimulate>(expr->id(), REMOVABLE_SIMULATE);
        if (!ast_context()->IsEffect()) Drop(1);
      }

      // push() returns the new length.
      ast_context()->ReturnValue(new_size);
      return true;
    }
    case kArrayShift: {
      if (!CanInlineArrayResizeOperation(receiver_map)) return false;
      ElementsKind kind = receiver_map->elements_kind();

      // If there may be elements accessors in the prototype chain, the fast
      // inlined version can't be used.
      if (receiver_map->DictionaryElementsInPrototypeChainOnly()) return false;

      // If there currently can be no elements accessors on the prototype
      // chain, it doesn't mean that there won't be any later. Install a full
      // prototype chain check to trap element accessors being installed on
      // the prototype chain, which would cause elements to go to dictionary
      // mode and result in a map change.
      BuildCheckPrototypeMaps(
          handle(JSObject::cast(receiver_map->prototype()), isolate()),
          Handle<JSObject>::null());

      // Threshold for fast inlined Array.shift().
      HConstant* inline_threshold = Add<HConstant>(static_cast<int32_t>(16));

      Drop(args_count_no_receiver);
      HValue* receiver = Pop();
      Drop(1);  // Function.
      HValue* result;

      {
        NoObservableSideEffectsScope scope(this);

        HValue* length = Add<HLoadNamedField>(
            receiver, nullptr, HObjectAccess::ForArrayLength(kind));

        // shift() on an empty array yields undefined.
        IfBuilder if_lengthiszero(this);
        HValue* lengthiszero = if_lengthiszero.If<HCompareNumericAndBranch>(
            length, graph()->GetConstant0(), Token::EQ);
        if_lengthiszero.Then();
        {
          if (!ast_context()->IsEffect()) Push(graph()->GetConstantUndefined());
        }
        if_lengthiszero.Else();
        {
          HValue* elements = AddLoadElements(receiver);

          // Check if we can use the fast inlined Array.shift().
          IfBuilder if_inline(this);
          if_inline.If<HCompareNumericAndBranch>(
              length, inline_threshold, Token::LTE);
          if (IsFastSmiOrObjectElementsKind(kind)) {
            // We cannot handle copy-on-write backing stores here.
            if_inline.AndIf<HCompareMap>(
                elements, isolate()->factory()->fixed_array_map());
          }
          if_inline.Then();
          {
            // Remember the result.
            if (!ast_context()->IsEffect()) {
              Push(AddElementAccess(elements, graph()->GetConstant0(), nullptr,
                                    lengthiszero, nullptr, kind, LOAD));
            }

            // Compute the new length.
            HValue* new_length = AddUncasted<HSub>(
                length, graph()->GetConstant1());
            new_length->ClearFlag(HValue::kCanOverflow);

            // Copy the remaining elements.
            LoopBuilder loop(this, context(), LoopBuilder::kPostIncrement);
            {
              HValue* new_key = loop.BeginBody(
                  graph()->GetConstant0(), new_length, Token::LT);
              HValue* key = AddUncasted<HAdd>(new_key, graph()->GetConstant1());
              key->ClearFlag(HValue::kCanOverflow);
              ElementsKind copy_kind =
                  kind == FAST_HOLEY_SMI_ELEMENTS ? FAST_HOLEY_ELEMENTS : kind;
              HValue* element =
                  AddUncasted<HLoadKeyed>(elements, key, lengthiszero, nullptr,
                                          copy_kind, ALLOW_RETURN_HOLE);
              HStoreKeyed* store = Add<HStoreKeyed>(elements, new_key, element,
                                                    nullptr, copy_kind);
              store->SetFlag(HValue::kAllowUndefinedAsNaN);
            }
            loop.EndBody();

            // Put a hole at the end.
            HValue* hole = IsFastSmiOrObjectElementsKind(kind)
                               ? graph()->GetConstantHole()
                               : Add<HConstant>(HConstant::kHoleNaN);
            if (IsFastSmiOrObjectElementsKind(kind)) kind = FAST_HOLEY_ELEMENTS;
            Add<HStoreKeyed>(elements, new_length, hole, nullptr, kind,
                             INITIALIZING_STORE);

            // Remember new length.
            Add<HStoreNamedField>(
                receiver, HObjectAccess::ForArrayLength(kind),
                new_length, STORE_TO_INITIALIZED_ENTRY);
          }
          if_inline.Else();
          {
            // Array too large for the inlined copy loop: fall back to calling
            // the builtin out of line.
            Add<HPushArguments>(receiver);
            result = AddInstruction(NewCallConstantFunction(
                function, 1, TailCallMode::kDisallow, TailCallMode::kDisallow));
            if (!ast_context()->IsEffect()) Push(result);
          }
          if_inline.End();
        }
        if_lengthiszero.End();
      }
      result = ast_context()->IsEffect() ? graph()->GetConstant0() : Top();
      Add<HSimulate>(expr->id(), REMOVABLE_SIMULATE);
      if (!ast_context()->IsEffect()) Drop(1);
      ast_context()->ReturnValue(result);
      return true;
    }
    case kArrayIndexOf:
    case kArrayLastIndexOf: {
      if (receiver_map.is_null()) return false;
      if (receiver_map->instance_type() != JS_ARRAY_TYPE) return false;
      if (!receiver_map->prototype()->IsJSObject()) return false;
      ElementsKind kind = receiver_map->elements_kind();
      if (!IsFastElementsKind(kind)) return false;
      if (argument_count != 2) return false;
      if (!receiver_map->is_extensible()) return false;

      // If there may be elements accessors in the prototype chain, the fast
      // inlined version can't be used.
      if (receiver_map->DictionaryElementsInPrototypeChainOnly()) return false;

      // If there currently can be no elements accessors on the prototype
      // chain, it doesn't mean that there won't be any later. Install a full
      // prototype chain check to trap element accessors being installed on
      // the prototype chain, which would cause elements to go to dictionary
      // mode and result in a map change.
      BuildCheckPrototypeMaps(
          handle(JSObject::cast(receiver_map->prototype()), isolate()),
          Handle<JSObject>::null());

      HValue* search_element = Pop();
      HValue* receiver = Pop();
      Drop(1);  // Drop function.

      ArrayIndexOfMode mode = (id == kArrayIndexOf)
          ? kFirstIndexOf : kLastIndexOf;
      HValue* index = BuildArrayIndexOf(receiver, search_element, kind, mode);

      if (!ast_context()->IsEffect()) Push(index);
      Add<HSimulate>(expr->id(), REMOVABLE_SIMULATE);
      if (!ast_context()->IsEffect()) Drop(1);
      ast_context()->ReturnValue(index);
      return true;
    }
    default:
      // Not yet supported for inlining.
      break;
  }
  return false;
}
9349
9350
9351bool HOptimizedGraphBuilder::TryInlineApiFunctionCall(Call* expr,
9352 HValue* receiver) {
9353 Handle<JSFunction> function = expr->target();
9354 int argc = expr->arguments()->length();
9355 SmallMapList receiver_maps;
Ben Murdochda12d292016-06-02 14:46:10 +01009356 return TryInlineApiCall(function, receiver, &receiver_maps, argc, expr->id(),
9357 kCallApiFunction, expr->tail_call_mode());
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00009358}
9359
9360
9361bool HOptimizedGraphBuilder::TryInlineApiMethodCall(
9362 Call* expr,
9363 HValue* receiver,
9364 SmallMapList* receiver_maps) {
9365 Handle<JSFunction> function = expr->target();
9366 int argc = expr->arguments()->length();
Ben Murdochda12d292016-06-02 14:46:10 +01009367 return TryInlineApiCall(function, receiver, receiver_maps, argc, expr->id(),
9368 kCallApiMethod, expr->tail_call_mode());
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00009369}
9370
Ben Murdoch097c5b22016-05-18 11:27:45 +01009371bool HOptimizedGraphBuilder::TryInlineApiGetter(Handle<Object> function,
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00009372 Handle<Map> receiver_map,
9373 BailoutId ast_id) {
9374 SmallMapList receiver_maps(1, zone());
9375 receiver_maps.Add(receiver_map, zone());
9376 return TryInlineApiCall(function,
9377 NULL, // Receiver is on expression stack.
Ben Murdochda12d292016-06-02 14:46:10 +01009378 &receiver_maps, 0, ast_id, kCallApiGetter,
9379 TailCallMode::kDisallow);
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00009380}
9381
Ben Murdoch097c5b22016-05-18 11:27:45 +01009382bool HOptimizedGraphBuilder::TryInlineApiSetter(Handle<Object> function,
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00009383 Handle<Map> receiver_map,
9384 BailoutId ast_id) {
9385 SmallMapList receiver_maps(1, zone());
9386 receiver_maps.Add(receiver_map, zone());
9387 return TryInlineApiCall(function,
9388 NULL, // Receiver is on expression stack.
Ben Murdochda12d292016-06-02 14:46:10 +01009389 &receiver_maps, 1, ast_id, kCallApiSetter,
9390 TailCallMode::kDisallow);
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00009391}
9392
// Common back end for TryInlineApi{Function,Method}Call and
// TryInlineApi{Getter,Setter}: replaces a simple API callback invocation
// with a direct HCallWithDescriptor through CallApiCallbackStub. Returns
// true and emits the call on success; returns false without emitting
// anything when inlining is not possible. For getter/setter calls the
// receiver (and stored value) are popped from the expression stack instead
// of being passed in |receiver|.
bool HOptimizedGraphBuilder::TryInlineApiCall(
    Handle<Object> function, HValue* receiver, SmallMapList* receiver_maps,
    int argc, BailoutId ast_id, ApiCallType call_type,
    TailCallMode syntactic_tail_call_mode) {
  // Don't inline across native contexts: the callee belongs to a different
  // context than the code being compiled.
  if (function->IsJSFunction() &&
      Handle<JSFunction>::cast(function)->context()->native_context() !=
          top_info()->closure()->context()->native_context()) {
    return false;
  }
  // The callback stub only supports a bounded number of arguments.
  if (argc > CallApiCallbackStub::kArgMax) {
    return false;
  }

  CallOptimization optimization(function);
  if (!optimization.is_simple_api_call()) return false;
  Handle<Map> holder_map;
  for (int i = 0; i < receiver_maps->length(); ++i) {
    auto map = receiver_maps->at(i);
    // Don't inline calls to receivers requiring access checks.
    if (map->is_access_check_needed()) return false;
  }
  if (call_type == kCallApiFunction) {
    // Cannot embed a direct reference to the global proxy map
    // as it may be dropped on deserialization.
    CHECK(!isolate()->serializer_enabled());
    DCHECK(function->IsJSFunction());
    DCHECK_EQ(0, receiver_maps->length());
    // For function-style calls the implicit receiver is the global proxy.
    receiver_maps->Add(
        handle(Handle<JSFunction>::cast(function)->global_proxy()->map()),
        zone());
  }
  // Find the holder object carrying the API callback for the first
  // receiver map; bail out if there is none.
  CallOptimization::HolderLookup holder_lookup =
      CallOptimization::kHolderNotFound;
  Handle<JSObject> api_holder = optimization.LookupHolderOfExpectedType(
      receiver_maps->first(), &holder_lookup);
  if (holder_lookup == CallOptimization::kHolderNotFound) return false;

  if (FLAG_trace_inlining) {
    PrintF("Inlining api function ");
    function->ShortPrint();
    PrintF("\n");
  }

  bool is_function = false;
  bool is_store = false;
  switch (call_type) {
    case kCallApiFunction:
    case kCallApiMethod:
      // Need to check that none of the receiver maps could have changed.
      Add<HCheckMaps>(receiver, receiver_maps);
      // Need to ensure the chain between receiver and api_holder is intact.
      if (holder_lookup == CallOptimization::kHolderFound) {
        AddCheckPrototypeMaps(api_holder, receiver_maps->first());
      } else {
        DCHECK_EQ(holder_lookup, CallOptimization::kHolderIsReceiver);
      }
      // Includes receiver.
      PushArgumentsFromEnvironment(argc + 1);
      is_function = true;
      break;
    case kCallApiGetter:
      // Receiver and prototype chain cannot have changed.
      DCHECK_EQ(0, argc);
      DCHECK_NULL(receiver);
      // Receiver is on expression stack.
      receiver = Pop();
      Add<HPushArguments>(receiver);
      break;
    case kCallApiSetter:
      {
        is_store = true;
        // Receiver and prototype chain cannot have changed.
        DCHECK_EQ(1, argc);
        DCHECK_NULL(receiver);
        // Receiver and value are on expression stack.
        HValue* value = Pop();
        receiver = Pop();
        Add<HPushArguments>(receiver, value);
        break;
      }
  }

  // Materialize the holder argument for the stub call.
  HValue* holder = NULL;
  switch (holder_lookup) {
    case CallOptimization::kHolderFound:
      holder = Add<HConstant>(api_holder);
      break;
    case CallOptimization::kHolderIsReceiver:
      holder = receiver;
      break;
    case CallOptimization::kHolderNotFound:
      // Already rejected above.
      UNREACHABLE();
      break;
  }
  Handle<CallHandlerInfo> api_call_info = optimization.api_call_info();
  Handle<Object> call_data_obj(api_call_info->data(), isolate());
  bool call_data_undefined = call_data_obj->IsUndefined();
  HValue* call_data = Add<HConstant>(call_data_obj);
  ApiFunction fun(v8::ToCData<Address>(api_call_info->callback()));
  ExternalReference ref = ExternalReference(&fun,
                                            ExternalReference::DIRECT_API_CALL,
                                            isolate());
  HValue* api_function_address = Add<HConstant>(ExternalReference(ref));

  // Stub call operands; the trailing nullptr slot is excluded below via
  // arraysize(op_vals) - 1.
  HValue* op_vals[] = {context(), Add<HConstant>(function), call_data, holder,
                       api_function_address, nullptr};

  HInstruction* call = nullptr;
  CHECK(argc <= CallApiCallbackStub::kArgMax);
  if (!is_function) {
    // Accessor path: getter/setter variant of the callback stub.
    CallApiCallbackStub stub(isolate(), is_store, call_data_undefined,
                             !optimization.is_constant_call());
    Handle<Code> code = stub.GetCode();
    HConstant* code_value = Add<HConstant>(code);
    call = New<HCallWithDescriptor>(
        code_value, argc + 1, stub.GetCallInterfaceDescriptor(),
        Vector<HValue*>(op_vals, arraysize(op_vals) - 1),
        syntactic_tail_call_mode);
  } else {
    // Function/method path: argc-specialized callback stub.
    CallApiCallbackStub stub(isolate(), argc, call_data_undefined);
    Handle<Code> code = stub.GetCode();
    HConstant* code_value = Add<HConstant>(code);
    call = New<HCallWithDescriptor>(
        code_value, argc + 1, stub.GetCallInterfaceDescriptor(),
        Vector<HValue*>(op_vals, arraysize(op_vals) - 1),
        syntactic_tail_call_mode);
    Drop(1);  // Drop function.
  }

  ast_context()->ReturnInstruction(call, ast_id);
  return true;
}
9525
9526
// Emits a call to |function| with |arguments_count| values (including the
// receiver) already on the expression stack. Used by the f.call/f.apply
// forwarding paths. Tries builtin-method inlining and regular inlining when
// the callee is a known constant, then falls back to a generic
// HInvokeFunction.
void HOptimizedGraphBuilder::HandleIndirectCall(Call* expr, HValue* function,
                                                int arguments_count) {
  Handle<JSFunction> known_function;
  int args_count_no_receiver = arguments_count - 1;
  if (function->IsConstant() &&
      HConstant::cast(function)->handle(isolate())->IsJSFunction()) {
    known_function =
        Handle<JSFunction>::cast(HConstant::cast(function)->handle(isolate()));
    if (TryInlineBuiltinMethodCall(expr, known_function, Handle<Map>(),
                                   args_count_no_receiver)) {
      if (FLAG_trace_inlining) {
        PrintF("Inlining builtin ");
        known_function->ShortPrint();
        PrintF("\n");
      }
      return;
    }

    if (TryInlineIndirectCall(known_function, expr, args_count_no_receiver)) {
      return;
    }
  }

  TailCallMode syntactic_tail_call_mode = expr->tail_call_mode();
  TailCallMode tail_call_mode =
      function_state()->ComputeTailCallMode(syntactic_tail_call_mode);

  // Generic path: move the stacked arguments into the argument area and
  // invoke. known_function may be null here, which HInvokeFunction accepts.
  PushArgumentsFromEnvironment(arguments_count);
  HInvokeFunction* call =
      New<HInvokeFunction>(function, known_function, arguments_count,
                           syntactic_tail_call_mode, tail_call_mode);
  Drop(1);  // Function
  ast_context()->ReturnInstruction(call, expr->id());
}
9561
9562
9563bool HOptimizedGraphBuilder::TryIndirectCall(Call* expr) {
9564 DCHECK(expr->expression()->IsProperty());
9565
9566 if (!expr->IsMonomorphic()) {
9567 return false;
9568 }
9569 Handle<Map> function_map = expr->GetReceiverTypes()->first();
9570 if (function_map->instance_type() != JS_FUNCTION_TYPE ||
9571 !expr->target()->shared()->HasBuiltinFunctionId()) {
9572 return false;
9573 }
9574
9575 switch (expr->target()->shared()->builtin_function_id()) {
9576 case kFunctionCall: {
9577 if (expr->arguments()->length() == 0) return false;
9578 BuildFunctionCall(expr);
9579 return true;
9580 }
9581 case kFunctionApply: {
9582 // For .apply, only the pattern f.apply(receiver, arguments)
9583 // is supported.
9584 if (current_info()->scope()->arguments() == NULL) return false;
9585
9586 if (!CanBeFunctionApplyArguments(expr)) return false;
9587
9588 BuildFunctionApply(expr);
9589 return true;
9590 }
9591 default: { return false; }
9592 }
9593 UNREACHABLE();
9594}
9595
9596
// f.apply(...)
// Builds the graph for the supported pattern f.apply(receiver, arguments).
// On entry the expression stack holds: f, apply, receiver-expression
// (not yet visited). Outside of inlining this becomes an HApplyArguments;
// inside an inlined function the materialized argument values are pushed
// individually and dispatched through HandleIndirectCall.
void HOptimizedGraphBuilder::BuildFunctionApply(Call* expr) {
  ZoneList<Expression*>* args = expr->arguments();
  CHECK_ALIVE(VisitForValue(args->at(0)));
  HValue* receiver = Pop();  // receiver
  HValue* function = Pop();  // f
  Drop(1);  // apply

  Handle<Map> function_map = expr->GetReceiverTypes()->first();
  HValue* checked_function = AddCheckMap(function, function_map);

  if (function_state()->outer() == NULL) {
    // Not inlined: forward the caller's actual arguments object.
    TailCallMode syntactic_tail_call_mode = expr->tail_call_mode();
    TailCallMode tail_call_mode =
        function_state()->ComputeTailCallMode(syntactic_tail_call_mode);

    HInstruction* elements = Add<HArgumentsElements>(false);
    HInstruction* length = Add<HArgumentsLength>(elements);
    HValue* wrapped_receiver = BuildWrapReceiver(receiver, checked_function);
    HInstruction* result = New<HApplyArguments>(
        function, wrapped_receiver, length, elements, tail_call_mode);
    ast_context()->ReturnInstruction(result, expr->id());
  } else {
    // We are inside inlined function and we know exactly what is inside
    // arguments object. But we need to be able to materialize at deopt.
    DCHECK_EQ(environment()->arguments_environment()->parameter_count(),
              function_state()->entry()->arguments_object()->arguments_count());
    HArgumentsObject* args = function_state()->entry()->arguments_object();
    const ZoneList<HValue*>* arguments_values = args->arguments_values();
    int arguments_count = arguments_values->length();
    Push(function);
    Push(BuildWrapReceiver(receiver, checked_function));
    // Skip index 0: the inlined function's own receiver is replaced by the
    // wrapped receiver pushed above.
    for (int i = 1; i < arguments_count; i++) {
      Push(arguments_values->at(i));
    }
    HandleIndirectCall(expr, function, arguments_count);
  }
}
9635
9636
// f.call(...)
// Builds the graph for f.call(receiver, args...): wraps the explicit
// receiver, removes the 'call' slot from the expression stack, and
// dispatches through HandleIndirectCall as if f had been invoked directly.
void HOptimizedGraphBuilder::BuildFunctionCall(Call* expr) {
  HValue* function = Top();  // f
  Handle<Map> function_map = expr->GetReceiverTypes()->first();
  HValue* checked_function = AddCheckMap(function, function_map);

  // f and call are on the stack in the unoptimized code
  // during evaluation of the arguments.
  CHECK_ALIVE(VisitExpressions(expr->arguments()));

  int args_length = expr->arguments()->length();
  // The first evaluated argument becomes the receiver of the forwarded call.
  int receiver_index = args_length - 1;
  // Patch the receiver.
  HValue* receiver = BuildWrapReceiver(
      environment()->ExpressionStackAt(receiver_index), checked_function);
  environment()->SetExpressionStackAt(receiver_index, receiver);

  // Call must not be on the stack from now on.
  int call_index = args_length + 1;
  environment()->RemoveExpressionStackAt(call_index);

  HandleIndirectCall(expr, function, args_length);
}
9660
9661
9662HValue* HOptimizedGraphBuilder::ImplicitReceiverFor(HValue* function,
9663 Handle<JSFunction> target) {
9664 SharedFunctionInfo* shared = target->shared();
9665 if (is_sloppy(shared->language_mode()) && !shared->native()) {
9666 // Cannot embed a direct reference to the global proxy
9667 // as is it dropped on deserialization.
9668 CHECK(!isolate()->serializer_enabled());
9669 Handle<JSObject> global_proxy(target->context()->global_proxy());
9670 return Add<HConstant>(global_proxy);
9671 }
9672 return graph()->GetConstantUndefined();
9673}
9674
9675
9676void HOptimizedGraphBuilder::BuildArrayCall(Expression* expression,
9677 int arguments_count,
9678 HValue* function,
9679 Handle<AllocationSite> site) {
9680 Add<HCheckValue>(function, array_function());
9681
9682 if (IsCallArrayInlineable(arguments_count, site)) {
9683 BuildInlinedCallArray(expression, arguments_count, site);
9684 return;
9685 }
9686
9687 HInstruction* call = PreProcessCall(New<HCallNewArray>(
9688 function, arguments_count + 1, site->GetElementsKind(), site));
9689 if (expression->IsCall()) {
9690 Drop(1);
9691 }
9692 ast_context()->ReturnInstruction(call, expression->id());
9693}
9694
9695
// Builds the graph for inlined Array.prototype.indexOf/lastIndexOf over a
// fast-elements receiver. Pushes -1 (not found) as the running result on
// the expression stack and replaces it with the matching index when a
// strictly-equal element is found; the final Pop() returns that slot.
// The loop direction and bounds depend on |mode| (first vs. last index).
HValue* HOptimizedGraphBuilder::BuildArrayIndexOf(HValue* receiver,
                                                  HValue* search_element,
                                                  ElementsKind kind,
                                                  ArrayIndexOfMode mode) {
  DCHECK(IsFastElementsKind(kind));

  NoObservableSideEffectsScope no_effects(this);

  HValue* elements = AddLoadElements(receiver);
  HValue* length = AddLoadArrayLength(receiver, kind);

  // Loop configuration: forward scan for indexOf, backward for lastIndexOf.
  HValue* initial;
  HValue* terminating;
  Token::Value token;
  LoopBuilder::Direction direction;
  if (mode == kFirstIndexOf) {
    initial = graph()->GetConstant0();
    terminating = length;
    token = Token::LT;
    direction = LoopBuilder::kPostIncrement;
  } else {
    DCHECK_EQ(kLastIndexOf, mode);
    initial = length;
    terminating = graph()->GetConstant0();
    token = Token::GT;
    direction = LoopBuilder::kPreDecrement;
  }

  // Default result: -1, overwritten on a match via Drop(1)/Push(index).
  Push(graph()->GetConstantMinus1());
  if (IsFastDoubleElementsKind(kind) || IsFastSmiElementsKind(kind)) {
    // Make sure that we can actually compare numbers correctly below, see
    // https://code.google.com/p/chromium/issues/detail?id=407946 for details.
    search_element = AddUncasted<HForceRepresentation>(
        search_element, IsFastSmiElementsKind(kind) ? Representation::Smi()
                                                    : Representation::Double());

    // Numeric elements: a single loop with numeric strict-equality.
    LoopBuilder loop(this, context(), direction);
    {
      HValue* index = loop.BeginBody(initial, terminating, token);
      HValue* element = AddUncasted<HLoadKeyed>(
          elements, index, nullptr, nullptr, kind, ALLOW_RETURN_HOLE);
      IfBuilder if_issame(this);
      if_issame.If<HCompareNumericAndBranch>(element, search_element,
                                             Token::EQ_STRICT);
      if_issame.Then();
      {
        Drop(1);
        Push(index);
        loop.Break();
      }
      if_issame.End();
    }
    loop.EndBody();
  } else {
    // Object elements: dispatch on the type of the search element to pick
    // the right strict-equality semantics (string / number / identity).
    IfBuilder if_isstring(this);
    if_isstring.If<HIsStringAndBranch>(search_element);
    if_isstring.Then();
    {
      // String search element: compare with string equality against string
      // elements only.
      LoopBuilder loop(this, context(), direction);
      {
        HValue* index = loop.BeginBody(initial, terminating, token);
        HValue* element = AddUncasted<HLoadKeyed>(
            elements, index, nullptr, nullptr, kind, ALLOW_RETURN_HOLE);
        IfBuilder if_issame(this);
        if_issame.If<HIsStringAndBranch>(element);
        if_issame.AndIf<HStringCompareAndBranch>(
            element, search_element, Token::EQ_STRICT);
        if_issame.Then();
        {
          Drop(1);
          Push(index);
          loop.Break();
        }
        if_issame.End();
      }
      loop.EndBody();
    }
    if_isstring.Else();
    {
      IfBuilder if_isnumber(this);
      if_isnumber.If<HIsSmiAndBranch>(search_element);
      if_isnumber.OrIf<HCompareMap>(
          search_element, isolate()->factory()->heap_number_map());
      if_isnumber.Then();
      {
        // Numeric search element: compare as doubles against numeric
        // elements only.
        HValue* search_number =
            AddUncasted<HForceRepresentation>(search_element,
                                              Representation::Double());
        LoopBuilder loop(this, context(), direction);
        {
          HValue* index = loop.BeginBody(initial, terminating, token);
          HValue* element = AddUncasted<HLoadKeyed>(
              elements, index, nullptr, nullptr, kind, ALLOW_RETURN_HOLE);

          IfBuilder if_element_isnumber(this);
          if_element_isnumber.If<HIsSmiAndBranch>(element);
          if_element_isnumber.OrIf<HCompareMap>(
              element, isolate()->factory()->heap_number_map());
          if_element_isnumber.Then();
          {
            HValue* number =
                AddUncasted<HForceRepresentation>(element,
                                                  Representation::Double());
            IfBuilder if_issame(this);
            if_issame.If<HCompareNumericAndBranch>(
                number, search_number, Token::EQ_STRICT);
            if_issame.Then();
            {
              Drop(1);
              Push(index);
              loop.Break();
            }
            if_issame.End();
          }
          if_element_isnumber.End();
        }
        loop.EndBody();
      }
      if_isnumber.Else();
      {
        // Any other search element: plain reference identity.
        LoopBuilder loop(this, context(), direction);
        {
          HValue* index = loop.BeginBody(initial, terminating, token);
          HValue* element = AddUncasted<HLoadKeyed>(
              elements, index, nullptr, nullptr, kind, ALLOW_RETURN_HOLE);
          IfBuilder if_issame(this);
          if_issame.If<HCompareObjectEqAndBranch>(
              element, search_element);
          if_issame.Then();
          {
            Drop(1);
            Push(index);
            loop.Break();
          }
          if_issame.End();
        }
        loop.EndBody();
      }
      if_isnumber.End();
    }
    if_isstring.End();
  }

  // Result slot: either -1 or the first/last matching index.
  return Pop();
}
9841
9842
9843bool HOptimizedGraphBuilder::TryHandleArrayCall(Call* expr, HValue* function) {
9844 if (!array_function().is_identical_to(expr->target())) {
9845 return false;
9846 }
9847
9848 Handle<AllocationSite> site = expr->allocation_site();
9849 if (site.is_null()) return false;
9850
9851 BuildArrayCall(expr,
9852 expr->arguments()->length(),
9853 function,
9854 site);
9855 return true;
9856}
9857
9858
9859bool HOptimizedGraphBuilder::TryHandleArrayCallNew(CallNew* expr,
9860 HValue* function) {
9861 if (!array_function().is_identical_to(expr->target())) {
9862 return false;
9863 }
9864
9865 Handle<AllocationSite> site = expr->allocation_site();
9866 if (site.is_null()) return false;
9867
9868 BuildArrayCall(expr, expr->arguments()->length(), function, site);
9869 return true;
9870}
9871
9872
9873bool HOptimizedGraphBuilder::CanBeFunctionApplyArguments(Call* expr) {
9874 ZoneList<Expression*>* args = expr->arguments();
9875 if (args->length() != 2) return false;
9876 VariableProxy* arg_two = args->at(1)->AsVariableProxy();
9877 if (arg_two == NULL || !arg_two->var()->IsStackAllocated()) return false;
9878 HValue* arg_two_value = LookupAndMakeLive(arg_two->var());
9879 if (!arg_two_value->CheckFlag(HValue::kIsArguments)) return false;
9880 return true;
9881}
9882
9883
// AST visitor for Call expressions. Two main shapes are handled:
// property calls (o.f(...) / o[k](...)) and plain calls (f(...)). Each
// shape first tries the various inlining strategies (builtin, API, Array,
// regular) and falls back to an appropriate call instruction. Note the
// careful expression-stack bookkeeping: the callee is kept under the
// receiver/arguments and dropped at the very end.
void HOptimizedGraphBuilder::VisitCall(Call* expr) {
  DCHECK(!HasStackOverflow());
  DCHECK(current_block() != NULL);
  DCHECK(current_block()->HasPredecessor());
  if (!top_info()->is_tracking_positions()) SetSourcePosition(expr->position());
  Expression* callee = expr->expression();
  int argument_count = expr->arguments()->length() + 1;  // Plus receiver.
  HInstruction* call = NULL;

  TailCallMode syntactic_tail_call_mode = expr->tail_call_mode();
  TailCallMode tail_call_mode =
      function_state()->ComputeTailCallMode(syntactic_tail_call_mode);

  Property* prop = callee->AsProperty();
  if (prop != NULL) {
    // Property call: o.f(...) or o[k](...).
    CHECK_ALIVE(VisitForValue(prop->obj()));
    HValue* receiver = Top();

    SmallMapList* maps;
    ComputeReceiverTypes(expr, receiver, &maps, this);

    if (prop->key()->IsPropertyName() && maps->length() > 0) {
      Handle<String> name = prop->key()->AsLiteral()->AsPropertyName();
      PropertyAccessInfo info(this, LOAD, maps->first(), name);
      if (!info.CanAccessAsMonomorphic(maps)) {
        // Multiple incompatible receiver maps: emit a polymorphic dispatch.
        HandlePolymorphicCallNamed(expr, receiver, maps, name);
        return;
      }
    }
    HValue* key = NULL;
    if (!prop->key()->IsPropertyName()) {
      CHECK_ALIVE(VisitForValue(prop->key()));
      key = Pop();
    }

    CHECK_ALIVE(PushLoad(prop, receiver, key));
    HValue* function = Pop();

    if (function->IsConstant() &&
        HConstant::cast(function)->handle(isolate())->IsJSFunction()) {
      // Known constant callee: try the specialized inlining paths.
      // Push the function under the receiver.
      environment()->SetExpressionStackAt(0, function);
      Push(receiver);

      Handle<JSFunction> known_function = Handle<JSFunction>::cast(
          HConstant::cast(function)->handle(isolate()));
      expr->set_target(known_function);

      if (TryIndirectCall(expr)) return;
      CHECK_ALIVE(VisitExpressions(expr->arguments()));

      Handle<Map> map = maps->length() == 1 ? maps->first() : Handle<Map>();
      if (TryInlineBuiltinMethodCall(expr, known_function, map,
                                     expr->arguments()->length())) {
        if (FLAG_trace_inlining) {
          PrintF("Inlining builtin ");
          known_function->ShortPrint();
          PrintF("\n");
        }
        return;
      }
      if (TryInlineApiMethodCall(expr, receiver, maps)) return;

      // Wrap the receiver if necessary.
      if (NeedsWrapping(maps->first(), known_function)) {
        // Since HWrapReceiver currently cannot actually wrap numbers and
        // strings, use the regular call builtin for method calls to wrap
        // the receiver.
        // TODO(verwaest): Support creation of value wrappers directly in
        // HWrapReceiver.
        call = NewCallFunction(
            function, argument_count, syntactic_tail_call_mode,
            ConvertReceiverMode::kNotNullOrUndefined, tail_call_mode);
      } else if (TryInlineCall(expr)) {
        return;
      } else {
        call =
            NewCallConstantFunction(known_function, argument_count,
                                    syntactic_tail_call_mode, tail_call_mode);
      }

    } else {
      // Unknown callee: generic call, possibly with faked arguments when
      // the f.apply(x, arguments) pattern is still uninitialized.
      ArgumentsAllowedFlag arguments_flag = ARGUMENTS_NOT_ALLOWED;
      if (CanBeFunctionApplyArguments(expr) && expr->is_uninitialized()) {
        // We have to use EAGER deoptimization here because Deoptimizer::SOFT
        // gets ignored by the always-opt flag, which leads to incorrect code.
        Add<HDeoptimize>(
            Deoptimizer::kInsufficientTypeFeedbackForCallWithArguments,
            Deoptimizer::EAGER);
        arguments_flag = ARGUMENTS_FAKED;
      }

      // Push the function under the receiver.
      environment()->SetExpressionStackAt(0, function);
      Push(receiver);

      CHECK_ALIVE(VisitExpressions(expr->arguments(), arguments_flag));
      call = NewCallFunction(function, argument_count, syntactic_tail_call_mode,
                             ConvertReceiverMode::kNotNullOrUndefined,
                             tail_call_mode);
    }
    PushArgumentsFromEnvironment(argument_count);

  } else {
    // Plain call: f(...).
    VariableProxy* proxy = expr->expression()->AsVariableProxy();
    if (proxy != NULL && proxy->var()->is_possibly_eval(isolate())) {
      return Bailout(kPossibleDirectCallToEval);
    }

    // The function is on the stack in the unoptimized code during
    // evaluation of the arguments.
    CHECK_ALIVE(VisitForValue(expr->expression()));
    HValue* function = Top();
    if (function->IsConstant() &&
        HConstant::cast(function)->handle(isolate())->IsJSFunction()) {
      Handle<Object> constant = HConstant::cast(function)->handle(isolate());
      Handle<JSFunction> target = Handle<JSFunction>::cast(constant);
      expr->SetKnownGlobalTarget(target);
    }

    // Placeholder for the receiver.
    Push(graph()->GetConstantUndefined());
    CHECK_ALIVE(VisitExpressions(expr->arguments()));

    if (expr->IsMonomorphic() &&
        !IsClassConstructor(expr->target()->shared()->kind())) {
      Add<HCheckValue>(function, expr->target());

      // Patch the global object on the stack by the expected receiver.
      HValue* receiver = ImplicitReceiverFor(function, expr->target());
      const int receiver_index = argument_count - 1;
      environment()->SetExpressionStackAt(receiver_index, receiver);

      if (TryInlineBuiltinFunctionCall(expr)) {
        if (FLAG_trace_inlining) {
          PrintF("Inlining builtin ");
          expr->target()->ShortPrint();
          PrintF("\n");
        }
        return;
      }
      if (TryInlineApiFunctionCall(expr, receiver)) return;
      if (TryHandleArrayCall(expr, function)) return;
      if (TryInlineCall(expr)) return;

      PushArgumentsFromEnvironment(argument_count);
      call = NewCallConstantFunction(expr->target(), argument_count,
                                     syntactic_tail_call_mode, tail_call_mode);
    } else {
      PushArgumentsFromEnvironment(argument_count);
      if (expr->is_uninitialized() &&
          expr->IsUsingCallFeedbackICSlot(isolate())) {
        // We've never seen this call before, so let's have Crankshaft learn
        // through the type vector.
        call = NewCallFunctionViaIC(function, argument_count,
                                    syntactic_tail_call_mode,
                                    ConvertReceiverMode::kNullOrUndefined,
                                    tail_call_mode, expr->CallFeedbackICSlot());
      } else {
        call = NewCallFunction(
            function, argument_count, syntactic_tail_call_mode,
            ConvertReceiverMode::kNullOrUndefined, tail_call_mode);
      }
    }
  }

  Drop(1);  // Drop the function.
  return ast_context()->ReturnInstruction(call, expr->id());
}
10053
10054
// Inlines Array(...) / new Array(...) with zero or one argument by
// allocating the array in place instead of calling the Array constructor.
// The allocation site's elements-kind feedback determines the layout; the
// compilation registers a dependency so a later transition deoptimizes.
void HOptimizedGraphBuilder::BuildInlinedCallArray(
    Expression* expression,
    int argument_count,
    Handle<AllocationSite> site) {
  DCHECK(!site.is_null());
  DCHECK(argument_count >= 0 && argument_count <= 1);
  NoObservableSideEffectsScope no_effects(this);

  // We should at least have the constructor on the expression stack.
  HValue* constructor = environment()->ExpressionStackAt(argument_count);

  // Register on the site for deoptimization if the transition feedback changes.
  top_info()->dependencies()->AssumeTransitionStable(site);
  ElementsKind kind = site->GetElementsKind();
  HInstruction* site_instruction = Add<HConstant>(site);

  // In the single constant argument case, we may have to adjust elements kind
  // to avoid creating a packed non-empty array.
  if (argument_count == 1 && !IsHoleyElementsKind(kind)) {
    HValue* argument = environment()->Top();
    if (argument->IsConstant()) {
      HConstant* constant_argument = HConstant::cast(argument);
      DCHECK(constant_argument->HasSmiValue());
      int constant_array_size = constant_argument->Integer32Value();
      if (constant_array_size != 0) {
        // Array(n) with n > 0 produces n holes, so the kind must be holey.
        kind = GetHoleyElementsKind(kind);
      }
    }
  }

  // Build the array.
  JSArrayBuilder array_builder(this,
                               kind,
                               site_instruction,
                               constructor,
                               DISABLE_ALLOCATION_SITES);
  HValue* new_object = argument_count == 0
      ? array_builder.AllocateEmptyArray()
      : BuildAllocateArrayFromLength(&array_builder, Top());

  // Drop the argument (if any), the constructor, and -- for a plain call --
  // the function slot as well.
  int args_to_drop = argument_count + (expression->IsCall() ? 2 : 1);
  Drop(args_to_drop);
  ast_context()->ReturnValue(new_object);
}
10099
10100
10101// Checks whether allocation using the given constructor can be inlined.
10102static bool IsAllocationInlineable(Handle<JSFunction> constructor) {
10103 return constructor->has_initial_map() &&
Ben Murdoch097c5b22016-05-18 11:27:45 +010010104 !IsSubclassConstructor(constructor->shared()->kind()) &&
Ben Murdoch4a90d5f2016-03-22 12:00:34 +000010105 constructor->initial_map()->instance_type() == JS_OBJECT_TYPE &&
10106 constructor->initial_map()->instance_size() <
10107 HAllocate::kMaxInlineSize;
10108}
10109
10110
10111bool HOptimizedGraphBuilder::IsCallArrayInlineable(
10112 int argument_count,
10113 Handle<AllocationSite> site) {
10114 Handle<JSFunction> caller = current_info()->closure();
10115 Handle<JSFunction> target = array_function();
10116 // We should have the function plus array arguments on the environment stack.
10117 DCHECK(environment()->length() >= (argument_count + 1));
10118 DCHECK(!site.is_null());
10119
10120 bool inline_ok = false;
10121 if (site->CanInlineCall()) {
10122 // We also want to avoid inlining in certain 1 argument scenarios.
10123 if (argument_count == 1) {
10124 HValue* argument = Top();
10125 if (argument->IsConstant()) {
10126 // Do not inline if the constant length argument is not a smi or
10127 // outside the valid range for unrolled loop initialization.
10128 HConstant* constant_argument = HConstant::cast(argument);
10129 if (constant_argument->HasSmiValue()) {
10130 int value = constant_argument->Integer32Value();
10131 inline_ok = value >= 0 && value <= kElementLoopUnrollThreshold;
10132 if (!inline_ok) {
10133 TraceInline(target, caller,
10134 "Constant length outside of valid inlining range.");
10135 }
10136 }
10137 } else {
10138 TraceInline(target, caller,
10139 "Dont inline [new] Array(n) where n isn't constant.");
10140 }
10141 } else if (argument_count == 0) {
10142 inline_ok = true;
10143 } else {
10144 TraceInline(target, caller, "Too many arguments to inline.");
10145 }
10146 } else {
10147 TraceInline(target, caller, "AllocationSite requested no inlining.");
10148 }
10149
10150 if (inline_ok) {
10151 TraceInline(target, caller, NULL);
10152 }
10153 return inline_ok;
10154}
10155
10156
// Compiles a 'new' expression. When the call site is monomorphic,
// FLAG_inline_construct is on, and the allocation is inlinable, the receiver
// object is allocated inline and an attempt is made to also inline the
// constructor body; on failure the speculatively emitted allocation is torn
// back out. Otherwise the generic Construct stub is called.
void HOptimizedGraphBuilder::VisitCallNew(CallNew* expr) {
  DCHECK(!HasStackOverflow());
  DCHECK(current_block() != NULL);
  DCHECK(current_block()->HasPredecessor());
  if (!top_info()->is_tracking_positions()) SetSourcePosition(expr->position());
  int argument_count = expr->arguments()->length() + 1;  // Plus constructor.
  Factory* factory = isolate()->factory();

  // The constructor function is on the stack in the unoptimized code
  // during evaluation of the arguments.
  CHECK_ALIVE(VisitForValue(expr->expression()));
  HValue* function = Top();
  CHECK_ALIVE(VisitExpressions(expr->arguments()));

  // A constant JSFunction callee makes the call site monomorphic.
  if (function->IsConstant() &&
      HConstant::cast(function)->handle(isolate())->IsJSFunction()) {
    Handle<Object> constant = HConstant::cast(function)->handle(isolate());
    expr->SetKnownGlobalTarget(Handle<JSFunction>::cast(constant));
  }

  if (FLAG_inline_construct &&
      expr->IsMonomorphic() &&
      IsAllocationInlineable(expr->target())) {
    Handle<JSFunction> constructor = expr->target();
    DCHECK(
        constructor->shared()->construct_stub() ==
            isolate()->builtins()->builtin(Builtins::kJSConstructStubGeneric) ||
        constructor->shared()->construct_stub() ==
            isolate()->builtins()->builtin(Builtins::kJSConstructStubApi));
    // Deoptimize if the actual callee at runtime differs from the expected
    // constructor.
    HValue* check = Add<HCheckValue>(function, constructor);

    // Force completion of inobject slack tracking before generating
    // allocation code to finalize instance size.
    constructor->CompleteInobjectSlackTrackingIfActive();

    // Calculate instance size from initial map of constructor.
    DCHECK(constructor->has_initial_map());
    Handle<Map> initial_map(constructor->initial_map());
    int instance_size = initial_map->instance_size();

    // Allocate an instance of the implicit receiver object.
    HValue* size_in_bytes = Add<HConstant>(instance_size);
    HAllocationMode allocation_mode;
    HAllocate* receiver = BuildAllocate(
        size_in_bytes, HType::JSObject(), JS_OBJECT_TYPE, allocation_mode);
    receiver->set_known_initial_map(initial_map);

    // Initialize map and fields of the newly allocated object.
    { NoObservableSideEffectsScope no_effects(this);
      DCHECK(initial_map->instance_type() == JS_OBJECT_TYPE);
      Add<HStoreNamedField>(receiver,
          HObjectAccess::ForMapAndOffset(initial_map, JSObject::kMapOffset),
          Add<HConstant>(initial_map));
      HValue* empty_fixed_array = Add<HConstant>(factory->empty_fixed_array());
      Add<HStoreNamedField>(receiver,
          HObjectAccess::ForMapAndOffset(initial_map,
                                         JSObject::kPropertiesOffset),
          empty_fixed_array);
      Add<HStoreNamedField>(receiver,
          HObjectAccess::ForMapAndOffset(initial_map,
                                         JSObject::kElementsOffset),
          empty_fixed_array);
      BuildInitializeInobjectProperties(receiver, initial_map);
    }

    // Replace the constructor function with a newly allocated receiver using
    // the index of the receiver from the top of the expression stack.
    const int receiver_index = argument_count - 1;
    DCHECK(environment()->ExpressionStackAt(receiver_index) == function);
    environment()->SetExpressionStackAt(receiver_index, receiver);

    if (TryInlineConstruct(expr, receiver)) {
      // Inlining worked, add a dependency on the initial map to make sure that
      // this code is deoptimized whenever the initial map of the constructor
      // changes.
      top_info()->dependencies()->AssumeInitialMapCantChange(initial_map);
      return;
    }

    // TODO(mstarzinger): For now we remove the previous HAllocate and all
    // corresponding instructions and instead add HPushArguments for the
    // arguments in case inlining failed. What we actually should do is for
    // inlining to try to build a subgraph without mutating the parent graph.
    // Walk backwards from the last emitted instruction to the HCheckValue,
    // deleting everything emitted for the speculative inline allocation.
    HInstruction* instr = current_block()->last();
    do {
      HInstruction* prev_instr = instr->previous();
      instr->DeleteAndReplaceWith(NULL);
      instr = prev_instr;
    } while (instr != check);
    environment()->SetExpressionStackAt(receiver_index, function);
  } else {
    // The constructor function is both an operand to the instruction and an
    // argument to the construct call.
    if (TryHandleArrayCallNew(expr, function)) return;
  }

  // Generic path: invoke the Construct stub.
  HValue* arity = Add<HConstant>(argument_count - 1);
  HValue* op_vals[] = {context(), function, function, arity};
  Callable callable = CodeFactory::Construct(isolate());
  HConstant* stub = Add<HConstant>(callable.code());
  PushArgumentsFromEnvironment(argument_count);
  HInstruction* construct = New<HCallWithDescriptor>(
      stub, argument_count, callable.descriptor(), ArrayVector(op_vals));
  return ast_context()->ReturnInstruction(construct, expr->id());
}
10262
10263
10264void HOptimizedGraphBuilder::BuildInitializeInobjectProperties(
10265 HValue* receiver, Handle<Map> initial_map) {
10266 if (initial_map->GetInObjectProperties() != 0) {
10267 HConstant* undefined = graph()->GetConstantUndefined();
10268 for (int i = 0; i < initial_map->GetInObjectProperties(); i++) {
10269 int property_offset = initial_map->GetInObjectPropertyOffset(i);
10270 Add<HStoreNamedField>(receiver, HObjectAccess::ForMapAndOffset(
10271 initial_map, property_offset),
10272 undefined);
10273 }
10274 }
10275}
10276
10277
10278HValue* HGraphBuilder::BuildAllocateEmptyArrayBuffer(HValue* byte_length) {
10279 // We HForceRepresentation here to avoid allocations during an *-to-tagged
10280 // HChange that could cause GC while the array buffer object is not fully
10281 // initialized.
10282 HObjectAccess byte_length_access(HObjectAccess::ForJSArrayBufferByteLength());
10283 byte_length = AddUncasted<HForceRepresentation>(
10284 byte_length, byte_length_access.representation());
10285 HAllocate* result =
10286 BuildAllocate(Add<HConstant>(JSArrayBuffer::kSizeWithInternalFields),
10287 HType::JSObject(), JS_ARRAY_BUFFER_TYPE, HAllocationMode());
10288
10289 HValue* native_context = BuildGetNativeContext();
10290 Add<HStoreNamedField>(
10291 result, HObjectAccess::ForMap(),
10292 Add<HLoadNamedField>(
10293 native_context, nullptr,
10294 HObjectAccess::ForContextSlot(Context::ARRAY_BUFFER_MAP_INDEX)));
10295
10296 HConstant* empty_fixed_array =
10297 Add<HConstant>(isolate()->factory()->empty_fixed_array());
10298 Add<HStoreNamedField>(
10299 result, HObjectAccess::ForJSArrayOffset(JSArray::kPropertiesOffset),
10300 empty_fixed_array);
10301 Add<HStoreNamedField>(
10302 result, HObjectAccess::ForJSArrayOffset(JSArray::kElementsOffset),
10303 empty_fixed_array);
10304 Add<HStoreNamedField>(
10305 result, HObjectAccess::ForJSArrayBufferBackingStore().WithRepresentation(
10306 Representation::Smi()),
10307 graph()->GetConstant0());
10308 Add<HStoreNamedField>(result, byte_length_access, byte_length);
10309 Add<HStoreNamedField>(result, HObjectAccess::ForJSArrayBufferBitFieldSlot(),
10310 graph()->GetConstant0());
10311 Add<HStoreNamedField>(
10312 result, HObjectAccess::ForJSArrayBufferBitField(),
10313 Add<HConstant>((1 << JSArrayBuffer::IsExternal::kShift) |
10314 (1 << JSArrayBuffer::IsNeuterable::kShift)));
10315
10316 for (int field = 0; field < v8::ArrayBuffer::kInternalFieldCount; ++field) {
10317 Add<HStoreNamedField>(
10318 result,
10319 HObjectAccess::ForObservableJSObjectOffset(
10320 JSArrayBuffer::kSize + field * kPointerSize, Representation::Smi()),
10321 graph()->GetConstant0());
10322 }
10323
10324 return result;
10325}
10326
10327
10328template <class ViewClass>
10329void HGraphBuilder::BuildArrayBufferViewInitialization(
10330 HValue* obj,
10331 HValue* buffer,
10332 HValue* byte_offset,
10333 HValue* byte_length) {
10334
10335 for (int offset = ViewClass::kSize;
10336 offset < ViewClass::kSizeWithInternalFields;
10337 offset += kPointerSize) {
10338 Add<HStoreNamedField>(obj,
10339 HObjectAccess::ForObservableJSObjectOffset(offset),
10340 graph()->GetConstant0());
10341 }
10342
10343 Add<HStoreNamedField>(
10344 obj,
10345 HObjectAccess::ForJSArrayBufferViewByteOffset(),
10346 byte_offset);
10347 Add<HStoreNamedField>(
10348 obj,
10349 HObjectAccess::ForJSArrayBufferViewByteLength(),
10350 byte_length);
10351 Add<HStoreNamedField>(obj, HObjectAccess::ForJSArrayBufferViewBuffer(),
10352 buffer);
10353}
10354
10355
Ben Murdoch4a90d5f2016-03-22 12:00:34 +000010356HValue* HOptimizedGraphBuilder::BuildAllocateExternalElements(
10357 ExternalArrayType array_type,
10358 bool is_zero_byte_offset,
10359 HValue* buffer, HValue* byte_offset, HValue* length) {
10360 Handle<Map> external_array_map(
10361 isolate()->heap()->MapForFixedTypedArray(array_type));
10362
10363 // The HForceRepresentation is to prevent possible deopt on int-smi
10364 // conversion after allocation but before the new object fields are set.
10365 length = AddUncasted<HForceRepresentation>(length, Representation::Smi());
10366 HValue* elements = Add<HAllocate>(
10367 Add<HConstant>(FixedTypedArrayBase::kHeaderSize), HType::HeapObject(),
Ben Murdochc5610432016-08-08 18:44:38 +010010368 NOT_TENURED, external_array_map->instance_type(),
10369 graph()->GetConstant0());
Ben Murdoch4a90d5f2016-03-22 12:00:34 +000010370
10371 AddStoreMapConstant(elements, external_array_map);
10372 Add<HStoreNamedField>(elements,
10373 HObjectAccess::ForFixedArrayLength(), length);
10374
10375 HValue* backing_store = Add<HLoadNamedField>(
10376 buffer, nullptr, HObjectAccess::ForJSArrayBufferBackingStore());
10377
10378 HValue* typed_array_start;
10379 if (is_zero_byte_offset) {
10380 typed_array_start = backing_store;
10381 } else {
10382 HInstruction* external_pointer =
10383 AddUncasted<HAdd>(backing_store, byte_offset);
10384 // Arguments are checked prior to call to TypedArrayInitialize,
10385 // including byte_offset.
10386 external_pointer->ClearFlag(HValue::kCanOverflow);
10387 typed_array_start = external_pointer;
10388 }
10389
10390 Add<HStoreNamedField>(elements,
10391 HObjectAccess::ForFixedTypedArrayBaseBasePointer(),
10392 graph()->GetConstant0());
10393 Add<HStoreNamedField>(elements,
10394 HObjectAccess::ForFixedTypedArrayBaseExternalPointer(),
10395 typed_array_start);
10396
10397 return elements;
10398}
10399
10400
// Allocates an on-heap FixedTypedArray for |array_type| holding |length|
// elements (|byte_length| bytes of payload). When |initialize| is true the
// payload is zero-filled by an explicit loop over the elements.
HValue* HOptimizedGraphBuilder::BuildAllocateFixedTypedArray(
    ExternalArrayType array_type, size_t element_size,
    ElementsKind fixed_elements_kind, HValue* byte_length, HValue* length,
    bool initialize) {
  STATIC_ASSERT(
      (FixedTypedArrayBase::kHeaderSize & kObjectAlignmentMask) == 0);
  HValue* total_size;

  // if fixed array's elements are not aligned to object's alignment,
  // we need to align the whole array to object alignment.
  if (element_size % kObjectAlignment != 0) {
    total_size = BuildObjectSizeAlignment(
        byte_length, FixedTypedArrayBase::kHeaderSize);
  } else {
    total_size = AddUncasted<HAdd>(byte_length,
        Add<HConstant>(FixedTypedArrayBase::kHeaderSize));
    total_size->ClearFlag(HValue::kCanOverflow);
  }

  // The HForceRepresentation is to prevent possible deopt on int-smi
  // conversion after allocation but before the new object fields are set.
  length = AddUncasted<HForceRepresentation>(length, Representation::Smi());
  Handle<Map> fixed_typed_array_map(
      isolate()->heap()->MapForFixedTypedArray(array_type));
  HAllocate* elements = Add<HAllocate>(
      total_size, HType::HeapObject(), NOT_TENURED,
      fixed_typed_array_map->instance_type(), graph()->GetConstant0());

#ifndef V8_HOST_ARCH_64_BIT
  // On 32-bit hosts a float64 payload needs explicit double alignment.
  if (array_type == kExternalFloat64Array) {
    elements->MakeDoubleAligned();
  }
#endif

  AddStoreMapConstant(elements, fixed_typed_array_map);

  Add<HStoreNamedField>(elements,
      HObjectAccess::ForFixedArrayLength(),
      length);
  // On-heap storage: the base pointer refers to the array object itself ...
  Add<HStoreNamedField>(
      elements, HObjectAccess::ForFixedTypedArrayBaseBasePointer(), elements);

  // ... and the external pointer holds only the constant data offset.
  Add<HStoreNamedField>(
      elements, HObjectAccess::ForFixedTypedArrayBaseExternalPointer(),
      Add<HConstant>(ExternalReference::fixed_typed_array_base_data_offset()));

  HValue* filler = Add<HConstant>(static_cast<int32_t>(0));

  if (initialize) {
    LoopBuilder builder(this, context(), LoopBuilder::kPostIncrement);

    HValue* backing_store = AddUncasted<HAdd>(
        Add<HConstant>(ExternalReference::fixed_typed_array_base_data_offset()),
        elements, AddOfExternalAndTagged);

    // Zero-fill the payload one element at a time, from 0 to length - 1.
    HValue* key = builder.BeginBody(
        Add<HConstant>(static_cast<int32_t>(0)),
        length, Token::LT);
    Add<HStoreKeyed>(backing_store, key, filler, elements, fixed_elements_kind);

    builder.EndBody();
  }
  return elements;
}
10465
10466
// Implements the %TypedArrayInitialize intrinsic: initializes typed array
// |obj| from (arrayId, buffer, byteOffset, byteLength, initialize). The
// inline fast path requires a Smi byte offset; otherwise the runtime
// function itself is called in the else-branch below.
void HOptimizedGraphBuilder::GenerateTypedArrayInitialize(
    CallRuntime* expr) {
  ZoneList<Expression*>* arguments = expr->arguments();

  // Positions of the intrinsic's arguments.
  static const int kObjectArg = 0;
  static const int kArrayIdArg = 1;
  static const int kBufferArg = 2;
  static const int kByteOffsetArg = 3;
  static const int kByteLengthArg = 4;
  static const int kInitializeArg = 5;
  static const int kArgsLength = 6;
  DCHECK(arguments->length() == kArgsLength);


  CHECK_ALIVE(VisitForValue(arguments->at(kObjectArg)));
  HValue* obj = Pop();

  if (!arguments->at(kArrayIdArg)->IsLiteral()) {
    // This should never happen in real use, but can happen when fuzzing.
    // Just bail out.
    Bailout(kNeedSmiLiteral);
    return;
  }
  Handle<Object> value =
      static_cast<Literal*>(arguments->at(kArrayIdArg))->value();
  if (!value->IsSmi()) {
    // This should never happen in real use, but can happen when fuzzing.
    // Just bail out.
    Bailout(kNeedSmiLiteral);
    return;
  }
  int array_id = Smi::cast(*value)->value();

  // A null buffer literal means no caller-provided buffer: the backing
  // storage is allocated below.
  HValue* buffer;
  if (!arguments->at(kBufferArg)->IsNullLiteral()) {
    CHECK_ALIVE(VisitForValue(arguments->at(kBufferArg)));
    buffer = Pop();
  } else {
    buffer = NULL;
  }

  HValue* byte_offset;
  bool is_zero_byte_offset;

  // A literal zero byte offset is special-cased; any other offset requires
  // a buffer and gets a dynamic Smi check below.
  if (arguments->at(kByteOffsetArg)->IsLiteral()
      && Smi::FromInt(0) ==
      *static_cast<Literal*>(arguments->at(kByteOffsetArg))->value()) {
    byte_offset = Add<HConstant>(static_cast<int32_t>(0));
    is_zero_byte_offset = true;
  } else {
    CHECK_ALIVE(VisitForValue(arguments->at(kByteOffsetArg)));
    byte_offset = Pop();
    is_zero_byte_offset = false;
    DCHECK(buffer != NULL);
  }

  CHECK_ALIVE(VisitForValue(arguments->at(kByteLengthArg)));
  HValue* byte_length = Pop();

  CHECK(arguments->at(kInitializeArg)->IsLiteral());
  bool initialize = static_cast<Literal*>(arguments->at(kInitializeArg))
                        ->value()
                        ->BooleanValue();

  NoObservableSideEffectsScope scope(this);
  IfBuilder byte_offset_smi(this);

  if (!is_zero_byte_offset) {
    byte_offset_smi.If<HIsSmiAndBranch>(byte_offset);
    byte_offset_smi.Then();
  }

  ExternalArrayType array_type =
      kExternalInt8Array;  // Bogus initialization.
  size_t element_size = 1;  // Bogus initialization.
  ElementsKind fixed_elements_kind =  // Bogus initialization.
      INT8_ELEMENTS;
  Runtime::ArrayIdToTypeAndSize(array_id,
                                &array_type,
                                &fixed_elements_kind,
                                &element_size);


  { // byte_offset is Smi.
    HValue* allocated_buffer = buffer;
    if (buffer == NULL) {
      allocated_buffer = BuildAllocateEmptyArrayBuffer(byte_length);
    }
    BuildArrayBufferViewInitialization<JSTypedArray>(obj, allocated_buffer,
                                                     byte_offset, byte_length);


    // length = byte_length / element_size.
    HInstruction* length = AddUncasted<HDiv>(byte_length,
        Add<HConstant>(static_cast<int32_t>(element_size)));

    Add<HStoreNamedField>(obj,
        HObjectAccess::ForJSTypedArrayLength(),
        length);

    // With a caller-provided buffer the elements point into its external
    // backing store; otherwise an on-heap fixed typed array is allocated.
    HValue* elements;
    if (buffer != NULL) {
      elements = BuildAllocateExternalElements(
          array_type, is_zero_byte_offset, buffer, byte_offset, length);
    } else {
      DCHECK(is_zero_byte_offset);
      elements = BuildAllocateFixedTypedArray(array_type, element_size,
                                              fixed_elements_kind, byte_length,
                                              length, initialize);
    }
    Add<HStoreNamedField>(
        obj, HObjectAccess::ForElementsPointer(), elements);
  }

  if (!is_zero_byte_offset) {
    byte_offset_smi.Else();
    { // byte_offset is not Smi.
      // Slow path: re-push the arguments and call the runtime function.
      Push(obj);
      CHECK_ALIVE(VisitForValue(arguments->at(kArrayIdArg)));
      Push(buffer);
      Push(byte_offset);
      Push(byte_length);
      CHECK_ALIVE(VisitForValue(arguments->at(kInitializeArg)));
      PushArgumentsFromEnvironment(kArgsLength);
      Add<HCallRuntime>(expr->function(), kArgsLength);
    }
  }
  byte_offset_smi.End();
}
10595
10596
10597void HOptimizedGraphBuilder::GenerateMaxSmi(CallRuntime* expr) {
10598 DCHECK(expr->arguments()->length() == 0);
10599 HConstant* max_smi = New<HConstant>(static_cast<int32_t>(Smi::kMaxValue));
10600 return ast_context()->ReturnInstruction(max_smi, expr->id());
10601}
10602
10603
10604void HOptimizedGraphBuilder::GenerateTypedArrayMaxSizeInHeap(
10605 CallRuntime* expr) {
10606 DCHECK(expr->arguments()->length() == 0);
10607 HConstant* result = New<HConstant>(static_cast<int32_t>(
10608 FLAG_typed_array_max_size_in_heap));
10609 return ast_context()->ReturnInstruction(result, expr->id());
10610}
10611
10612
10613void HOptimizedGraphBuilder::GenerateArrayBufferGetByteLength(
10614 CallRuntime* expr) {
10615 DCHECK(expr->arguments()->length() == 1);
10616 CHECK_ALIVE(VisitForValue(expr->arguments()->at(0)));
10617 HValue* buffer = Pop();
10618 HInstruction* result = New<HLoadNamedField>(
10619 buffer, nullptr, HObjectAccess::ForJSArrayBufferByteLength());
10620 return ast_context()->ReturnInstruction(result, expr->id());
10621}
10622
10623
10624void HOptimizedGraphBuilder::GenerateArrayBufferViewGetByteLength(
10625 CallRuntime* expr) {
10626 NoObservableSideEffectsScope scope(this);
10627 DCHECK(expr->arguments()->length() == 1);
10628 CHECK_ALIVE(VisitForValue(expr->arguments()->at(0)));
10629 HValue* view = Pop();
10630
10631 return ast_context()->ReturnValue(BuildArrayBufferViewFieldAccessor(
10632 view, nullptr,
10633 FieldIndex::ForInObjectOffset(JSArrayBufferView::kByteLengthOffset)));
10634}
10635
10636
10637void HOptimizedGraphBuilder::GenerateArrayBufferViewGetByteOffset(
10638 CallRuntime* expr) {
10639 NoObservableSideEffectsScope scope(this);
10640 DCHECK(expr->arguments()->length() == 1);
10641 CHECK_ALIVE(VisitForValue(expr->arguments()->at(0)));
10642 HValue* view = Pop();
10643
10644 return ast_context()->ReturnValue(BuildArrayBufferViewFieldAccessor(
10645 view, nullptr,
10646 FieldIndex::ForInObjectOffset(JSArrayBufferView::kByteOffsetOffset)));
10647}
10648
10649
10650void HOptimizedGraphBuilder::GenerateTypedArrayGetLength(
10651 CallRuntime* expr) {
10652 NoObservableSideEffectsScope scope(this);
10653 DCHECK(expr->arguments()->length() == 1);
10654 CHECK_ALIVE(VisitForValue(expr->arguments()->at(0)));
10655 HValue* view = Pop();
10656
10657 return ast_context()->ReturnValue(BuildArrayBufferViewFieldAccessor(
10658 view, nullptr,
10659 FieldIndex::ForInObjectOffset(JSTypedArray::kLengthOffset)));
10660}
10661
10662
// Compiles a runtime call. JS-runtime calls are lowered to a direct call of
// the known function from the native context; runtime ids with a Hydrogen
// intrinsic dispatch to their Generate* handler; everything else becomes a
// plain HCallRuntime.
void HOptimizedGraphBuilder::VisitCallRuntime(CallRuntime* expr) {
  DCHECK(!HasStackOverflow());
  DCHECK(current_block() != NULL);
  DCHECK(current_block()->HasPredecessor());
  if (expr->is_jsruntime()) {
    // Crankshaft always specializes to the native context, so we can just grab
    // the constant function from the current native context and embed that into
    // the code object.
    Handle<JSFunction> known_function(
        JSFunction::cast(
            current_info()->native_context()->get(expr->context_index())),
        isolate());

    // The callee and the receiver both have to be pushed onto the operand stack
    // before arguments are being evaluated.
    HConstant* function = Add<HConstant>(known_function);
    HValue* receiver = ImplicitReceiverFor(function, known_function);
    Push(function);
    Push(receiver);

    int argument_count = expr->arguments()->length() + 1;  // Count receiver.
    CHECK_ALIVE(VisitExpressions(expr->arguments()));
    PushArgumentsFromEnvironment(argument_count);
    HInstruction* call = NewCallConstantFunction(known_function, argument_count,
                                                 TailCallMode::kDisallow,
                                                 TailCallMode::kDisallow);
    Drop(1);  // Function
    return ast_context()->ReturnInstruction(call, expr->id());
  }

  const Runtime::Function* function = expr->function();
  DCHECK(function != NULL);
  switch (function->function_id) {
#define CALL_INTRINSIC_GENERATOR(Name) \
  case Runtime::kInline##Name: \
    return Generate##Name(expr);

    FOR_EACH_HYDROGEN_INTRINSIC(CALL_INTRINSIC_GENERATOR)
#undef CALL_INTRINSIC_GENERATOR
    default: {
      // No Hydrogen intrinsic for this id: emit a generic runtime call.
      int argument_count = expr->arguments()->length();
      CHECK_ALIVE(VisitExpressions(expr->arguments()));
      PushArgumentsFromEnvironment(argument_count);
      HCallRuntime* call = New<HCallRuntime>(function, argument_count);
      return ast_context()->ReturnInstruction(call, expr->id());
    }
  }
}
10711
10712
10713void HOptimizedGraphBuilder::VisitUnaryOperation(UnaryOperation* expr) {
10714 DCHECK(!HasStackOverflow());
10715 DCHECK(current_block() != NULL);
10716 DCHECK(current_block()->HasPredecessor());
10717 switch (expr->op()) {
10718 case Token::DELETE: return VisitDelete(expr);
10719 case Token::VOID: return VisitVoid(expr);
10720 case Token::TYPEOF: return VisitTypeof(expr);
10721 case Token::NOT: return VisitNot(expr);
10722 default: UNREACHABLE();
10723 }
10724}
10725
10726
10727void HOptimizedGraphBuilder::VisitDelete(UnaryOperation* expr) {
10728 Property* prop = expr->expression()->AsProperty();
10729 VariableProxy* proxy = expr->expression()->AsVariableProxy();
10730 if (prop != NULL) {
10731 CHECK_ALIVE(VisitForValue(prop->obj()));
10732 CHECK_ALIVE(VisitForValue(prop->key()));
10733 HValue* key = Pop();
10734 HValue* obj = Pop();
10735 Add<HPushArguments>(obj, key);
10736 HInstruction* instr = New<HCallRuntime>(
10737 Runtime::FunctionForId(is_strict(function_language_mode())
10738 ? Runtime::kDeleteProperty_Strict
10739 : Runtime::kDeleteProperty_Sloppy),
10740 2);
10741 return ast_context()->ReturnInstruction(instr, expr->id());
10742 } else if (proxy != NULL) {
10743 Variable* var = proxy->var();
10744 if (var->IsUnallocatedOrGlobalSlot()) {
10745 Bailout(kDeleteWithGlobalVariable);
10746 } else if (var->IsStackAllocated() || var->IsContextSlot()) {
10747 // Result of deleting non-global variables is false. 'this' is not really
10748 // a variable, though we implement it as one. The subexpression does not
10749 // have side effects.
10750 HValue* value = var->HasThisName(isolate()) ? graph()->GetConstantTrue()
10751 : graph()->GetConstantFalse();
10752 return ast_context()->ReturnValue(value);
10753 } else {
10754 Bailout(kDeleteWithNonGlobalVariable);
10755 }
10756 } else {
10757 // Result of deleting non-property, non-variable reference is true.
10758 // Evaluate the subexpression for side effects.
10759 CHECK_ALIVE(VisitForEffect(expr->expression()));
10760 return ast_context()->ReturnValue(graph()->GetConstantTrue());
10761 }
10762}
10763
10764
10765void HOptimizedGraphBuilder::VisitVoid(UnaryOperation* expr) {
10766 CHECK_ALIVE(VisitForEffect(expr->expression()));
10767 return ast_context()->ReturnValue(graph()->GetConstantUndefined());
10768}
10769
10770
10771void HOptimizedGraphBuilder::VisitTypeof(UnaryOperation* expr) {
10772 CHECK_ALIVE(VisitForTypeOf(expr->expression()));
10773 HValue* value = Pop();
10774 HInstruction* instr = New<HTypeof>(value);
10775 return ast_context()->ReturnInstruction(instr, expr->id());
10776}
10777
10778
10779void HOptimizedGraphBuilder::VisitNot(UnaryOperation* expr) {
10780 if (ast_context()->IsTest()) {
10781 TestContext* context = TestContext::cast(ast_context());
10782 VisitForControl(expr->expression(),
10783 context->if_false(),
10784 context->if_true());
10785 return;
10786 }
10787
10788 if (ast_context()->IsEffect()) {
10789 VisitForEffect(expr->expression());
10790 return;
10791 }
10792
10793 DCHECK(ast_context()->IsValue());
10794 HBasicBlock* materialize_false = graph()->CreateBasicBlock();
10795 HBasicBlock* materialize_true = graph()->CreateBasicBlock();
10796 CHECK_BAILOUT(VisitForControl(expr->expression(),
10797 materialize_false,
10798 materialize_true));
10799
10800 if (materialize_false->HasPredecessor()) {
10801 materialize_false->SetJoinId(expr->MaterializeFalseId());
10802 set_current_block(materialize_false);
10803 Push(graph()->GetConstantFalse());
10804 } else {
10805 materialize_false = NULL;
10806 }
10807
10808 if (materialize_true->HasPredecessor()) {
10809 materialize_true->SetJoinId(expr->MaterializeTrueId());
10810 set_current_block(materialize_true);
10811 Push(graph()->GetConstantTrue());
10812 } else {
10813 materialize_true = NULL;
10814 }
10815
10816 HBasicBlock* join =
10817 CreateJoin(materialize_false, materialize_true, expr->id());
10818 set_current_block(join);
10819 if (join != NULL) return ast_context()->ReturnValue(Pop());
10820}
10821
10822
// Maps a semantic |type| to the narrowest Crankshaft representation that can
// hold all of its values. The subtype tests are ordered from narrowest to
// widest, so the first match wins; order is load-bearing.
static Representation RepresentationFor(Type* type) {
  DisallowHeapAllocation no_allocation;
  if (type->Is(Type::None())) return Representation::None();
  if (type->Is(Type::SignedSmall())) return Representation::Smi();
  if (type->Is(Type::Signed32())) return Representation::Integer32();
  if (type->Is(Type::Number())) return Representation::Double();
  return Representation::Tagged();
}
10831
10832
// Emits the add of +1/-1 for a count operation (++/--); the operand is on
// top of the expression stack. When |returns_original_input| is set
// (postfix in a non-effect context), an explicit ToNumber(input) value is
// pushed first so the original value can be returned to the caller.
HInstruction* HOptimizedGraphBuilder::BuildIncrement(
    bool returns_original_input,
    CountOperation* expr) {
  // The input to the count operation is on top of the expression stack.
  Representation rep = RepresentationFor(expr->type());
  // Untyped or tagged feedback defaults to the optimistic Smi case.
  if (rep.IsNone() || rep.IsTagged()) {
    rep = Representation::Smi();
  }

  if (returns_original_input) {
    // We need an explicit HValue representing ToNumber(input). The
    // actual HChange instruction we need is (sometimes) added in a later
    // phase, so it is not available now to be used as an input to HAdd and
    // as the return value.
    HInstruction* number_input = AddUncasted<HForceRepresentation>(Pop(), rep);
    if (!rep.IsDouble()) {
      number_input->SetFlag(HInstruction::kFlexibleRepresentation);
      number_input->SetFlag(HInstruction::kCannotBeTagged);
    }
    Push(number_input);
  }

  // The addition has no side effects, so we do not need
  // to simulate the expression stack after this instruction.
  // Any later failures deopt to the load of the input or earlier.
  HConstant* delta = (expr->op() == Token::INC)
      ? graph()->GetConstant1()
      : graph()->GetConstantMinus1();
  HInstruction* instr = AddUncasted<HAdd>(Top(), delta);
  if (instr->IsAdd()) {
    HAdd* add = HAdd::cast(instr);
    // Record the observed representations: the operand as inferred above,
    // and the constant delta as a Smi.
    add->set_observed_input_representation(1, rep);
    add->set_observed_input_representation(2, Representation::Smi());
  }
  instr->ClearAllSideEffects();
  instr->SetFlag(HInstruction::kCannotBeTagged);
  return instr;
}
10871
10872
10873void HOptimizedGraphBuilder::BuildStoreForEffect(
10874 Expression* expr, Property* prop, FeedbackVectorSlot slot, BailoutId ast_id,
10875 BailoutId return_id, HValue* object, HValue* key, HValue* value) {
10876 EffectContext for_effect(this);
10877 Push(object);
10878 if (key != NULL) Push(key);
10879 Push(value);
10880 BuildStore(expr, prop, slot, ast_id, return_id);
10881}
10882
10883
// Compiles ++/-- (prefix and postfix) on variables and properties. Postfix
// operations in a non-effect context simulate an extra stack slot, matching
// the full code generator, so ToNumber(input) can be returned.
void HOptimizedGraphBuilder::VisitCountOperation(CountOperation* expr) {
  DCHECK(!HasStackOverflow());
  DCHECK(current_block() != NULL);
  DCHECK(current_block()->HasPredecessor());
  if (!top_info()->is_tracking_positions()) SetSourcePosition(expr->position());
  Expression* target = expr->expression();
  VariableProxy* proxy = target->AsVariableProxy();
  Property* prop = target->AsProperty();
  if (proxy == NULL && prop == NULL) {
    return Bailout(kInvalidLhsInCountOperation);
  }

  // Match the full code generator stack by simulating an extra stack
  // element for postfix operations in a non-effect context. The return
  // value is ToNumber(input).
  bool returns_original_input =
      expr->is_postfix() && !ast_context()->IsEffect();
  HValue* input = NULL;  // ToNumber(original_input).
  HValue* after = NULL;  // The result after incrementing or decrementing.

  if (proxy != NULL) {
    Variable* var = proxy->var();
    // Legacy const and uninitialized const assignments are unsupported.
    if (var->mode() == CONST_LEGACY) {
      return Bailout(kUnsupportedCountOperationWithConst);
    }
    if (var->mode() == CONST) {
      return Bailout(kNonInitializerAssignmentToConst);
    }
    // Argument of the count operation is a variable, not a property.
    DCHECK(prop == NULL);
    CHECK_ALIVE(VisitForValue(target));

    after = BuildIncrement(returns_original_input, expr);
    input = returns_original_input ? Top() : Pop();
    Push(after);

    // Write the incremented value back to wherever the variable lives.
    switch (var->location()) {
      case VariableLocation::GLOBAL:
      case VariableLocation::UNALLOCATED:
        HandleGlobalVariableAssignment(var, after, expr->CountSlot(),
                                       expr->AssignmentId());
        break;

      case VariableLocation::PARAMETER:
      case VariableLocation::LOCAL:
        BindIfLive(var, after);
        break;

      case VariableLocation::CONTEXT: {
        // Bail out if we try to mutate a parameter value in a function
        // using the arguments object. We do not (yet) correctly handle the
        // arguments property of the function.
        if (current_info()->scope()->arguments() != NULL) {
          // Parameters will rewrite to context slots. We have no direct
          // way to detect that the variable is a parameter so we use a
          // linear search of the parameter list.
          int count = current_info()->scope()->num_parameters();
          for (int i = 0; i < count; ++i) {
            if (var == current_info()->scope()->parameter(i)) {
              return Bailout(kAssignmentToParameterInArgumentsObject);
            }
          }
        }

        HValue* context = BuildContextChainWalk(var);
        HStoreContextSlot::Mode mode = IsLexicalVariableMode(var->mode())
            ? HStoreContextSlot::kCheckDeoptimize : HStoreContextSlot::kNoCheck;
        HStoreContextSlot* instr = Add<HStoreContextSlot>(context, var->index(),
                                                          mode, after);
        if (instr->HasObservableSideEffects()) {
          Add<HSimulate>(expr->AssignmentId(), REMOVABLE_SIMULATE);
        }
        break;
      }

      case VariableLocation::LOOKUP:
        return Bailout(kLookupVariableInCountOperation);
    }

    Drop(returns_original_input ? 2 : 1);
    // Postfix yields the original (ToNumber'd) input, prefix the new value.
    return ast_context()->ReturnValue(expr->is_postfix() ? input : after);
  }

  // Argument of the count operation is a property.
  DCHECK(prop != NULL);
  if (returns_original_input) Push(graph()->GetConstantUndefined());

  CHECK_ALIVE(VisitForValue(prop->obj()));
  HValue* object = Top();

  HValue* key = NULL;
  if (!prop->key()->IsPropertyName() || prop->IsStringAccess()) {
    CHECK_ALIVE(VisitForValue(prop->key()));
    key = Top();
  }

  CHECK_ALIVE(PushLoad(prop, object, key));

  after = BuildIncrement(returns_original_input, expr);

  if (returns_original_input) {
    input = Pop();
    // Drop object and key to push it again in the effect context below.
    Drop(key == NULL ? 1 : 2);
    environment()->SetExpressionStackAt(0, input);
    CHECK_ALIVE(BuildStoreForEffect(expr, prop, expr->CountSlot(), expr->id(),
                                    expr->AssignmentId(), object, key, after));
    return ast_context()->ReturnValue(Pop());
  }

  environment()->SetExpressionStackAt(0, after);
  return BuildStore(expr, prop, expr->CountSlot(), expr->id(),
                    expr->AssignmentId());
}
10998
10999
11000HInstruction* HOptimizedGraphBuilder::BuildStringCharCodeAt(
11001 HValue* string,
11002 HValue* index) {
11003 if (string->IsConstant() && index->IsConstant()) {
11004 HConstant* c_string = HConstant::cast(string);
11005 HConstant* c_index = HConstant::cast(index);
11006 if (c_string->HasStringValue() && c_index->HasNumberValue()) {
11007 int32_t i = c_index->NumberValueAsInteger32();
11008 Handle<String> s = c_string->StringValue();
11009 if (i < 0 || i >= s->length()) {
11010 return New<HConstant>(std::numeric_limits<double>::quiet_NaN());
11011 }
11012 return New<HConstant>(s->Get(i));
11013 }
11014 }
11015 string = BuildCheckString(string);
11016 index = Add<HBoundsCheck>(index, AddLoadStringLength(string));
11017 return New<HStringCharCodeAt>(string, index);
11018}
11019
11020
11021// Checks if the given shift amounts have following forms:
11022// (N1) and (N2) with N1 + N2 = 32; (sa) and (32 - sa).
11023static bool ShiftAmountsAllowReplaceByRotate(HValue* sa,
11024 HValue* const32_minus_sa) {
11025 if (sa->IsConstant() && const32_minus_sa->IsConstant()) {
11026 const HConstant* c1 = HConstant::cast(sa);
11027 const HConstant* c2 = HConstant::cast(const32_minus_sa);
11028 return c1->HasInteger32Value() && c2->HasInteger32Value() &&
11029 (c1->Integer32Value() + c2->Integer32Value() == 32);
11030 }
11031 if (!const32_minus_sa->IsSub()) return false;
11032 HSub* sub = HSub::cast(const32_minus_sa);
11033 return sub->left()->EqualsInteger32Constant(32) && sub->right() == sa;
11034}
11035
11036
11037// Checks if the left and the right are shift instructions with the oposite
11038// directions that can be replaced by one rotate right instruction or not.
11039// Returns the operand and the shift amount for the rotate instruction in the
11040// former case.
11041bool HGraphBuilder::MatchRotateRight(HValue* left,
11042 HValue* right,
11043 HValue** operand,
11044 HValue** shift_amount) {
11045 HShl* shl;
11046 HShr* shr;
11047 if (left->IsShl() && right->IsShr()) {
11048 shl = HShl::cast(left);
11049 shr = HShr::cast(right);
11050 } else if (left->IsShr() && right->IsShl()) {
11051 shl = HShl::cast(right);
11052 shr = HShr::cast(left);
11053 } else {
11054 return false;
11055 }
11056 if (shl->left() != shr->left()) return false;
11057
11058 if (!ShiftAmountsAllowReplaceByRotate(shl->right(), shr->right()) &&
11059 !ShiftAmountsAllowReplaceByRotate(shr->right(), shl->right())) {
11060 return false;
11061 }
11062 *operand = shr->left();
11063 *shift_amount = shr->right();
11064 return true;
11065}
11066
11067
11068bool CanBeZero(HValue* right) {
11069 if (right->IsConstant()) {
11070 HConstant* right_const = HConstant::cast(right);
11071 if (right_const->HasInteger32Value() &&
11072 (right_const->Integer32Value() & 0x1f) != 0) {
11073 return false;
11074 }
11075 }
11076 return true;
11077}
11078
11079
11080HValue* HGraphBuilder::EnforceNumberType(HValue* number,
11081 Type* expected) {
11082 if (expected->Is(Type::SignedSmall())) {
11083 return AddUncasted<HForceRepresentation>(number, Representation::Smi());
11084 }
11085 if (expected->Is(Type::Signed32())) {
11086 return AddUncasted<HForceRepresentation>(number,
11087 Representation::Integer32());
11088 }
11089 return number;
11090}
11091
11092
// Tries to statically convert |value| toward a number, updating |*expected|
// with any knowledge gained. Constants are truncated and folded eagerly;
// for non-constants only the expected type is refined — the value itself is
// always returned unchanged (the actual conversion is emitted elsewhere).
HValue* HGraphBuilder::TruncateToNumber(HValue* value, Type** expected) {
  // Constant operands can be truncated to a number at compile time.
  if (value->IsConstant()) {
    HConstant* constant = HConstant::cast(value);
    Maybe<HConstant*> number =
        constant->CopyToTruncatedNumber(isolate(), zone());
    if (number.IsJust()) {
      *expected = Type::Number();
      return AddInstruction(number.FromJust());
    }
  }

  // We put temporary values on the stack, which don't correspond to anything
  // in baseline code. Since nothing is observable we avoid recording those
  // pushes with a NoObservableSideEffectsScope.
  NoObservableSideEffectsScope no_effects(this);

  Type* expected_type = *expected;

  // Separate the number type from the rest.
  Type* expected_obj =
      Type::Intersect(expected_type, Type::NonNumber(), zone());
  Type* expected_number =
      Type::Intersect(expected_type, Type::Number(), zone());

  // We expect to get a number.
  // (We need to check first, since Type::None->Is(Type::Any()) == true.
  if (expected_obj->Is(Type::None())) {
    DCHECK(!expected_number->Is(Type::None()));
    return value;
  }

  if (expected_obj->Is(Type::Undefined())) {
    // This is already done by HChange.
    *expected = Type::Union(expected_number, Type::Number(), zone());
    return value;
  }

  // Any other non-number component: leave both the value and the expected
  // type unchanged.
  return value;
}
11132
11133
11134HValue* HOptimizedGraphBuilder::BuildBinaryOperation(
11135 BinaryOperation* expr,
11136 HValue* left,
11137 HValue* right,
11138 PushBeforeSimulateBehavior push_sim_result) {
Ben Murdochc5610432016-08-08 18:44:38 +010011139 Type* left_type = bounds_.get(expr->left()).lower;
11140 Type* right_type = bounds_.get(expr->right()).lower;
11141 Type* result_type = bounds_.get(expr).lower;
Ben Murdoch4a90d5f2016-03-22 12:00:34 +000011142 Maybe<int> fixed_right_arg = expr->fixed_right_arg();
11143 Handle<AllocationSite> allocation_site = expr->allocation_site();
11144
11145 HAllocationMode allocation_mode;
11146 if (FLAG_allocation_site_pretenuring && !allocation_site.is_null()) {
11147 allocation_mode = HAllocationMode(allocation_site);
11148 }
11149 HValue* result = HGraphBuilder::BuildBinaryOperation(
11150 expr->op(), left, right, left_type, right_type, result_type,
Ben Murdoch097c5b22016-05-18 11:27:45 +010011151 fixed_right_arg, allocation_mode, expr->id());
Ben Murdoch4a90d5f2016-03-22 12:00:34 +000011152 // Add a simulate after instructions with observable side effects, and
11153 // after phis, which are the result of BuildBinaryOperation when we
11154 // inlined some complex subgraph.
11155 if (result->HasObservableSideEffects() || result->IsPhi()) {
11156 if (push_sim_result == PUSH_BEFORE_SIMULATE) {
11157 Push(result);
11158 Add<HSimulate>(expr->id(), REMOVABLE_SIMULATE);
11159 Drop(1);
11160 } else {
11161 Add<HSimulate>(expr->id(), REMOVABLE_SIMULATE);
11162 }
11163 }
11164 return result;
11165}
11166
// Builds the Hydrogen graph for the binary operation |op| on |left| and
// |right|, specialized on the collected type feedback (|left_type|,
// |right_type|, |result_type|). Handles string concatenation, soft deopts
// for missing feedback, stub fallbacks for tagged operands, and the plain
// numeric/bitwise operations.
HValue* HGraphBuilder::BuildBinaryOperation(Token::Value op, HValue* left,
                                            HValue* right, Type* left_type,
                                            Type* right_type, Type* result_type,
                                            Maybe<int> fixed_right_arg,
                                            HAllocationMode allocation_mode,
                                            BailoutId opt_id) {
  bool maybe_string_add = false;
  if (op == Token::ADD) {
    // If we are adding constant string with something for which we don't have
    // a feedback yet, assume that it's also going to be a string and don't
    // generate deopt instructions.
    if (!left_type->IsInhabited() && right->IsConstant() &&
        HConstant::cast(right)->HasStringValue()) {
      left_type = Type::String();
    }

    if (!right_type->IsInhabited() && left->IsConstant() &&
        HConstant::cast(left)->HasStringValue()) {
      right_type = Type::String();
    }

    maybe_string_add = (left_type->Maybe(Type::String()) ||
                        left_type->Maybe(Type::Receiver()) ||
                        right_type->Maybe(Type::String()) ||
                        right_type->Maybe(Type::Receiver()));
  }

  Representation left_rep = RepresentationFor(left_type);
  Representation right_rep = RepresentationFor(right_type);

  // Without feedback for an operand, soft-deoptimize to collect some and
  // treat the operand as completely generic in the meantime.
  if (!left_type->IsInhabited()) {
    Add<HDeoptimize>(
        Deoptimizer::kInsufficientTypeFeedbackForLHSOfBinaryOperation,
        Deoptimizer::SOFT);
    left_type = Type::Any();
    left_rep = RepresentationFor(left_type);
    maybe_string_add = op == Token::ADD;
  }

  if (!right_type->IsInhabited()) {
    Add<HDeoptimize>(
        Deoptimizer::kInsufficientTypeFeedbackForRHSOfBinaryOperation,
        Deoptimizer::SOFT);
    right_type = Type::Any();
    right_rep = RepresentationFor(right_type);
    maybe_string_add = op == Token::ADD;
  }

  // Unless this could still be a string concatenation, coerce both operands
  // toward numbers (constants fold immediately).
  if (!maybe_string_add) {
    left = TruncateToNumber(left, &left_type);
    right = TruncateToNumber(right, &right_type);
  }

  // Special case for string addition here.
  if (op == Token::ADD &&
      (left_type->Is(Type::String()) || right_type->Is(Type::String()))) {
    // Validate type feedback for left argument.
    if (left_type->Is(Type::String())) {
      left = BuildCheckString(left);
    }

    // Validate type feedback for right argument.
    if (right_type->Is(Type::String())) {
      right = BuildCheckString(right);
    }

    // Convert left argument as necessary.
    if (left_type->Is(Type::Number())) {
      DCHECK(right_type->Is(Type::String()));
      left = BuildNumberToString(left, left_type);
    } else if (!left_type->Is(Type::String())) {
      DCHECK(right_type->Is(Type::String()));
      // Let the stub convert an arbitrary left operand to a string.
      return AddUncasted<HStringAdd>(
          left, right, allocation_mode.GetPretenureMode(),
          STRING_ADD_CONVERT_LEFT, allocation_mode.feedback_site());
    }

    // Convert right argument as necessary.
    if (right_type->Is(Type::Number())) {
      DCHECK(left_type->Is(Type::String()));
      right = BuildNumberToString(right, right_type);
    } else if (!right_type->Is(Type::String())) {
      DCHECK(left_type->Is(Type::String()));
      // Let the stub convert an arbitrary right operand to a string.
      return AddUncasted<HStringAdd>(
          left, right, allocation_mode.GetPretenureMode(),
          STRING_ADD_CONVERT_RIGHT, allocation_mode.feedback_site());
    }

    // Fast paths for empty constant strings.
    Handle<String> left_string =
        left->IsConstant() && HConstant::cast(left)->HasStringValue()
            ? HConstant::cast(left)->StringValue()
            : Handle<String>();
    Handle<String> right_string =
        right->IsConstant() && HConstant::cast(right)->HasStringValue()
            ? HConstant::cast(right)->StringValue()
            : Handle<String>();
    if (!left_string.is_null() && left_string->length() == 0) return right;
    if (!right_string.is_null() && right_string->length() == 0) return left;
    if (!left_string.is_null() && !right_string.is_null()) {
      return AddUncasted<HStringAdd>(
          left, right, allocation_mode.GetPretenureMode(),
          STRING_ADD_CHECK_NONE, allocation_mode.feedback_site());
    }

    // Register the dependent code with the allocation site.
    if (!allocation_mode.feedback_site().is_null()) {
      DCHECK(!graph()->info()->IsStub());
      Handle<AllocationSite> site(allocation_mode.feedback_site());
      top_info()->dependencies()->AssumeTenuringDecision(site);
    }

    // Inline the string addition into the stub when creating allocation
    // mementos to gather allocation site feedback, or if we can statically
    // infer that we're going to create a cons string.
    if ((graph()->info()->IsStub() &&
         allocation_mode.CreateAllocationMementos()) ||
        (left->IsConstant() &&
         HConstant::cast(left)->HasStringValue() &&
         HConstant::cast(left)->StringValue()->length() + 1 >=
           ConsString::kMinLength) ||
        (right->IsConstant() &&
         HConstant::cast(right)->HasStringValue() &&
         HConstant::cast(right)->StringValue()->length() + 1 >=
           ConsString::kMinLength)) {
      return BuildStringAdd(left, right, allocation_mode);
    }

    // Fallback to using the string add stub.
    return AddUncasted<HStringAdd>(
        left, right, allocation_mode.GetPretenureMode(), STRING_ADD_CHECK_NONE,
        allocation_mode.feedback_site());
  }

  // Special case for +x here.
  // (Multiplication by the constant one reduces to a plain ToNumber
  // conversion of the other operand.)
  if (op == Token::MUL) {
    if (left->EqualsInteger32Constant(1)) {
      return BuildToNumber(right);
    }
    if (right->EqualsInteger32Constant(1)) {
      return BuildToNumber(left);
    }
  }

  // In stubs, pin the operands to the representations derived from the
  // feedback up front.
  if (graph()->info()->IsStub()) {
    left = EnforceNumberType(left, left_type);
    right = EnforceNumberType(right, right_type);
  }

  Representation result_rep = RepresentationFor(result_type);

  bool is_non_primitive = (left_rep.IsTagged() && !left_rep.IsSmi()) ||
                          (right_rep.IsTagged() && !right_rep.IsSmi());

  HInstruction* instr = NULL;
  // Only the stub is allowed to call into the runtime, since otherwise we would
  // inline several instructions (including the two pushes) for every tagged
  // operation in optimized code, which is more expensive, than a stub call.
  if (graph()->info()->IsStub() && is_non_primitive) {
    HValue* values[] = {context(), left, right};
// Emits a call to the CodeFactory stub named |Name| on |values|, storing the
// resulting instruction in |instr|.
#define GET_STUB(Name)                                                       \
  do {                                                                       \
    Callable callable = CodeFactory::Name(isolate());                        \
    HValue* stub = Add<HConstant>(callable.code());                          \
    instr = AddUncasted<HCallWithDescriptor>(stub, 0, callable.descriptor(), \
                                             ArrayVector(values));           \
  } while (false)

    switch (op) {
      default:
        UNREACHABLE();
      case Token::ADD:
        GET_STUB(Add);
        break;
      case Token::SUB:
        GET_STUB(Subtract);
        break;
      case Token::MUL:
        GET_STUB(Multiply);
        break;
      case Token::DIV:
        GET_STUB(Divide);
        break;
      case Token::MOD:
        GET_STUB(Modulus);
        break;
      case Token::BIT_OR:
        GET_STUB(BitwiseOr);
        break;
      case Token::BIT_AND:
        GET_STUB(BitwiseAnd);
        break;
      case Token::BIT_XOR:
        GET_STUB(BitwiseXor);
        break;
      case Token::SAR:
        GET_STUB(ShiftRight);
        break;
      case Token::SHR:
        GET_STUB(ShiftRightLogical);
        break;
      case Token::SHL:
        GET_STUB(ShiftLeft);
        break;
    }
#undef GET_STUB
  } else {
    switch (op) {
      case Token::ADD:
        instr = AddUncasted<HAdd>(left, right);
        break;
      case Token::SUB:
        instr = AddUncasted<HSub>(left, right);
        break;
      case Token::MUL:
        instr = AddUncasted<HMul>(left, right);
        break;
      case Token::MOD: {
        // When feedback pins the RHS to a single value, guard on that value
        // (deopting otherwise) so the modulus can be specialized on it.
        if (fixed_right_arg.IsJust() &&
            !right->EqualsInteger32Constant(fixed_right_arg.FromJust())) {
          HConstant* fixed_right =
              Add<HConstant>(static_cast<int>(fixed_right_arg.FromJust()));
          IfBuilder if_same(this);
          if_same.If<HCompareNumericAndBranch>(right, fixed_right, Token::EQ);
          if_same.Then();
          if_same.ElseDeopt(Deoptimizer::kUnexpectedRHSOfBinaryOperation);
          right = fixed_right;
        }
        instr = AddUncasted<HMod>(left, right);
        break;
      }
      case Token::DIV:
        instr = AddUncasted<HDiv>(left, right);
        break;
      case Token::BIT_XOR:
      case Token::BIT_AND:
        instr = AddUncasted<HBitwise>(op, left, right);
        break;
      case Token::BIT_OR: {
        // Recognize rotate patterns like (x << a) | (x >>> (32 - a)) and
        // emit a single rotate-right instead of shift-or.
        HValue *operand, *shift_amount;
        if (left_type->Is(Type::Signed32()) &&
            right_type->Is(Type::Signed32()) &&
            MatchRotateRight(left, right, &operand, &shift_amount)) {
          instr = AddUncasted<HRor>(operand, shift_amount);
        } else {
          instr = AddUncasted<HBitwise>(op, left, right);
        }
        break;
      }
      case Token::SAR:
        instr = AddUncasted<HSar>(left, right);
        break;
      case Token::SHR:
        instr = AddUncasted<HShr>(left, right);
        // x >>> 0 can produce a value above the int32 range; track such
        // shifts for the uint32 analysis phase.
        if (instr->IsShr() && CanBeZero(right)) {
          graph()->RecordUint32Instruction(instr);
        }
        break;
      case Token::SHL:
        instr = AddUncasted<HShl>(left, right);
        break;
      default:
        UNREACHABLE();
    }
  }

  // Record the feedback-derived representations on the new instruction.
  if (instr->IsBinaryOperation()) {
    HBinaryOperation* binop = HBinaryOperation::cast(instr);
    binop->set_observed_input_representation(1, left_rep);
    binop->set_observed_input_representation(2, right_rep);
    binop->initialize_output_representation(result_rep);
    if (graph()->info()->IsStub()) {
      // Stub should not call into stub.
      instr->SetFlag(HValue::kCannotBeTagged);
      // And should truncate on HForceRepresentation already.
      if (left->IsForceRepresentation()) {
        left->CopyFlag(HValue::kTruncatingToSmi, instr);
        left->CopyFlag(HValue::kTruncatingToInt32, instr);
      }
      if (right->IsForceRepresentation()) {
        right->CopyFlag(HValue::kTruncatingToSmi, instr);
        right->CopyFlag(HValue::kTruncatingToInt32, instr);
      }
    }
  }
  return instr;
}
11454
11455
11456// Check for the form (%_ClassOf(foo) === 'BarClass').
11457static bool IsClassOfTest(CompareOperation* expr) {
11458 if (expr->op() != Token::EQ_STRICT) return false;
11459 CallRuntime* call = expr->left()->AsCallRuntime();
11460 if (call == NULL) return false;
11461 Literal* literal = expr->right()->AsLiteral();
11462 if (literal == NULL) return false;
11463 if (!literal->value()->IsString()) return false;
11464 if (!call->is_jsruntime() &&
11465 call->function()->function_id != Runtime::kInlineClassOf) {
11466 return false;
11467 }
11468 DCHECK(call->arguments()->length() == 1);
11469 return true;
11470}
11471
11472
11473void HOptimizedGraphBuilder::VisitBinaryOperation(BinaryOperation* expr) {
11474 DCHECK(!HasStackOverflow());
11475 DCHECK(current_block() != NULL);
11476 DCHECK(current_block()->HasPredecessor());
11477 switch (expr->op()) {
11478 case Token::COMMA:
11479 return VisitComma(expr);
11480 case Token::OR:
11481 case Token::AND:
11482 return VisitLogicalExpression(expr);
11483 default:
11484 return VisitArithmeticExpression(expr);
11485 }
11486}
11487
11488
// Comma expression: the left operand is evaluated purely for its side
// effects, then the right operand supplies the value of the whole
// expression.
void HOptimizedGraphBuilder::VisitComma(BinaryOperation* expr) {
  CHECK_ALIVE(VisitForEffect(expr->left()));
  // Visit the right subexpression in the same AST context as the entire
  // expression.
  Visit(expr->right());
}
11495
11496
// Translates a short-circuiting logical expression (&& or ||). The shape of
// the generated graph depends on the surrounding AST context: test contexts
// branch directly into the context's targets, value contexts produce either
// the left or the right value, and effect contexts need only the control
// flow and side effects.
void HOptimizedGraphBuilder::VisitLogicalExpression(BinaryOperation* expr) {
  bool is_logical_and = expr->op() == Token::AND;
  if (ast_context()->IsTest()) {
    TestContext* context = TestContext::cast(ast_context());
    // Translate left subexpression.
    HBasicBlock* eval_right = graph()->CreateBasicBlock();
    // For &&, a false left operand jumps straight to the false target; for
    // ||, a true left operand jumps straight to the true target.
    if (is_logical_and) {
      CHECK_BAILOUT(VisitForControl(expr->left(),
                                    eval_right,
                                    context->if_false()));
    } else {
      CHECK_BAILOUT(VisitForControl(expr->left(),
                                    context->if_true(),
                                    eval_right));
    }

    // Translate right subexpression by visiting it in the same AST
    // context as the entire expression.
    CHECK(eval_right->HasPredecessor());
    eval_right->SetJoinId(expr->RightId());
    set_current_block(eval_right);
    Visit(expr->right());
  } else if (ast_context()->IsValue()) {
    CHECK_ALIVE(VisitForValue(expr->left()));
    DCHECK(current_block() != NULL);
    HValue* left_value = Top();

    // Short-circuit left values that always evaluate to the same boolean value.
    if (expr->left()->ToBooleanIsTrue() || expr->left()->ToBooleanIsFalse()) {
      // l (evals true) && r -> r
      // l (evals true) || r -> l
      // l (evals false) && r -> l
      // l (evals false) || r -> r
      if (is_logical_and == expr->left()->ToBooleanIsTrue()) {
        Drop(1);
        CHECK_ALIVE(VisitForValue(expr->right()));
      }
      return ast_context()->ReturnValue(Pop());
    }

    // We need an extra block to maintain edge-split form.
    HBasicBlock* empty_block = graph()->CreateBasicBlock();
    HBasicBlock* eval_right = graph()->CreateBasicBlock();
    ToBooleanICStub::Types expected(expr->left()->to_boolean_types());
    HBranch* test = is_logical_and
        ? New<HBranch>(left_value, expected, eval_right, empty_block)
        : New<HBranch>(left_value, expected, empty_block, eval_right);
    FinishCurrentBlock(test);

    set_current_block(eval_right);
    Drop(1);  // Value of the left subexpression.
    CHECK_BAILOUT(VisitForValue(expr->right()));

    // Join the short-circuit path with the right-operand path; the value of
    // the whole expression is on top of the stack in both environments.
    HBasicBlock* join_block =
      CreateJoin(empty_block, current_block(), expr->id());
    set_current_block(join_block);
    return ast_context()->ReturnValue(Pop());

  } else {
    DCHECK(ast_context()->IsEffect());
    // In an effect context, we don't need the value of the left subexpression,
    // only its control flow and side effects. We need an extra block to
    // maintain edge-split form.
    HBasicBlock* empty_block = graph()->CreateBasicBlock();
    HBasicBlock* right_block = graph()->CreateBasicBlock();
    if (is_logical_and) {
      CHECK_BAILOUT(VisitForControl(expr->left(), right_block, empty_block));
    } else {
      CHECK_BAILOUT(VisitForControl(expr->left(), empty_block, right_block));
    }

    // TODO(kmillikin): Find a way to fix this. It's ugly that there are
    // actually two empty blocks (one here and one inserted by
    // TestContext::BuildBranch, and that they both have an HSimulate though the
    // second one is not a merge node, and that we really have no good AST ID to
    // put on that first HSimulate.

    // Technically, we should be able to handle the case when one side of
    // the test is not connected, but this can trip up liveness analysis
    // if we did not fully connect the test context based on some optimistic
    // assumption. If such an assumption was violated, we would end up with
    // an environment with optimized-out values. So we should always
    // conservatively connect the test context.

    CHECK(right_block->HasPredecessor());
    CHECK(empty_block->HasPredecessor());

    empty_block->SetJoinId(expr->id());

    right_block->SetJoinId(expr->RightId());
    set_current_block(right_block);
    CHECK_BAILOUT(VisitForEffect(expr->right()));
    right_block = current_block();

    HBasicBlock* join_block =
      CreateJoin(empty_block, right_block, expr->id());
    set_current_block(join_block);
    // We did not materialize any value in the predecessor environments,
    // so there is no need to handle it here.
  }
}
11598
11599
11600void HOptimizedGraphBuilder::VisitArithmeticExpression(BinaryOperation* expr) {
11601 CHECK_ALIVE(VisitForValue(expr->left()));
11602 CHECK_ALIVE(VisitForValue(expr->right()));
11603 SetSourcePosition(expr->position());
11604 HValue* right = Pop();
11605 HValue* left = Pop();
11606 HValue* result =
11607 BuildBinaryOperation(expr, left, right,
11608 ast_context()->IsEffect() ? NO_PUSH_BEFORE_SIMULATE
11609 : PUSH_BEFORE_SIMULATE);
11610 if (top_info()->is_tracking_positions() && result->IsBinaryOperation()) {
11611 HBinaryOperation::cast(result)->SetOperandPositions(
11612 zone(),
11613 ScriptPositionToSourcePosition(expr->left()->position()),
11614 ScriptPositionToSourcePosition(expr->right()->position()));
11615 }
11616 return ast_context()->ReturnValue(result);
11617}
11618
11619
// Builds the control flow for a (typeof x === 'literal') comparison. Only
// the typeof operand is materialized on the expression stack; the literal
// side is embedded directly in the branch instruction.
void HOptimizedGraphBuilder::HandleLiteralCompareTypeof(CompareOperation* expr,
                                                        Expression* sub_expr,
                                                        Handle<String> check) {
  CHECK_ALIVE(VisitForTypeOf(sub_expr));
  SetSourcePosition(expr->position());
  HValue* value = Pop();
  HTypeofIsAndBranch* instr = New<HTypeofIsAndBranch>(value, check);
  return ast_context()->ReturnControl(instr, expr->id());
}
11629
11630
11631static bool IsLiteralCompareBool(Isolate* isolate,
11632 HValue* left,
11633 Token::Value op,
11634 HValue* right) {
11635 return op == Token::EQ_STRICT &&
11636 ((left->IsConstant() &&
11637 HConstant::cast(left)->handle(isolate)->IsBoolean()) ||
11638 (right->IsConstant() &&
11639 HConstant::cast(right)->handle(isolate)->IsBoolean()));
11640}
11641
11642
// Translates a comparison expression. Special-cases literal typeof tests,
// comparisons against undefined/null, %_ClassOf tests, boolean-constant
// strict equality, instanceof, and the 'in' operator before falling back to
// the generic compare instruction.
void HOptimizedGraphBuilder::VisitCompareOperation(CompareOperation* expr) {
  DCHECK(!HasStackOverflow());
  DCHECK(current_block() != NULL);
  DCHECK(current_block()->HasPredecessor());

  if (!top_info()->is_tracking_positions()) SetSourcePosition(expr->position());

  // Check for a few fast cases. The AST visiting behavior must be in sync
  // with the full codegen: We don't push both left and right values onto
  // the expression stack when one side is a special-case literal.
  Expression* sub_expr = NULL;
  Handle<String> check;
  if (expr->IsLiteralCompareTypeof(&sub_expr, &check)) {
    return HandleLiteralCompareTypeof(expr, sub_expr, check);
  }
  if (expr->IsLiteralCompareUndefined(&sub_expr)) {
    return HandleLiteralCompareNil(expr, sub_expr, kUndefinedValue);
  }
  if (expr->IsLiteralCompareNull(&sub_expr)) {
    return HandleLiteralCompareNil(expr, sub_expr, kNullValue);
  }

  // (%_ClassOf(x) === 'Literal'): branch directly on the class name.
  if (IsClassOfTest(expr)) {
    CallRuntime* call = expr->left()->AsCallRuntime();
    DCHECK(call->arguments()->length() == 1);
    CHECK_ALIVE(VisitForValue(call->arguments()->at(0)));
    HValue* value = Pop();
    Literal* literal = expr->right()->AsLiteral();
    Handle<String> rhs = Handle<String>::cast(literal->value());
    HClassOfTestAndBranch* instr = New<HClassOfTestAndBranch>(value, rhs);
    return ast_context()->ReturnControl(instr, expr->id());
  }

  Type* left_type = bounds_.get(expr->left()).lower;
  Type* right_type = bounds_.get(expr->right()).lower;
  Type* combined_type = expr->combined_type();

  CHECK_ALIVE(VisitForValue(expr->left()));
  CHECK_ALIVE(VisitForValue(expr->right()));

  HValue* right = Pop();
  HValue* left = Pop();
  Token::Value op = expr->op();

  // Strict equality against a boolean constant is plain object identity.
  if (IsLiteralCompareBool(isolate(), left, op, right)) {
    HCompareObjectEqAndBranch* result =
        New<HCompareObjectEqAndBranch>(left, right);
    return ast_context()->ReturnControl(result, expr->id());
  }

  if (op == Token::INSTANCEOF) {
    // Check to see if the rhs of the instanceof is a known function.
    if (right->IsConstant() &&
        HConstant::cast(right)->handle(isolate())->IsJSFunction()) {
      Handle<JSFunction> function =
          Handle<JSFunction>::cast(HConstant::cast(right)->handle(isolate()));
      // Make sure the prototype of {function} is the %FunctionPrototype%, and
      // it already has a meaningful initial map (i.e. we constructed at least
      // one instance using the constructor {function}).
      // We can only use the fast case if @@hasInstance was not used so far.
      if (function->has_initial_map() &&
          function->map()->prototype() ==
              function->native_context()->closure() &&
          !function->map()->has_non_instance_prototype() &&
          isolate()->IsHasInstanceLookupChainIntact()) {
        Handle<Map> initial_map(function->initial_map(), isolate());
        top_info()->dependencies()->AssumeInitialMapCantChange(initial_map);
        top_info()->dependencies()->AssumePropertyCell(
            isolate()->factory()->has_instance_protector());
        HInstruction* prototype =
            Add<HConstant>(handle(initial_map->prototype(), isolate()));
        HHasInPrototypeChainAndBranch* result =
            New<HHasInPrototypeChainAndBranch>(left, prototype);
        return ast_context()->ReturnControl(result, expr->id());
      }
    }

    // Slow path: call the generic InstanceOf stub.
    Callable callable = CodeFactory::InstanceOf(isolate());
    HValue* stub = Add<HConstant>(callable.code());
    HValue* values[] = {context(), left, right};
    HCallWithDescriptor* result = New<HCallWithDescriptor>(
        stub, 0, callable.descriptor(), ArrayVector(values));
    result->set_type(HType::Boolean());
    return ast_context()->ReturnInstruction(result, expr->id());

  } else if (op == Token::IN) {
    // The 'in' operator is implemented via the HasProperty stub.
    Callable callable = CodeFactory::HasProperty(isolate());
    HValue* stub = Add<HConstant>(callable.code());
    HValue* values[] = {context(), left, right};
    HInstruction* result =
        New<HCallWithDescriptor>(stub, 0, callable.descriptor(),
                                 Vector<HValue*>(values, arraysize(values)));
    return ast_context()->ReturnInstruction(result, expr->id());
  }

  // Generic numeric/string/object comparison based on combined feedback.
  PushBeforeSimulateBehavior push_behavior =
      ast_context()->IsEffect() ? NO_PUSH_BEFORE_SIMULATE
                                : PUSH_BEFORE_SIMULATE;
  HControlInstruction* compare = BuildCompareInstruction(
      op, left, right, left_type, right_type, combined_type,
      ScriptPositionToSourcePosition(expr->left()->position()),
      ScriptPositionToSourcePosition(expr->right()->position()),
      push_behavior, expr->id());
  if (compare == NULL) return;  // Bailed out.
  return ast_context()->ReturnControl(compare, expr->id());
}
11749
11750
// Lowers the comparison {left} {op} {right} to the most specific control
// instruction the collected CompareIC feedback ({combined_type}) permits.
// Returns NULL only after calling Bailout(). {push_sim_result} controls
// whether a generic compare's value is pushed before the simulate when it
// has observable side effects; {bailout_id} is the simulate's deopt point.
HControlInstruction* HOptimizedGraphBuilder::BuildCompareInstruction(
    Token::Value op, HValue* left, HValue* right, Type* left_type,
    Type* right_type, Type* combined_type, SourcePosition left_position,
    SourcePosition right_position, PushBeforeSimulateBehavior push_sim_result,
    BailoutId bailout_id) {
  // Cases handled below depend on collected type feedback. They should
  // soft deoptimize when there is no type feedback.
  if (!combined_type->IsInhabited()) {
    Add<HDeoptimize>(
        Deoptimizer::kInsufficientTypeFeedbackForCombinedTypeOfBinaryOperation,
        Deoptimizer::SOFT);
    combined_type = left_type = right_type = Type::Any();
  }

  Representation left_rep = RepresentationFor(left_type);
  Representation right_rep = RepresentationFor(right_type);
  Representation combined_rep = RepresentationFor(combined_type);

  if (combined_type->Is(Type::Receiver())) {
    if (Token::IsEqualityOp(op)) {
      // HCompareObjectEqAndBranch can only deal with object, so
      // exclude numbers.
      if ((left->IsConstant() &&
           HConstant::cast(left)->HasNumberValue()) ||
          (right->IsConstant() &&
           HConstant::cast(right)->HasNumberValue())) {
        Add<HDeoptimize>(Deoptimizer::kTypeMismatchBetweenFeedbackAndConstant,
                         Deoptimizer::SOFT);
        // The caller expects a branch instruction, so make it happy.
        return New<HBranch>(graph()->GetConstantTrue());
      }
      // Can we get away with map check and not instance type check?
      // NOTE(review): the operand defined in the earlier block is chosen for
      // the check — presumably so the check dominates both uses; confirm.
      HValue* operand_to_check =
          left->block()->block_id() < right->block()->block_id() ? left : right;
      if (combined_type->IsClass()) {
        // Monomorphic feedback: a map check suffices.
        Handle<Map> map = combined_type->AsClass()->Map();
        AddCheckMap(operand_to_check, map);
        HCompareObjectEqAndBranch* result =
            New<HCompareObjectEqAndBranch>(left, right);
        if (top_info()->is_tracking_positions()) {
          result->set_operand_position(zone(), 0, left_position);
          result->set_operand_position(zone(), 1, right_position);
        }
        return result;
      } else {
        // Polymorphic receiver feedback: fall back to an instance type check.
        BuildCheckHeapObject(operand_to_check);
        Add<HCheckInstanceType>(operand_to_check,
                                HCheckInstanceType::IS_JS_RECEIVER);
        HCompareObjectEqAndBranch* result =
            New<HCompareObjectEqAndBranch>(left, right);
        return result;
      }
    } else {
      if (combined_type->IsClass()) {
        // TODO(bmeurer): This is an optimized version of an x < y, x > y,
        // x <= y or x >= y, where both x and y are spec objects with the
        // same map. The CompareIC collects this map for us. So if we know
        // that there's no @@toPrimitive on the map (including the prototype
        // chain), and both valueOf and toString are the default initial
        // implementations (on the %ObjectPrototype%), then we can reduce
        // the comparison to map checks on x and y, because the comparison
        // will turn into a comparison of "[object CLASS]" to itself (the
        // default outcome of toString, since valueOf returns a spec object).
        // This is pretty much adhoc, so in TurboFan we could do a lot better
        // and inline the interesting parts of ToPrimitive (actually we could
        // even do that in Crankshaft but we don't want to waste too much
        // time on this now).
        DCHECK(Token::IsOrderedRelationalCompareOp(op));
        Handle<Map> map = combined_type->AsClass()->Map();
        PropertyAccessInfo value_of(this, LOAD, map,
                                    isolate()->factory()->valueOf_string());
        PropertyAccessInfo to_primitive(
            this, LOAD, map, isolate()->factory()->to_primitive_symbol());
        PropertyAccessInfo to_string(this, LOAD, map,
                                     isolate()->factory()->toString_string());
        PropertyAccessInfo to_string_tag(
            this, LOAD, map, isolate()->factory()->to_string_tag_symbol());
        if (to_primitive.CanAccessMonomorphic() && !to_primitive.IsFound() &&
            to_string_tag.CanAccessMonomorphic() &&
            (!to_string_tag.IsFound() || to_string_tag.IsData() ||
             to_string_tag.IsDataConstant()) &&
            value_of.CanAccessMonomorphic() && value_of.IsDataConstant() &&
            value_of.constant().is_identical_to(isolate()->object_value_of()) &&
            to_string.CanAccessMonomorphic() && to_string.IsDataConstant() &&
            to_string.constant().is_identical_to(
                isolate()->object_to_string())) {
          // We depend on the prototype chain to stay the same, because we
          // also need to deoptimize when someone installs @@toPrimitive
          // or @@toStringTag somewhere in the prototype chain.
          BuildCheckPrototypeMaps(handle(JSObject::cast(map->prototype())),
                                  Handle<JSObject>::null());
          AddCheckMap(left, map);
          AddCheckMap(right, map);
          // The caller expects a branch instruction, so make it happy.
          return New<HBranch>(
              graph()->GetConstantBool(op == Token::LTE || op == Token::GTE));
        }
      }
      Bailout(kUnsupportedNonPrimitiveCompare);
      return NULL;
    }
  } else if (combined_type->Is(Type::InternalizedString()) &&
             Token::IsEqualityOp(op)) {
    // Internalized strings are unique, so equality reduces to reference
    // equality once both operands are checked to be internalized strings.
    // If we have a constant argument, it should be consistent with the type
    // feedback (otherwise we fail assertions in HCompareObjectEqAndBranch).
    if ((left->IsConstant() &&
         !HConstant::cast(left)->HasInternalizedStringValue()) ||
        (right->IsConstant() &&
         !HConstant::cast(right)->HasInternalizedStringValue())) {
      Add<HDeoptimize>(Deoptimizer::kTypeMismatchBetweenFeedbackAndConstant,
                       Deoptimizer::SOFT);
      // The caller expects a branch instruction, so make it happy.
      return New<HBranch>(graph()->GetConstantTrue());
    }
    BuildCheckHeapObject(left);
    Add<HCheckInstanceType>(left, HCheckInstanceType::IS_INTERNALIZED_STRING);
    BuildCheckHeapObject(right);
    Add<HCheckInstanceType>(right, HCheckInstanceType::IS_INTERNALIZED_STRING);
    HCompareObjectEqAndBranch* result =
        New<HCompareObjectEqAndBranch>(left, right);
    return result;
  } else if (combined_type->Is(Type::String())) {
    // General strings: check both operands and use the dedicated string
    // compare instruction.
    BuildCheckHeapObject(left);
    Add<HCheckInstanceType>(left, HCheckInstanceType::IS_STRING);
    BuildCheckHeapObject(right);
    Add<HCheckInstanceType>(right, HCheckInstanceType::IS_STRING);
    HStringCompareAndBranch* result =
        New<HStringCompareAndBranch>(left, right, op);
    return result;
  } else if (combined_type->Is(Type::Boolean())) {
    AddCheckMap(left, isolate()->factory()->boolean_map());
    AddCheckMap(right, isolate()->factory()->boolean_map());
    if (Token::IsEqualityOp(op)) {
      // Booleans are singletons, so equality is reference equality.
      HCompareObjectEqAndBranch* result =
          New<HCompareObjectEqAndBranch>(left, right);
      return result;
    }
    // Ordered comparison: load the oddballs' to-number values (Smis) and
    // compare numerically.
    left = Add<HLoadNamedField>(
        left, nullptr,
        HObjectAccess::ForOddballToNumber(Representation::Smi()));
    right = Add<HLoadNamedField>(
        right, nullptr,
        HObjectAccess::ForOddballToNumber(Representation::Smi()));
    HCompareNumericAndBranch* result =
        New<HCompareNumericAndBranch>(left, right, op);
    return result;
  } else {
    // Abstract equality against an undetectable oddball constant reduces to
    // an undetectability check on the other operand.
    if (op == Token::EQ) {
      if (left->IsConstant() &&
          HConstant::cast(left)->GetInstanceType() == ODDBALL_TYPE &&
          HConstant::cast(left)->IsUndetectable()) {
        return New<HIsUndetectableAndBranch>(right);
      }

      if (right->IsConstant() &&
          HConstant::cast(right)->GetInstanceType() == ODDBALL_TYPE &&
          HConstant::cast(right)->IsUndetectable()) {
        return New<HIsUndetectableAndBranch>(left);
      }
    }

    if (combined_rep.IsTagged() || combined_rep.IsNone()) {
      // No usable representation: emit a generic compare plus a branch on
      // its (boolean) result.
      HCompareGeneric* result = Add<HCompareGeneric>(left, right, op);
      result->set_observed_input_representation(1, left_rep);
      result->set_observed_input_representation(2, right_rep);
      if (result->HasObservableSideEffects()) {
        if (push_sim_result == PUSH_BEFORE_SIMULATE) {
          Push(result);
          AddSimulate(bailout_id, REMOVABLE_SIMULATE);
          Drop(1);
        } else {
          AddSimulate(bailout_id, REMOVABLE_SIMULATE);
        }
      }
      // TODO(jkummerow): Can we make this more efficient?
      HBranch* branch = New<HBranch>(result);
      return branch;
    } else {
      HCompareNumericAndBranch* result =
          New<HCompareNumericAndBranch>(left, right, op);
      result->set_observed_input_representation(left_rep, right_rep);
      if (top_info()->is_tracking_positions()) {
        result->SetOperandPositions(zone(), left_position, right_position);
      }
      return result;
    }
  }
}
11939
11940
11941void HOptimizedGraphBuilder::HandleLiteralCompareNil(CompareOperation* expr,
11942 Expression* sub_expr,
11943 NilValue nil) {
11944 DCHECK(!HasStackOverflow());
11945 DCHECK(current_block() != NULL);
11946 DCHECK(current_block()->HasPredecessor());
11947 DCHECK(expr->op() == Token::EQ || expr->op() == Token::EQ_STRICT);
11948 if (!top_info()->is_tracking_positions()) SetSourcePosition(expr->position());
11949 CHECK_ALIVE(VisitForValue(sub_expr));
11950 HValue* value = Pop();
Ben Murdochda12d292016-06-02 14:46:10 +010011951 HControlInstruction* instr;
Ben Murdoch4a90d5f2016-03-22 12:00:34 +000011952 if (expr->op() == Token::EQ_STRICT) {
11953 HConstant* nil_constant = nil == kNullValue
11954 ? graph()->GetConstantNull()
11955 : graph()->GetConstantUndefined();
Ben Murdochda12d292016-06-02 14:46:10 +010011956 instr = New<HCompareObjectEqAndBranch>(value, nil_constant);
Ben Murdoch4a90d5f2016-03-22 12:00:34 +000011957 } else {
11958 DCHECK_EQ(Token::EQ, expr->op());
Ben Murdochda12d292016-06-02 14:46:10 +010011959 instr = New<HIsUndetectableAndBranch>(value);
Ben Murdoch4a90d5f2016-03-22 12:00:34 +000011960 }
Ben Murdochda12d292016-06-02 14:46:10 +010011961 return ast_context()->ReturnControl(instr, expr->id());
Ben Murdoch4a90d5f2016-03-22 12:00:34 +000011962}
11963
11964
11965void HOptimizedGraphBuilder::VisitSpread(Spread* expr) { UNREACHABLE(); }
11966
11967
void HOptimizedGraphBuilder::VisitEmptyParentheses(EmptyParentheses* expr) {
  // Empty-parentheses nodes must never reach the optimizing graph builder.
  UNREACHABLE();
}
11971
11972
11973HValue* HOptimizedGraphBuilder::AddThisFunction() {
11974 return AddInstruction(BuildThisFunction());
11975}
11976
11977
11978HInstruction* HOptimizedGraphBuilder::BuildThisFunction() {
11979 // If we share optimized code between different closures, the
11980 // this-function is not a constant, except inside an inlined body.
11981 if (function_state()->outer() != NULL) {
11982 return New<HConstant>(
11983 function_state()->compilation_info()->closure());
11984 } else {
11985 return New<HThisFunction>();
11986 }
11987}
11988
11989
// Emits code that materializes a copy of {boilerplate_object} inline:
// allocates the object, initializes its header, elements backing store and
// in-object properties, recursing into nested boilerplates via the
// allocation-site scopes tracked by {site_context}.
HInstruction* HOptimizedGraphBuilder::BuildFastLiteral(
    Handle<JSObject> boilerplate_object,
    AllocationSiteUsageContext* site_context) {
  // Everything emitted below is pure allocation and initialization.
  NoObservableSideEffectsScope no_effects(this);
  Handle<Map> initial_map(boilerplate_object->map());
  InstanceType instance_type = initial_map->instance_type();
  DCHECK(instance_type == JS_ARRAY_TYPE || instance_type == JS_OBJECT_TYPE);

  HType type = instance_type == JS_ARRAY_TYPE
      ? HType::JSArray() : HType::JSObject();
  HValue* object_size_constant = Add<HConstant>(initial_map->instance_size());

  // The pretenuring decision is taken from the outermost allocation site.
  PretenureFlag pretenure_flag = NOT_TENURED;
  Handle<AllocationSite> top_site(*site_context->top(), isolate());
  if (FLAG_allocation_site_pretenuring) {
    pretenure_flag = top_site->GetPretenureMode();
  }

  Handle<AllocationSite> current_site(*site_context->current(), isolate());
  if (*top_site == *current_site) {
    // We install a dependency for pretenuring only on the outermost literal.
    top_info()->dependencies()->AssumeTenuringDecision(top_site);
  }
  top_info()->dependencies()->AssumeTransitionStable(current_site);

  HInstruction* object =
      Add<HAllocate>(object_size_constant, type, pretenure_flag, instance_type,
                     graph()->GetConstant0(), top_site);

  // If allocation folding reaches Page::kMaxRegularHeapObjectSize the
  // elements array may not get folded into the object. Hence, we set the
  // elements pointer to empty fixed array and let store elimination remove
  // this store in the folding case.
  HConstant* empty_fixed_array = Add<HConstant>(
      isolate()->factory()->empty_fixed_array());
  Add<HStoreNamedField>(object, HObjectAccess::ForElementsPointer(),
                        empty_fixed_array);

  BuildEmitObjectHeader(boilerplate_object, object);

  // Similarly to the elements pointer, there is no guarantee that all
  // property allocations can get folded, so pre-initialize all in-object
  // properties to a safe value.
  BuildInitializeInobjectProperties(object, initial_map);

  // COW backing stores are shared by reference, not copied (size 0 below).
  Handle<FixedArrayBase> elements(boilerplate_object->elements());
  int elements_size = (elements->length() > 0 &&
      elements->map() != isolate()->heap()->fixed_cow_array_map()) ?
      elements->Size() : 0;

  if (pretenure_flag == TENURED &&
      elements->map() == isolate()->heap()->fixed_cow_array_map() &&
      isolate()->heap()->InNewSpace(*elements)) {
    // If we would like to pretenure a fixed cow array, we must ensure that the
    // array is already in old space, otherwise we'll create too many old-to-
    // new-space pointers (overflowing the store buffer).
    elements = Handle<FixedArrayBase>(
        isolate()->factory()->CopyAndTenureFixedCOWArray(
            Handle<FixedArray>::cast(elements)));
    boilerplate_object->set_elements(*elements);
  }

  HInstruction* object_elements = NULL;
  if (elements_size > 0) {
    // Allocate a fresh backing store and copy the boilerplate's elements.
    HValue* object_elements_size = Add<HConstant>(elements_size);
    InstanceType instance_type = boilerplate_object->HasFastDoubleElements()
        ? FIXED_DOUBLE_ARRAY_TYPE : FIXED_ARRAY_TYPE;
    object_elements = Add<HAllocate>(object_elements_size, HType::HeapObject(),
                                     pretenure_flag, instance_type,
                                     graph()->GetConstant0(), top_site);
    BuildEmitElements(boilerplate_object, elements, object_elements,
                      site_context);
    Add<HStoreNamedField>(object, HObjectAccess::ForElementsPointer(),
                          object_elements);
  } else {
    // Empty or COW elements: reference the boilerplate's store directly.
    Handle<Object> elements_field =
        Handle<Object>(boilerplate_object->elements(), isolate());
    HInstruction* object_elements_cow = Add<HConstant>(elements_field);
    Add<HStoreNamedField>(object, HObjectAccess::ForElementsPointer(),
                          object_elements_cow);
  }

  // Copy in-object properties.
  if (initial_map->NumberOfFields() != 0 ||
      initial_map->unused_property_fields() > 0) {
    BuildEmitInObjectProperties(boilerplate_object, object, site_context,
                                pretenure_flag);
  }
  return object;
}
12080
12081
// Initializes the header of the freshly allocated literal {object}: its
// map, the (always empty) out-of-object properties pointer, and, for
// arrays, the length field.
void HOptimizedGraphBuilder::BuildEmitObjectHeader(
    Handle<JSObject> boilerplate_object,
    HInstruction* object) {
  // Fast-literal boilerplates never carry out-of-object properties.
  DCHECK(boilerplate_object->properties()->length() == 0);

  Handle<Map> boilerplate_object_map(boilerplate_object->map());
  AddStoreMapConstant(object, boilerplate_object_map);

  Handle<Object> properties_field =
      Handle<Object>(boilerplate_object->properties(), isolate());
  DCHECK(*properties_field == isolate()->heap()->empty_fixed_array());
  HInstruction* properties = Add<HConstant>(properties_field);
  HObjectAccess access = HObjectAccess::ForPropertiesPointer();
  Add<HStoreNamedField>(object, access, properties);

  if (boilerplate_object->IsJSArray()) {
    Handle<JSArray> boilerplate_array =
        Handle<JSArray>::cast(boilerplate_object);
    Handle<Object> length_field =
        Handle<Object>(boilerplate_array->length(), isolate());
    HInstruction* length = Add<HConstant>(length_field);

    // Boilerplate array lengths are always Smis.
    DCHECK(boilerplate_array->length()->IsSmi());
    Add<HStoreNamedField>(object, HObjectAccess::ForArrayLength(
        boilerplate_array->GetElementsKind()), length);
  }
}
12109
12110
// Copies the in-object data properties of {boilerplate_object} into the new
// literal {object}. Nested object values are deep-copied recursively via
// BuildFastLiteral; double fields get a fresh mutable HeapNumber box.
// Remaining unused in-object slots are filled with the one-pointer filler
// map so the object is fully initialized.
void HOptimizedGraphBuilder::BuildEmitInObjectProperties(
    Handle<JSObject> boilerplate_object,
    HInstruction* object,
    AllocationSiteUsageContext* site_context,
    PretenureFlag pretenure_flag) {
  Handle<Map> boilerplate_map(boilerplate_object->map());
  Handle<DescriptorArray> descriptors(boilerplate_map->instance_descriptors());
  int limit = boilerplate_map->NumberOfOwnDescriptors();

  int copied_fields = 0;
  for (int i = 0; i < limit; i++) {
    PropertyDetails details = descriptors->GetDetails(i);
    // Only plain data fields are copied here.
    if (details.type() != DATA) continue;
    copied_fields++;
    FieldIndex field_index = FieldIndex::ForDescriptor(*boilerplate_map, i);


    int property_offset = field_index.offset();
    Handle<Name> name(descriptors->GetKey(i));

    // The access for the store depends on the type of the boilerplate.
    HObjectAccess access = boilerplate_object->IsJSArray() ?
        HObjectAccess::ForJSArrayOffset(property_offset) :
        HObjectAccess::ForMapAndOffset(boilerplate_map, property_offset);

    if (boilerplate_object->IsUnboxedDoubleField(field_index)) {
      CHECK(!boilerplate_object->IsJSArray());
      // Unboxed double field: store the raw double value directly.
      double value = boilerplate_object->RawFastDoublePropertyAt(field_index);
      access = access.WithRepresentation(Representation::Double());
      Add<HStoreNamedField>(object, access, Add<HConstant>(value));
      continue;
    }
    Handle<Object> value(boilerplate_object->RawFastPropertyAt(field_index),
                         isolate());

    if (value->IsJSObject()) {
      // Nested literal: copy it recursively under its own allocation site.
      Handle<JSObject> value_object = Handle<JSObject>::cast(value);
      Handle<AllocationSite> current_site = site_context->EnterNewScope();
      HInstruction* result =
          BuildFastLiteral(value_object, site_context);
      site_context->ExitScope(current_site, value_object);
      Add<HStoreNamedField>(object, access, result);
    } else {
      Representation representation = details.representation();
      HInstruction* value_instruction;

      if (representation.IsDouble()) {
        // Allocate a HeapNumber box and store the value into it.
        HValue* heap_number_constant = Add<HConstant>(HeapNumber::kSize);
        HInstruction* double_box = Add<HAllocate>(
            heap_number_constant, HType::HeapObject(), pretenure_flag,
            MUTABLE_HEAP_NUMBER_TYPE, graph()->GetConstant0());
        AddStoreMapConstant(double_box,
            isolate()->factory()->mutable_heap_number_map());
        // Unwrap the mutable heap number from the boilerplate.
        HValue* double_value =
            Add<HConstant>(Handle<HeapNumber>::cast(value)->value());
        Add<HStoreNamedField>(
            double_box, HObjectAccess::ForHeapNumberValue(), double_value);
        value_instruction = double_box;
      } else if (representation.IsSmi()) {
        // Uninitialized fields are written as zero so the slot holds a Smi.
        value_instruction = value->IsUninitialized()
                                ? graph()->GetConstant0()
                                : Add<HConstant>(value);
        // Ensure that value is stored as smi.
        access = access.WithRepresentation(representation);
      } else {
        value_instruction = Add<HConstant>(value);
      }

      Add<HStoreNamedField>(object, access, value_instruction);
    }
  }

  // Fill the remaining in-object slots with the one-pointer filler map.
  int inobject_properties = boilerplate_object->map()->GetInObjectProperties();
  HInstruction* value_instruction =
      Add<HConstant>(isolate()->factory()->one_pointer_filler_map());
  for (int i = copied_fields; i < inobject_properties; i++) {
    DCHECK(boilerplate_object->IsJSObject());
    int property_offset = boilerplate_object->GetInObjectPropertyOffset(i);
    HObjectAccess access =
        HObjectAccess::ForMapAndOffset(boilerplate_map, property_offset);
    Add<HStoreNamedField>(object, access, value_instruction);
  }
}
12196
12197
12198void HOptimizedGraphBuilder::BuildEmitElements(
12199 Handle<JSObject> boilerplate_object,
12200 Handle<FixedArrayBase> elements,
12201 HValue* object_elements,
12202 AllocationSiteUsageContext* site_context) {
12203 ElementsKind kind = boilerplate_object->map()->elements_kind();
12204 int elements_length = elements->length();
12205 HValue* object_elements_length = Add<HConstant>(elements_length);
12206 BuildInitializeElementsHeader(object_elements, kind, object_elements_length);
12207
12208 // Copy elements backing store content.
12209 if (elements->IsFixedDoubleArray()) {
12210 BuildEmitFixedDoubleArray(elements, kind, object_elements);
12211 } else if (elements->IsFixedArray()) {
12212 BuildEmitFixedArray(elements, kind, object_elements,
12213 site_context);
12214 } else {
12215 UNREACHABLE();
12216 }
12217}
12218
12219
12220void HOptimizedGraphBuilder::BuildEmitFixedDoubleArray(
12221 Handle<FixedArrayBase> elements,
12222 ElementsKind kind,
12223 HValue* object_elements) {
12224 HInstruction* boilerplate_elements = Add<HConstant>(elements);
12225 int elements_length = elements->length();
12226 for (int i = 0; i < elements_length; i++) {
12227 HValue* key_constant = Add<HConstant>(i);
12228 HInstruction* value_instruction =
12229 Add<HLoadKeyed>(boilerplate_elements, key_constant, nullptr, nullptr,
12230 kind, ALLOW_RETURN_HOLE);
12231 HInstruction* store = Add<HStoreKeyed>(object_elements, key_constant,
12232 value_instruction, nullptr, kind);
12233 store->SetFlag(HValue::kAllowUndefinedAsNaN);
12234 }
12235}
12236
12237
// Copies the contents of a (non-double) fixed-array backing store from the
// boilerplate into the freshly allocated {object_elements}. Elements that
// are themselves literal objects are deep-copied via BuildFastLiteral.
void HOptimizedGraphBuilder::BuildEmitFixedArray(
    Handle<FixedArrayBase> elements,
    ElementsKind kind,
    HValue* object_elements,
    AllocationSiteUsageContext* site_context) {
  HInstruction* boilerplate_elements = Add<HConstant>(elements);
  int elements_length = elements->length();
  Handle<FixedArray> fast_elements = Handle<FixedArray>::cast(elements);
  for (int i = 0; i < elements_length; i++) {
    Handle<Object> value(fast_elements->get(i), isolate());
    HValue* key_constant = Add<HConstant>(i);
    if (value->IsJSObject()) {
      // Nested literal: copy it recursively under its own allocation site.
      Handle<JSObject> value_object = Handle<JSObject>::cast(value);
      Handle<AllocationSite> current_site = site_context->EnterNewScope();
      HInstruction* result =
          BuildFastLiteral(value_object, site_context);
      site_context->ExitScope(current_site, value_object);
      Add<HStoreKeyed>(object_elements, key_constant, result, nullptr, kind);
    } else {
      // For holey Smi arrays copy as FAST_HOLEY_ELEMENTS so the hole value
      // can be loaded (ALLOW_RETURN_HOLE) and stored back unchanged.
      ElementsKind copy_kind =
          kind == FAST_HOLEY_SMI_ELEMENTS ? FAST_HOLEY_ELEMENTS : kind;
      HInstruction* value_instruction =
          Add<HLoadKeyed>(boilerplate_elements, key_constant, nullptr, nullptr,
                          copy_kind, ALLOW_RETURN_HOLE);
      Add<HStoreKeyed>(object_elements, key_constant, value_instruction,
                       nullptr, copy_kind);
    }
  }
}
12267
12268
12269void HOptimizedGraphBuilder::VisitThisFunction(ThisFunction* expr) {
12270 DCHECK(!HasStackOverflow());
12271 DCHECK(current_block() != NULL);
12272 DCHECK(current_block()->HasPredecessor());
12273 HInstruction* instr = BuildThisFunction();
12274 return ast_context()->ReturnInstruction(instr, expr->id());
12275}
12276
12277
void HOptimizedGraphBuilder::VisitSuperPropertyReference(
    SuperPropertyReference* expr) {
  DCHECK(!HasStackOverflow());
  DCHECK(current_block() != NULL);
  DCHECK(current_block()->HasPredecessor());
  // super property access is not supported here; bail out of optimization.
  return Bailout(kSuperReference);
}
12285
12286
void HOptimizedGraphBuilder::VisitSuperCallReference(SuperCallReference* expr) {
  DCHECK(!HasStackOverflow());
  DCHECK(current_block() != NULL);
  DCHECK(current_block()->HasPredecessor());
  // super calls are not supported here; bail out of optimization.
  return Bailout(kSuperReference);
}
12293
12294
12295void HOptimizedGraphBuilder::VisitDeclarations(
12296 ZoneList<Declaration*>* declarations) {
12297 DCHECK(globals_.is_empty());
12298 AstVisitor::VisitDeclarations(declarations);
12299 if (!globals_.is_empty()) {
12300 Handle<FixedArray> array =
12301 isolate()->factory()->NewFixedArray(globals_.length(), TENURED);
12302 for (int i = 0; i < globals_.length(); ++i) array->set(i, *globals_.at(i));
Ben Murdochc5610432016-08-08 18:44:38 +010012303 int flags = current_info()->GetDeclareGlobalsFlags();
Ben Murdoch4a90d5f2016-03-22 12:00:34 +000012304 Add<HDeclareGlobals>(array, flags);
12305 globals_.Rewind(0);
12306 }
12307}
12308
12309
// Declares a single variable. Global declarations are queued in globals_
// (flushed later by VisitDeclarations); stack- and context-allocated
// let/const bindings are initialized with the hole value.
void HOptimizedGraphBuilder::VisitVariableDeclaration(
    VariableDeclaration* declaration) {
  VariableProxy* proxy = declaration->proxy();
  VariableMode mode = declaration->mode();
  Variable* variable = proxy->var();
  // let/const bindings start out holding the hole until initialized.
  bool hole_init = mode == LET || mode == CONST;
  switch (variable->location()) {
    case VariableLocation::GLOBAL:
    case VariableLocation::UNALLOCATED:
      DCHECK(!variable->binding_needs_init());
      // Queue the (name, initial value) pair for HDeclareGlobals.
      globals_.Add(variable->name(), zone());
      globals_.Add(isolate()->factory()->undefined_value(), zone());
      return;
    case VariableLocation::PARAMETER:
    case VariableLocation::LOCAL:
      if (hole_init) {
        HValue* value = graph()->GetConstantHole();
        environment()->Bind(variable, value);
      }
      break;
    case VariableLocation::CONTEXT:
      if (hole_init) {
        HValue* value = graph()->GetConstantHole();
        HValue* context = environment()->context();
        HStoreContextSlot* store = Add<HStoreContextSlot>(
            context, variable->index(), HStoreContextSlot::kNoCheck, value);
        // Context stores with observable effects need a simulate for deopt.
        if (store->HasObservableSideEffects()) {
          Add<HSimulate>(proxy->id(), REMOVABLE_SIMULATE);
        }
      }
      break;
    case VariableLocation::LOOKUP:
      return Bailout(kUnsupportedLookupSlotInDeclaration);
  }
}
12345
12346
// Declares a function binding. Global function declarations queue the
// (name, SharedFunctionInfo) pair for HDeclareGlobals; local and context
// bindings evaluate the function literal and store the value directly.
void HOptimizedGraphBuilder::VisitFunctionDeclaration(
    FunctionDeclaration* declaration) {
  VariableProxy* proxy = declaration->proxy();
  Variable* variable = proxy->var();
  switch (variable->location()) {
    case VariableLocation::GLOBAL:
    case VariableLocation::UNALLOCATED: {
      globals_.Add(variable->name(), zone());
      Handle<SharedFunctionInfo> function = Compiler::GetSharedFunctionInfo(
          declaration->fun(), current_info()->script(), top_info());
      // Check for stack-overflow exception.
      if (function.is_null()) return SetStackOverflow();
      globals_.Add(function, zone());
      return;
    }
    case VariableLocation::PARAMETER:
    case VariableLocation::LOCAL: {
      CHECK_ALIVE(VisitForValue(declaration->fun()));
      HValue* value = Pop();
      BindIfLive(variable, value);
      break;
    }
    case VariableLocation::CONTEXT: {
      CHECK_ALIVE(VisitForValue(declaration->fun()));
      HValue* value = Pop();
      HValue* context = environment()->context();
      HStoreContextSlot* store = Add<HStoreContextSlot>(
          context, variable->index(), HStoreContextSlot::kNoCheck, value);
      // Context stores with observable effects need a simulate for deopt.
      if (store->HasObservableSideEffects()) {
        Add<HSimulate>(proxy->id(), REMOVABLE_SIMULATE);
      }
      break;
    }
    case VariableLocation::LOOKUP:
      return Bailout(kUnsupportedLookupSlotInDeclaration);
  }
}
12384
12385
void HOptimizedGraphBuilder::VisitImportDeclaration(
    ImportDeclaration* declaration) {
  // Module import declarations must never reach this graph builder.
  UNREACHABLE();
}
12390
12391
void HOptimizedGraphBuilder::VisitExportDeclaration(
    ExportDeclaration* declaration) {
  // Module export declarations must never reach this graph builder.
  UNREACHABLE();
}
12396
12397
Ben Murdoch097c5b22016-05-18 11:27:45 +010012398void HOptimizedGraphBuilder::VisitRewritableExpression(
12399 RewritableExpression* node) {
Ben Murdoch4a90d5f2016-03-22 12:00:34 +000012400 CHECK_ALIVE(Visit(node->expression()));
12401}
12402
12403
12404// Generators for inline runtime functions.
12405// Support for types.
12406void HOptimizedGraphBuilder::GenerateIsSmi(CallRuntime* call) {
12407 DCHECK(call->arguments()->length() == 1);
12408 CHECK_ALIVE(VisitForValue(call->arguments()->at(0)));
12409 HValue* value = Pop();
12410 HIsSmiAndBranch* result = New<HIsSmiAndBranch>(value);
12411 return ast_context()->ReturnControl(result, call->id());
12412}
12413
12414
12415void HOptimizedGraphBuilder::GenerateIsJSReceiver(CallRuntime* call) {
12416 DCHECK(call->arguments()->length() == 1);
12417 CHECK_ALIVE(VisitForValue(call->arguments()->at(0)));
12418 HValue* value = Pop();
12419 HHasInstanceTypeAndBranch* result =
12420 New<HHasInstanceTypeAndBranch>(value,
12421 FIRST_JS_RECEIVER_TYPE,
12422 LAST_JS_RECEIVER_TYPE);
12423 return ast_context()->ReturnControl(result, call->id());
12424}
12425
12426
Ben Murdoch4a90d5f2016-03-22 12:00:34 +000012427void HOptimizedGraphBuilder::GenerateHasCachedArrayIndex(CallRuntime* call) {
12428 DCHECK(call->arguments()->length() == 1);
12429 CHECK_ALIVE(VisitForValue(call->arguments()->at(0)));
12430 HValue* value = Pop();
12431 HHasCachedArrayIndexAndBranch* result =
12432 New<HHasCachedArrayIndexAndBranch>(value);
12433 return ast_context()->ReturnControl(result, call->id());
12434}
12435
12436
12437void HOptimizedGraphBuilder::GenerateIsArray(CallRuntime* call) {
12438 DCHECK(call->arguments()->length() == 1);
12439 CHECK_ALIVE(VisitForValue(call->arguments()->at(0)));
12440 HValue* value = Pop();
12441 HHasInstanceTypeAndBranch* result =
12442 New<HHasInstanceTypeAndBranch>(value, JS_ARRAY_TYPE);
12443 return ast_context()->ReturnControl(result, call->id());
12444}
12445
12446
12447void HOptimizedGraphBuilder::GenerateIsTypedArray(CallRuntime* call) {
12448 DCHECK(call->arguments()->length() == 1);
12449 CHECK_ALIVE(VisitForValue(call->arguments()->at(0)));
12450 HValue* value = Pop();
12451 HHasInstanceTypeAndBranch* result =
12452 New<HHasInstanceTypeAndBranch>(value, JS_TYPED_ARRAY_TYPE);
12453 return ast_context()->ReturnControl(result, call->id());
12454}
12455
12456
// Lowers %_IsRegExp: branches on whether the argument has JS_REGEXP_TYPE.
void HOptimizedGraphBuilder::GenerateIsRegExp(CallRuntime* call) {
  DCHECK(call->arguments()->length() == 1);
  CHECK_ALIVE(VisitForValue(call->arguments()->at(0)));
  HValue* value = Pop();
  HHasInstanceTypeAndBranch* result =
      New<HHasInstanceTypeAndBranch>(value, JS_REGEXP_TYPE);
  return ast_context()->ReturnControl(result, call->id());
}
12465
12466
// Lowers %_ToInteger.  A Smi is already an integer and is returned as-is;
// everything else goes through the ToInteger code stub.
void HOptimizedGraphBuilder::GenerateToInteger(CallRuntime* call) {
  DCHECK_EQ(1, call->arguments()->length());
  CHECK_ALIVE(VisitForValue(call->arguments()->at(0)));
  HValue* input = Pop();
  if (input->type().IsSmi()) {
    return ast_context()->ReturnValue(input);
  } else {
    Callable callable = CodeFactory::ToInteger(isolate());
    HValue* stub = Add<HConstant>(callable.code());
    HValue* values[] = {context(), input};
    HInstruction* result = New<HCallWithDescriptor>(
        stub, 0, callable.descriptor(), ArrayVector(values));
    return ast_context()->ReturnInstruction(result, call->id());
  }
}
12482
12483
// Lowers %_ToName.  Numbers are converted inline via BuildNumberToString,
// strings pass through unchanged, and anything else falls back to the
// ToName code stub.
void HOptimizedGraphBuilder::GenerateToName(CallRuntime* call) {
  DCHECK_EQ(1, call->arguments()->length());
  CHECK_ALIVE(VisitForValue(call->arguments()->at(0)));
  HValue* input = Pop();
  if (input->type().IsSmi()) {
    HValue* result = BuildNumberToString(input, Type::SignedSmall());
    return ast_context()->ReturnValue(result);
  } else if (input->type().IsTaggedNumber()) {
    HValue* result = BuildNumberToString(input, Type::Number());
    return ast_context()->ReturnValue(result);
  } else if (input->type().IsString()) {
    // Strings are already valid names.
    return ast_context()->ReturnValue(input);
  } else {
    Callable callable = CodeFactory::ToName(isolate());
    HValue* stub = Add<HConstant>(callable.code());
    HValue* values[] = {context(), input};
    HInstruction* result = New<HCallWithDescriptor>(
        stub, 0, callable.descriptor(), ArrayVector(values));
    return ast_context()->ReturnInstruction(result, call->id());
  }
}
12505
12506
// Lowers %_ToObject via the graph builder's inline BuildToObject helper.
void HOptimizedGraphBuilder::GenerateToObject(CallRuntime* call) {
  DCHECK_EQ(1, call->arguments()->length());
  CHECK_ALIVE(VisitForValue(call->arguments()->at(0)));
  HValue* value = Pop();
  HValue* result = BuildToObject(value);
  return ast_context()->ReturnValue(result);
}
12514
12515
// Lowers %_ToString.  Strings pass through unchanged; other values go
// through the ToString code stub.
void HOptimizedGraphBuilder::GenerateToString(CallRuntime* call) {
  DCHECK_EQ(1, call->arguments()->length());
  CHECK_ALIVE(VisitForValue(call->arguments()->at(0)));
  HValue* input = Pop();
  if (input->type().IsString()) {
    return ast_context()->ReturnValue(input);
  } else {
    Callable callable = CodeFactory::ToString(isolate());
    HValue* stub = Add<HConstant>(callable.code());
    HValue* values[] = {context(), input};
    HInstruction* result = New<HCallWithDescriptor>(
        stub, 0, callable.descriptor(), ArrayVector(values));
    return ast_context()->ReturnInstruction(result, call->id());
  }
}
12531
12532
// Lowers %_ToLength by calling the ToLength code stub unconditionally.
void HOptimizedGraphBuilder::GenerateToLength(CallRuntime* call) {
  DCHECK_EQ(1, call->arguments()->length());
  CHECK_ALIVE(VisitForValue(call->arguments()->at(0)));
  Callable callable = CodeFactory::ToLength(isolate());
  HValue* input = Pop();
  HValue* stub = Add<HConstant>(callable.code());
  HValue* values[] = {context(), input};
  HInstruction* result = New<HCallWithDescriptor>(
      stub, 0, callable.descriptor(), ArrayVector(values));
  return ast_context()->ReturnInstruction(result, call->id());
}
12544
12545
12546void HOptimizedGraphBuilder::GenerateToNumber(CallRuntime* call) {
12547 DCHECK_EQ(1, call->arguments()->length());
12548 CHECK_ALIVE(VisitForValue(call->arguments()->at(0)));
12549 Callable callable = CodeFactory::ToNumber(isolate());
12550 HValue* input = Pop();
Ben Murdoch097c5b22016-05-18 11:27:45 +010012551 HValue* result = BuildToNumber(input);
12552 if (result->HasObservableSideEffects()) {
12553 if (!ast_context()->IsEffect()) Push(result);
12554 Add<HSimulate>(call->id(), REMOVABLE_SIMULATE);
12555 if (!ast_context()->IsEffect()) result = Pop();
Ben Murdoch4a90d5f2016-03-22 12:00:34 +000012556 }
Ben Murdoch097c5b22016-05-18 11:27:45 +010012557 return ast_context()->ReturnValue(result);
Ben Murdoch4a90d5f2016-03-22 12:00:34 +000012558}
12559
12560
// Lowers %_IsJSProxy: non-Smi check followed by an instance-type comparison
// against JS_PROXY_TYPE, joined into a single continuation.
void HOptimizedGraphBuilder::GenerateIsJSProxy(CallRuntime* call) {
  DCHECK(call->arguments()->length() == 1);
  CHECK_ALIVE(VisitForValue(call->arguments()->at(0)));
  HValue* value = Pop();
  HIfContinuation continuation;
  IfBuilder if_proxy(this);

  // The Smi check also serves as the dependency for the map load below.
  HValue* smicheck = if_proxy.IfNot<HIsSmiAndBranch>(value);
  if_proxy.And();
  HValue* map = Add<HLoadNamedField>(value, smicheck, HObjectAccess::ForMap());
  HValue* instance_type =
      Add<HLoadNamedField>(map, nullptr, HObjectAccess::ForMapInstanceType());
  if_proxy.If<HCompareNumericAndBranch>(
      instance_type, Add<HConstant>(JS_PROXY_TYPE), Token::EQ);

  if_proxy.CaptureContinuation(&continuation);
  return ast_context()->ReturnContinuation(&continuation, call->id());
}
12579
12580
// Lowers %_HasFastPackedElements: true iff the argument is a heap object
// whose elements kind is one of the packed fast kinds (SMI, plain, double).
void HOptimizedGraphBuilder::GenerateHasFastPackedElements(CallRuntime* call) {
  DCHECK(call->arguments()->length() == 1);
  CHECK_ALIVE(VisitForValue(call->arguments()->at(0)));
  HValue* object = Pop();
  HIfContinuation continuation(graph()->CreateBasicBlock(),
                               graph()->CreateBasicBlock());
  IfBuilder if_not_smi(this);
  if_not_smi.IfNot<HIsSmiAndBranch>(object);
  if_not_smi.Then();
  {
    // The elements-kind probing must not be observable.
    NoObservableSideEffectsScope no_effects(this);

    IfBuilder if_fast_packed(this);
    HValue* elements_kind = BuildGetElementsKind(object);
    if_fast_packed.If<HCompareNumericAndBranch>(
        elements_kind, Add<HConstant>(FAST_SMI_ELEMENTS), Token::EQ);
    if_fast_packed.Or();
    if_fast_packed.If<HCompareNumericAndBranch>(
        elements_kind, Add<HConstant>(FAST_ELEMENTS), Token::EQ);
    if_fast_packed.Or();
    if_fast_packed.If<HCompareNumericAndBranch>(
        elements_kind, Add<HConstant>(FAST_DOUBLE_ELEMENTS), Token::EQ);
    if_fast_packed.JoinContinuation(&continuation);
  }
  if_not_smi.JoinContinuation(&continuation);
  return ast_context()->ReturnContinuation(&continuation, call->id());
}
12608
12609
// Lowers %_ValueOf: for a JSValue wrapper returns the wrapped primitive,
// otherwise returns the object itself.
void HOptimizedGraphBuilder::GenerateValueOf(CallRuntime* call) {
  DCHECK(call->arguments()->length() == 1);
  CHECK_ALIVE(VisitForValue(call->arguments()->at(0)));
  HValue* object = Pop();

  IfBuilder if_objectisvalue(this);
  HValue* objectisvalue = if_objectisvalue.If<HHasInstanceTypeAndBranch>(
      object, JS_VALUE_TYPE);
  if_objectisvalue.Then();
  {
    // Return the actual value.
    Push(Add<HLoadNamedField>(
            object, objectisvalue,
            HObjectAccess::ForObservableJSObjectOffset(
                JSValue::kValueOffset)));
    // Both arms push exactly one value and simulate so the join can Pop it.
    Add<HSimulate>(call->id(), FIXED_SIMULATE);
  }
  if_objectisvalue.Else();
  {
    // If the object is not a value return the object.
    Push(object);
    Add<HSimulate>(call->id(), FIXED_SIMULATE);
  }
  if_objectisvalue.End();
  return ast_context()->ReturnValue(Pop());
}
12636
12637
// Lowers %_OneByteSeqStringSetChar(index, value, string): stores a one-byte
// char into a sequential string in place; the intrinsic returns undefined.
void HOptimizedGraphBuilder::GenerateOneByteSeqStringSetChar(
    CallRuntime* call) {
  DCHECK(call->arguments()->length() == 3);
  CHECK_ALIVE(VisitForValue(call->arguments()->at(0)));
  CHECK_ALIVE(VisitForValue(call->arguments()->at(1)));
  CHECK_ALIVE(VisitForValue(call->arguments()->at(2)));
  // Arguments come off the stack in reverse order of evaluation.
  HValue* string = Pop();
  HValue* value = Pop();
  HValue* index = Pop();
  Add<HSeqStringSetChar>(String::ONE_BYTE_ENCODING, string,
                         index, value);
  Add<HSimulate>(call->id(), FIXED_SIMULATE);
  return ast_context()->ReturnValue(graph()->GetConstantUndefined());
}
12652
12653
// Lowers %_TwoByteSeqStringSetChar(index, value, string): two-byte variant
// of the sequential-string in-place character store; returns undefined.
void HOptimizedGraphBuilder::GenerateTwoByteSeqStringSetChar(
    CallRuntime* call) {
  DCHECK(call->arguments()->length() == 3);
  CHECK_ALIVE(VisitForValue(call->arguments()->at(0)));
  CHECK_ALIVE(VisitForValue(call->arguments()->at(1)));
  CHECK_ALIVE(VisitForValue(call->arguments()->at(2)));
  // Arguments come off the stack in reverse order of evaluation.
  HValue* string = Pop();
  HValue* value = Pop();
  HValue* index = Pop();
  Add<HSeqStringSetChar>(String::TWO_BYTE_ENCODING, string,
                         index, value);
  Add<HSimulate>(call->id(), FIXED_SIMULATE);
  return ast_context()->ReturnValue(graph()->GetConstantUndefined());
}
12668
12669
// Fast support for charCodeAt(n): loads the char code at |index| of |string|.
void HOptimizedGraphBuilder::GenerateStringCharCodeAt(CallRuntime* call) {
  DCHECK(call->arguments()->length() == 2);
  CHECK_ALIVE(VisitForValue(call->arguments()->at(0)));
  CHECK_ALIVE(VisitForValue(call->arguments()->at(1)));
  HValue* index = Pop();
  HValue* string = Pop();
  HInstruction* result = BuildStringCharCodeAt(string, index);
  return ast_context()->ReturnInstruction(result, call->id());
}
12680
12681
// Fast support for String.fromCharCode(code): builds a one-character string
// from a char code.
void HOptimizedGraphBuilder::GenerateStringCharFromCode(CallRuntime* call) {
  DCHECK(call->arguments()->length() == 1);
  CHECK_ALIVE(VisitForValue(call->arguments()->at(0)));
  HValue* char_code = Pop();
  HInstruction* result = NewUncasted<HStringCharFromCode>(char_code);
  return ast_context()->ReturnInstruction(result, call->id());
}
12690
12691
// Fast support for string.charAt(n) and string[n]: charCodeAt followed by
// fromCharCode.
void HOptimizedGraphBuilder::GenerateStringCharAt(CallRuntime* call) {
  DCHECK(call->arguments()->length() == 2);
  CHECK_ALIVE(VisitForValue(call->arguments()->at(0)));
  CHECK_ALIVE(VisitForValue(call->arguments()->at(1)));
  HValue* index = Pop();
  HValue* string = Pop();
  HInstruction* char_code = BuildStringCharCodeAt(string, index);
  AddInstruction(char_code);
  HInstruction* result = NewUncasted<HStringCharFromCode>(char_code);
  return ast_context()->ReturnInstruction(result, call->id());
}
12704
12705
// Fast support for SubString: pushes all three arguments and calls the
// SubString stub; the result is known to be a string.
void HOptimizedGraphBuilder::GenerateSubString(CallRuntime* call) {
  DCHECK_EQ(3, call->arguments()->length());
  CHECK_ALIVE(VisitExpressions(call->arguments()));
  PushArgumentsFromEnvironment(call->arguments()->length());
  Callable callable = CodeFactory::SubString(isolate());
  HValue* stub = Add<HConstant>(callable.code());
  HValue* values[] = {context()};
  HInstruction* result =
      New<HCallWithDescriptor>(stub, call->arguments()->length(),
                               callable.descriptor(), ArrayVector(values));
  result->set_type(HType::String());
  return ast_context()->ReturnInstruction(result, call->id());
}
12720
// Support for direct creation of new objects via the FastNewObject stub.
void HOptimizedGraphBuilder::GenerateNewObject(CallRuntime* call) {
  DCHECK_EQ(2, call->arguments()->length());
  CHECK_ALIVE(VisitExpressions(call->arguments()));
  FastNewObjectStub stub(isolate());
  FastNewObjectDescriptor descriptor(isolate());
  // The braced initializer evaluates left to right, so the two visited
  // arguments are popped last-pushed first (i.e. in reverse visit order) --
  // presumably matching the descriptor's parameter order; confirm against
  // FastNewObjectDescriptor if this is ever touched.
  HValue* values[] = {context(), Pop(), Pop()};
  HConstant* stub_value = Add<HConstant>(stub.GetCode());
  HInstruction* result =
      New<HCallWithDescriptor>(stub_value, 0, descriptor, ArrayVector(values));
  return ast_context()->ReturnInstruction(result, call->id());
}
Ben Murdoch4a90d5f2016-03-22 12:00:34 +000012733
// Support for direct calls from JavaScript to native RegExp code.
void HOptimizedGraphBuilder::GenerateRegExpExec(CallRuntime* call) {
  DCHECK_EQ(4, call->arguments()->length());
  CHECK_ALIVE(VisitExpressions(call->arguments()));
  PushArgumentsFromEnvironment(call->arguments()->length());
  Callable callable = CodeFactory::RegExpExec(isolate());
  HValue* stub = Add<HConstant>(callable.code());
  HValue* values[] = {context()};
  HInstruction* result =
      New<HCallWithDescriptor>(stub, call->arguments()->length(),
                               callable.descriptor(), ArrayVector(values));
  return ast_context()->ReturnInstruction(result, call->id());
}
12747
12748
// Lowers %_RegExpFlags: direct in-object load of the regexp's flags field.
void HOptimizedGraphBuilder::GenerateRegExpFlags(CallRuntime* call) {
  DCHECK_EQ(1, call->arguments()->length());
  CHECK_ALIVE(VisitExpressions(call->arguments()));
  HValue* regexp = Pop();
  HInstruction* result =
      New<HLoadNamedField>(regexp, nullptr, HObjectAccess::ForJSRegExpFlags());
  return ast_context()->ReturnInstruction(result, call->id());
}
12757
12758
// Lowers %_RegExpSource: direct in-object load of the regexp's source field.
void HOptimizedGraphBuilder::GenerateRegExpSource(CallRuntime* call) {
  DCHECK_EQ(1, call->arguments()->length());
  CHECK_ALIVE(VisitExpressions(call->arguments()));
  HValue* regexp = Pop();
  HInstruction* result =
      New<HLoadNamedField>(regexp, nullptr, HObjectAccess::ForJSRegExpSource());
  return ast_context()->ReturnInstruction(result, call->id());
}
12767
12768
// Lowers %_DoubleLo: extracts the low 32 bits of a double's representation.
void HOptimizedGraphBuilder::GenerateDoubleLo(CallRuntime* call) {
  DCHECK_EQ(1, call->arguments()->length());
  CHECK_ALIVE(VisitForValue(call->arguments()->at(0)));
  HValue* value = Pop();
  HInstruction* result = NewUncasted<HDoubleBits>(value, HDoubleBits::LOW);
  return ast_context()->ReturnInstruction(result, call->id());
}
12776
12777
// Lowers %_DoubleHi: extracts the high 32 bits of a double's representation.
void HOptimizedGraphBuilder::GenerateDoubleHi(CallRuntime* call) {
  DCHECK_EQ(1, call->arguments()->length());
  CHECK_ALIVE(VisitForValue(call->arguments()->at(0)));
  HValue* value = Pop();
  HInstruction* result = NewUncasted<HDoubleBits>(value, HDoubleBits::HIGH);
  return ast_context()->ReturnInstruction(result, call->id());
}
12785
12786
// Lowers %_ConstructDouble(hi, lo): reassembles a double from its two
// 32-bit halves.
void HOptimizedGraphBuilder::GenerateConstructDouble(CallRuntime* call) {
  DCHECK_EQ(2, call->arguments()->length());
  CHECK_ALIVE(VisitForValue(call->arguments()->at(0)));
  CHECK_ALIVE(VisitForValue(call->arguments()->at(1)));
  // Second argument (lo) is on top of the stack.
  HValue* lo = Pop();
  HValue* hi = Pop();
  HInstruction* result = NewUncasted<HConstructDouble>(hi, lo);
  return ast_context()->ReturnInstruction(result, call->id());
}
12796
12797
// Construct a RegExp exec result with two in-object properties.
void HOptimizedGraphBuilder::GenerateRegExpConstructResult(CallRuntime* call) {
  DCHECK_EQ(3, call->arguments()->length());
  CHECK_ALIVE(VisitForValue(call->arguments()->at(0)));
  CHECK_ALIVE(VisitForValue(call->arguments()->at(1)));
  CHECK_ALIVE(VisitForValue(call->arguments()->at(2)));
  // Arguments are popped in reverse order: (length, index, input).
  HValue* input = Pop();
  HValue* index = Pop();
  HValue* length = Pop();
  HValue* result = BuildRegExpConstructResult(length, index, input);
  return ast_context()->ReturnValue(result);
}
12810
12811
// Fast support for number to string.
void HOptimizedGraphBuilder::GenerateNumberToString(CallRuntime* call) {
  DCHECK_EQ(1, call->arguments()->length());
  CHECK_ALIVE(VisitForValue(call->arguments()->at(0)));
  HValue* number = Pop();
  // Type::Any(): no static knowledge about the input is assumed.
  HValue* result = BuildNumberToString(number, Type::Any());
  return ast_context()->ReturnValue(result);
}
12820
12821
// Fast support for calls: routes %_Call(target, receiver, ...args) through
// the Call builtin trampoline.
void HOptimizedGraphBuilder::GenerateCall(CallRuntime* call) {
  DCHECK_LE(2, call->arguments()->length());
  CHECK_ALIVE(VisitExpressions(call->arguments()));
  CallTrampolineDescriptor descriptor(isolate());
  // Push everything except the call target; the -2 below excludes target and
  // receiver from the actual-argument count passed to the trampoline.
  PushArgumentsFromEnvironment(call->arguments()->length() - 1);
  HValue* trampoline = Add<HConstant>(isolate()->builtins()->Call());
  HValue* target = Pop();
  HValue* values[] = {context(), target,
                      Add<HConstant>(call->arguments()->length() - 2)};
  HInstruction* result =
      New<HCallWithDescriptor>(trampoline, call->arguments()->length() - 1,
                               descriptor, ArrayVector(values));
  return ast_context()->ReturnInstruction(result, call->id());
}
12837
12838
// Fast call to math functions: lowers %_MathPow to an HPower instruction.
void HOptimizedGraphBuilder::GenerateMathPow(CallRuntime* call) {
  DCHECK_EQ(2, call->arguments()->length());
  CHECK_ALIVE(VisitForValue(call->arguments()->at(0)));
  CHECK_ALIVE(VisitForValue(call->arguments()->at(1)));
  HValue* right = Pop();
  HValue* left = Pop();
  HInstruction* result = NewUncasted<HPower>(left, right);
  return ast_context()->ReturnInstruction(result, call->id());
}
12849
12850
// Lowers %_MathLogRT to the kMathLog unary math operation.
void HOptimizedGraphBuilder::GenerateMathLogRT(CallRuntime* call) {
  DCHECK(call->arguments()->length() == 1);
  CHECK_ALIVE(VisitForValue(call->arguments()->at(0)));
  HValue* value = Pop();
  HInstruction* result = NewUncasted<HUnaryMathOperation>(value, kMathLog);
  return ast_context()->ReturnInstruction(result, call->id());
}
12858
12859
// Lowers %_FixedArrayGet: raw keyed load from a FixedArray; holes are
// allowed through (ALLOW_RETURN_HOLE).
void HOptimizedGraphBuilder::GenerateFixedArrayGet(CallRuntime* call) {
  DCHECK(call->arguments()->length() == 2);
  CHECK_ALIVE(VisitForValue(call->arguments()->at(0)));
  CHECK_ALIVE(VisitForValue(call->arguments()->at(1)));
  HValue* index = Pop();
  HValue* object = Pop();
  HInstruction* result = New<HLoadKeyed>(
      object, index, nullptr, nullptr, FAST_HOLEY_ELEMENTS, ALLOW_RETURN_HOLE);
  return ast_context()->ReturnInstruction(result, call->id());
}
12870
12871
// Lowers %_FixedArraySet: raw keyed store into a FixedArray; the store is
// not an observable side effect.  Returns undefined.
void HOptimizedGraphBuilder::GenerateFixedArraySet(CallRuntime* call) {
  DCHECK(call->arguments()->length() == 3);
  CHECK_ALIVE(VisitForValue(call->arguments()->at(0)));
  CHECK_ALIVE(VisitForValue(call->arguments()->at(1)));
  CHECK_ALIVE(VisitForValue(call->arguments()->at(2)));
  HValue* value = Pop();
  HValue* index = Pop();
  HValue* object = Pop();
  NoObservableSideEffectsScope no_effects(this);
  Add<HStoreKeyed>(object, index, value, nullptr, FAST_HOLEY_ELEMENTS);
  return ast_context()->ReturnValue(graph()->GetConstantUndefined());
}
12884
12885
// Lowers %_TheHole: returns the hole constant.
void HOptimizedGraphBuilder::GenerateTheHole(CallRuntime* call) {
  DCHECK(call->arguments()->length() == 0);
  return ast_context()->ReturnValue(graph()->GetConstantHole());
}
12890
12891
// Lowers %_CreateIterResultObject(value, done): allocates the iterator
// result inline via BuildCreateIterResultObject.
void HOptimizedGraphBuilder::GenerateCreateIterResultObject(CallRuntime* call) {
  DCHECK_EQ(2, call->arguments()->length());
  CHECK_ALIVE(VisitForValue(call->arguments()->at(0)));
  CHECK_ALIVE(VisitForValue(call->arguments()->at(1)));
  HValue* done = Pop();
  HValue* value = Pop();
  HValue* result = BuildCreateIterResultObject(value, done);
  return ast_context()->ReturnValue(result);
}
12901
12902
// Lowers %_JSCollectionGetTable: loads the backing OrderedHashTable of a
// JS collection (Set/Map).
void HOptimizedGraphBuilder::GenerateJSCollectionGetTable(CallRuntime* call) {
  DCHECK(call->arguments()->length() == 1);
  CHECK_ALIVE(VisitForValue(call->arguments()->at(0)));
  HValue* receiver = Pop();
  HInstruction* result = New<HLoadNamedField>(
      receiver, nullptr, HObjectAccess::ForJSCollectionTable());
  return ast_context()->ReturnInstruction(result, call->id());
}
12911
12912
// Lowers %_StringGetRawHashField: loads a string's raw hash field.
void HOptimizedGraphBuilder::GenerateStringGetRawHashField(CallRuntime* call) {
  DCHECK(call->arguments()->length() == 1);
  CHECK_ALIVE(VisitForValue(call->arguments()->at(0)));
  HValue* object = Pop();
  HInstruction* result = New<HLoadNamedField>(
      object, nullptr, HObjectAccess::ForStringHashField());
  return ast_context()->ReturnInstruction(result, call->id());
}
12921
12922
// Allocates and fully initializes a minimum-capacity OrderedHashTable
// (OrderedHashSet or OrderedHashMap) inline: header fields, buckets set to
// kNotFound, and the data table filled with undefined.
template <typename CollectionType>
HValue* HOptimizedGraphBuilder::BuildAllocateOrderedHashTable() {
  // Layout constants for a table at minimum capacity.
  static const int kCapacity = CollectionType::kMinCapacity;
  static const int kBucketCount = kCapacity / CollectionType::kLoadFactor;
  static const int kFixedArrayLength = CollectionType::kHashTableStartIndex +
                                       kBucketCount +
                                       (kCapacity * CollectionType::kEntrySize);
  static const int kSizeInBytes =
      FixedArray::kHeaderSize + (kFixedArrayLength * kPointerSize);

  // Allocate the table and add the proper map.
  HValue* table =
      Add<HAllocate>(Add<HConstant>(kSizeInBytes), HType::HeapObject(),
                     NOT_TENURED, FIXED_ARRAY_TYPE, graph()->GetConstant0());
  AddStoreMapConstant(table, isolate()->factory()->ordered_hash_table_map());

  // Initialize the FixedArray...
  HValue* length = Add<HConstant>(kFixedArrayLength);
  Add<HStoreNamedField>(table, HObjectAccess::ForFixedArrayLength(), length);

  // ...and the OrderedHashTable fields.
  Add<HStoreNamedField>(
      table,
      HObjectAccess::ForOrderedHashTableNumberOfBuckets<CollectionType>(),
      Add<HConstant>(kBucketCount));
  Add<HStoreNamedField>(
      table,
      HObjectAccess::ForOrderedHashTableNumberOfElements<CollectionType>(),
      graph()->GetConstant0());
  Add<HStoreNamedField>(
      table, HObjectAccess::ForOrderedHashTableNumberOfDeletedElements<
                 CollectionType>(),
      graph()->GetConstant0());

  // Fill the buckets with kNotFound.
  HValue* not_found = Add<HConstant>(CollectionType::kNotFound);
  for (int i = 0; i < kBucketCount; ++i) {
    Add<HStoreNamedField>(
        table, HObjectAccess::ForOrderedHashTableBucket<CollectionType>(i),
        not_found);
  }

  // Fill the data table with undefined.
  HValue* undefined = graph()->GetConstantUndefined();
  for (int i = 0; i < (kCapacity * CollectionType::kEntrySize); ++i) {
    Add<HStoreNamedField>(table,
                          HObjectAccess::ForOrderedHashTableDataTableIndex<
                              CollectionType, kBucketCount>(i),
                          undefined);
  }

  return table;
}
12976
12977
// Lowers %_SetInitialize: installs a freshly allocated OrderedHashSet as the
// receiver's backing table and returns the receiver.
void HOptimizedGraphBuilder::GenerateSetInitialize(CallRuntime* call) {
  DCHECK(call->arguments()->length() == 1);
  CHECK_ALIVE(VisitForValue(call->arguments()->at(0)));
  HValue* receiver = Pop();

  NoObservableSideEffectsScope no_effects(this);
  HValue* table = BuildAllocateOrderedHashTable<OrderedHashSet>();
  Add<HStoreNamedField>(receiver, HObjectAccess::ForJSCollectionTable(), table);
  return ast_context()->ReturnValue(receiver);
}
12988
12989
// Lowers %_MapInitialize: installs a freshly allocated OrderedHashMap as the
// receiver's backing table and returns the receiver.
void HOptimizedGraphBuilder::GenerateMapInitialize(CallRuntime* call) {
  DCHECK(call->arguments()->length() == 1);
  CHECK_ALIVE(VisitForValue(call->arguments()->at(0)));
  HValue* receiver = Pop();

  NoObservableSideEffectsScope no_effects(this);
  HValue* table = BuildAllocateOrderedHashTable<OrderedHashMap>();
  Add<HStoreNamedField>(receiver, HObjectAccess::ForJSCollectionTable(), table);
  return ast_context()->ReturnValue(receiver);
}
13000
13001
// Clears a JS collection by allocating a fresh table and chaining the old
// one to it: live iterators follow the next-table link, and the cleared
// sentinel in the deleted-elements field marks the old table as wiped.
template <typename CollectionType>
void HOptimizedGraphBuilder::BuildOrderedHashTableClear(HValue* receiver) {
  HValue* old_table = Add<HLoadNamedField>(
      receiver, nullptr, HObjectAccess::ForJSCollectionTable());
  HValue* new_table = BuildAllocateOrderedHashTable<CollectionType>();
  Add<HStoreNamedField>(
      old_table, HObjectAccess::ForOrderedHashTableNextTable<CollectionType>(),
      new_table);
  Add<HStoreNamedField>(
      old_table, HObjectAccess::ForOrderedHashTableNumberOfDeletedElements<
                     CollectionType>(),
      Add<HConstant>(CollectionType::kClearedTableSentinel));
  Add<HStoreNamedField>(receiver, HObjectAccess::ForJSCollectionTable(),
                        new_table);
}
13017
13018
// Lowers %_SetClear via BuildOrderedHashTableClear; returns undefined.
void HOptimizedGraphBuilder::GenerateSetClear(CallRuntime* call) {
  DCHECK(call->arguments()->length() == 1);
  CHECK_ALIVE(VisitForValue(call->arguments()->at(0)));
  HValue* receiver = Pop();

  NoObservableSideEffectsScope no_effects(this);
  BuildOrderedHashTableClear<OrderedHashSet>(receiver);
  return ast_context()->ReturnValue(graph()->GetConstantUndefined());
}
13028
13029
// Lowers %_MapClear via BuildOrderedHashTableClear; returns undefined.
void HOptimizedGraphBuilder::GenerateMapClear(CallRuntime* call) {
  DCHECK(call->arguments()->length() == 1);
  CHECK_ALIVE(VisitForValue(call->arguments()->at(0)));
  HValue* receiver = Pop();

  NoObservableSideEffectsScope no_effects(this);
  BuildOrderedHashTableClear<OrderedHashMap>(receiver);
  return ast_context()->ReturnValue(graph()->GetConstantUndefined());
}
13039
13040
// Lowers %_GetCachedArrayIndex: reads the cached array index of a string.
void HOptimizedGraphBuilder::GenerateGetCachedArrayIndex(CallRuntime* call) {
  DCHECK(call->arguments()->length() == 1);
  CHECK_ALIVE(VisitForValue(call->arguments()->at(0)));
  HValue* value = Pop();
  HGetCachedArrayIndex* result = New<HGetCachedArrayIndex>(value);
  return ast_context()->ReturnInstruction(result, call->id());
}
13048
13049
// Lowers %_DebugBreakInOptimizedCode: emits a debug break and returns 0.
void HOptimizedGraphBuilder::GenerateDebugBreakInOptimizedCode(
    CallRuntime* call) {
  Add<HDebugBreak>();
  return ast_context()->ReturnValue(graph()->GetConstant0());
}
13055
13056
// Lowers %_DebugIsActive: reads the isolate's debug-is-active byte through
// an external reference.
void HOptimizedGraphBuilder::GenerateDebugIsActive(CallRuntime* call) {
  DCHECK(call->arguments()->length() == 0);
  HValue* ref =
      Add<HConstant>(ExternalReference::debug_is_active_address(isolate()));
  HValue* value =
      Add<HLoadNamedField>(ref, nullptr, HObjectAccess::ForExternalUInteger8());
  return ast_context()->ReturnValue(value);
}
13065
Ben Murdoch4a90d5f2016-03-22 12:00:34 +000013066#undef CHECK_BAILOUT
13067#undef CHECK_ALIVE
13068
13069
// Constructs a JS_FUNCTION environment for |closure| sized from |scope|'s
// declaration scope: parameters (+1 for the receiver) and stack slots.
HEnvironment::HEnvironment(HEnvironment* outer,
                           Scope* scope,
                           Handle<JSFunction> closure,
                           Zone* zone)
    : closure_(closure),
      values_(0, zone),
      frame_type_(JS_FUNCTION),
      parameter_count_(0),
      specials_count_(1),
      local_count_(0),
      outer_(outer),
      entry_(NULL),
      pop_count_(0),
      push_count_(0),
      ast_id_(BailoutId::None()),
      zone_(zone) {
  Scope* declaration_scope = scope->DeclarationScope();
  // +1 accounts for the receiver.
  Initialize(declaration_scope->num_parameters() + 1,
             declaration_scope->num_stack_slots(), 0);
}
13090
13091
// Constructs a STUB-frame environment with |parameter_count| parameters and
// no locals or outer environment.
HEnvironment::HEnvironment(Zone* zone, int parameter_count)
    : values_(0, zone),
      frame_type_(STUB),
      parameter_count_(parameter_count),
      specials_count_(1),
      local_count_(0),
      outer_(NULL),
      entry_(NULL),
      pop_count_(0),
      push_count_(0),
      ast_id_(BailoutId::None()),
      zone_(zone) {
  Initialize(parameter_count, 0, 0);
}
13106
13107
// Copy constructor (used by Copy()): clones |other| including its values,
// counts and history via Initialize(other).
HEnvironment::HEnvironment(const HEnvironment* other, Zone* zone)
    : values_(0, zone),
      frame_type_(JS_FUNCTION),
      parameter_count_(0),
      specials_count_(0),
      local_count_(0),
      outer_(NULL),
      entry_(NULL),
      pop_count_(0),
      push_count_(0),
      ast_id_(other->ast_id()),
      zone_(zone) {
  Initialize(other);
}
13122
13123
// Constructs an artificial-frame environment (used for inlined argument
// adaptor / constructor / stub frames) holding |arguments| parameter slots.
HEnvironment::HEnvironment(HEnvironment* outer,
                           Handle<JSFunction> closure,
                           FrameType frame_type,
                           int arguments,
                           Zone* zone)
    : closure_(closure),
      values_(arguments, zone),
      frame_type_(frame_type),
      parameter_count_(arguments),
      specials_count_(0),
      local_count_(0),
      outer_(outer),
      entry_(NULL),
      pop_count_(0),
      push_count_(0),
      ast_id_(BailoutId::None()),
      zone_(zone) {
}
13142
13143
13144void HEnvironment::Initialize(int parameter_count,
13145 int local_count,
13146 int stack_height) {
13147 parameter_count_ = parameter_count;
13148 local_count_ = local_count;
13149
13150 // Avoid reallocating the temporaries' backing store on the first Push.
13151 int total = parameter_count + specials_count_ + local_count + stack_height;
13152 values_.Initialize(total + 4, zone());
13153 for (int i = 0; i < total; ++i) values_.Add(NULL, zone());
13154}
13155
13156
// Clones all state from |other|, deep-copying the chain of outer
// environments.
void HEnvironment::Initialize(const HEnvironment* other) {
  closure_ = other->closure();
  values_.AddAll(other->values_, zone());
  assigned_variables_.Union(other->assigned_variables_, zone());
  frame_type_ = other->frame_type_;
  parameter_count_ = other->parameter_count_;
  local_count_ = other->local_count_;
  if (other->outer_ != NULL) outer_ = other->outer_->Copy();  // Deep copy.
  entry_ = other->entry_;
  pop_count_ = other->pop_count_;
  push_count_ = other->push_count_;
  specials_count_ = other->specials_count_;
  ast_id_ = other->ast_id_;
}
13171
13172
// Merges |other|'s values into this environment at a (non-loop) join in
// |block|: extends existing phis owned by |block|, and creates a new phi
// wherever the two environments disagree.
void HEnvironment::AddIncomingEdge(HBasicBlock* block, HEnvironment* other) {
  DCHECK(!block->IsLoopHeader());
  DCHECK(values_.length() == other->values_.length());

  int length = values_.length();
  for (int i = 0; i < length; ++i) {
    HValue* value = values_[i];
    if (value != NULL && value->IsPhi() && value->block() == block) {
      // There is already a phi for the i'th value.
      HPhi* phi = HPhi::cast(value);
      // Assert index is correct and that we haven't missed an incoming edge.
      DCHECK(phi->merged_index() == i || !phi->HasMergedIndex());
      DCHECK(phi->OperandCount() == block->predecessors()->length());
      phi->AddInput(other->values_[i]);
    } else if (values_[i] != other->values_[i]) {
      // There is a fresh value on the incoming edge, a phi is needed.
      DCHECK(values_[i] != NULL && other->values_[i] != NULL);
      HPhi* phi = block->AddNewPhi(i);
      HValue* old_value = values_[i];
      // All previously-seen edges carried the old value.
      for (int j = 0; j < block->predecessors()->length(); j++) {
        phi->AddInput(old_value);
      }
      phi->AddInput(other->values_[i]);
      this->values_[i] = phi;
    }
  }
}
13200
13201
13202void HEnvironment::Bind(int index, HValue* value) {
13203 DCHECK(value != NULL);
13204 assigned_variables_.Add(index, zone());
13205 values_[index] = value;
13206}
13207
13208
13209bool HEnvironment::HasExpressionAt(int index) const {
13210 return index >= parameter_count_ + specials_count_ + local_count_;
13211}
13212
13213
13214bool HEnvironment::ExpressionStackIsEmpty() const {
13215 DCHECK(length() >= first_expression_index());
13216 return length() == first_expression_index();
13217}
13218
13219
// Overwrites the expression-stack slot |index_from_top| positions below the
// top, adjusting the pop/push history so the write is recorded.
void HEnvironment::SetExpressionStackAt(int index_from_top, HValue* value) {
  int count = index_from_top + 1;
  int index = values_.length() - count;
  DCHECK(HasExpressionAt(index));
  // The push count must include at least the element in question or else
  // the new value will not be included in this environment's history.
  if (push_count_ < count) {
    // This is the same effect as popping then re-pushing 'count' elements.
    pop_count_ += (count - push_count_);
    push_count_ = count;
  }
  values_[index] = value;
}
13233
13234
13235HValue* HEnvironment::RemoveExpressionStackAt(int index_from_top) {
13236 int count = index_from_top + 1;
13237 int index = values_.length() - count;
13238 DCHECK(HasExpressionAt(index));
13239 // Simulate popping 'count' elements and then
13240 // pushing 'count - 1' elements back.
13241 pop_count_ += Max(count - push_count_, 0);
13242 push_count_ = Max(push_count_ - count, 0) + (count - 1);
13243 return values_.Remove(index);
13244}
13245
13246
13247void HEnvironment::Drop(int count) {
13248 for (int i = 0; i < count; ++i) {
13249 Pop();
13250 }
13251}
13252
13253
13254void HEnvironment::Print() const {
13255 OFStream os(stdout);
13256 os << *this << "\n";
13257}
13258
13259
13260HEnvironment* HEnvironment::Copy() const {
13261 return new(zone()) HEnvironment(this, zone());
13262}
13263
13264
13265HEnvironment* HEnvironment::CopyWithoutHistory() const {
13266 HEnvironment* result = Copy();
13267 result->ClearHistory();
13268 return result;
13269}
13270
13271
13272HEnvironment* HEnvironment::CopyAsLoopHeader(HBasicBlock* loop_header) const {
13273 HEnvironment* new_env = Copy();
13274 for (int i = 0; i < values_.length(); ++i) {
13275 HPhi* phi = loop_header->AddNewPhi(i);
13276 phi->AddInput(values_[i]);
13277 new_env->values_[i] = phi;
13278 }
13279 new_env->ClearHistory();
13280 return new_env;
13281}
13282
13283
13284HEnvironment* HEnvironment::CreateStubEnvironment(HEnvironment* outer,
13285 Handle<JSFunction> target,
13286 FrameType frame_type,
13287 int arguments) const {
13288 HEnvironment* new_env =
13289 new(zone()) HEnvironment(outer, target, frame_type,
13290 arguments + 1, zone());
13291 for (int i = 0; i <= arguments; ++i) { // Include receiver.
13292 new_env->Push(ExpressionStackAt(arguments - i));
13293 }
13294 new_env->ClearHistory();
13295 return new_env;
13296}
13297
Ben Murdochda12d292016-06-02 14:46:10 +010013298void HEnvironment::MarkAsTailCaller() {
13299 DCHECK_EQ(JS_FUNCTION, frame_type());
13300 frame_type_ = TAIL_CALLER_FUNCTION;
13301}
13302
13303void HEnvironment::ClearTailCallerMark() {
13304 DCHECK_EQ(TAIL_CALLER_FUNCTION, frame_type());
13305 frame_type_ = JS_FUNCTION;
13306}
Ben Murdoch4a90d5f2016-03-22 12:00:34 +000013307
// Builds the environment for inlining |function| at a call site in this
// environment (which must be a JS_FUNCTION frame): an outer environment that
// is this one with the call's receiver and arguments dropped, optional
// artificial stub frames (constructor/getter/setter, arguments adaptation,
// tail-caller marking), and an inner environment for the inlined function
// seeded with the argument values.
HEnvironment* HEnvironment::CopyForInlining(
    Handle<JSFunction> target, int arguments, FunctionLiteral* function,
    HConstant* undefined, InliningKind inlining_kind,
    TailCallMode syntactic_tail_call_mode) const {
  DCHECK_EQ(JS_FUNCTION, frame_type());

  // Outer environment is a copy of this one without the arguments.
  int arity = function->scope()->num_parameters();

  HEnvironment* outer = Copy();
  outer->Drop(arguments + 1);  // Including receiver.
  outer->ClearHistory();

  if (syntactic_tail_call_mode == TailCallMode::kAllow) {
    DCHECK_EQ(NORMAL_RETURN, inlining_kind);
    outer->MarkAsTailCaller();
  }

  if (inlining_kind == CONSTRUCT_CALL_RETURN) {
    // Create artificial constructor stub environment.  The receiver should
    // actually be the constructor function, but we pass the newly allocated
    // object instead, DoComputeConstructStubFrame() relies on that.
    outer = CreateStubEnvironment(outer, target, JS_CONSTRUCT, arguments);
  } else if (inlining_kind == GETTER_CALL_RETURN) {
    // We need an additional StackFrame::INTERNAL frame for restoring the
    // correct context.
    outer = CreateStubEnvironment(outer, target, JS_GETTER, arguments);
  } else if (inlining_kind == SETTER_CALL_RETURN) {
    // We need an additional StackFrame::INTERNAL frame for temporarily saving
    // the argument of the setter, see StoreStubCompiler::CompileStoreViaSetter.
    outer = CreateStubEnvironment(outer, target, JS_SETTER, arguments);
  }

  if (arity != arguments) {
    // Create artificial arguments adaptation environment.
    outer = CreateStubEnvironment(outer, target, ARGUMENTS_ADAPTOR, arguments);
  }

  HEnvironment* inner =
      new(zone()) HEnvironment(outer, function->scope(), target, zone());
  // Get the argument values from the original environment.  Missing
  // arguments (call-site arity < declared arity) are filled with undefined.
  for (int i = 0; i <= arity; ++i) {  // Include receiver.
    HValue* push = (i <= arguments) ?
        ExpressionStackAt(arguments - i) : undefined;
    inner->SetValueAt(i, push);
  }
  // Slot after the parameters holds the context; the remaining slots
  // (locals etc.) start out undefined.
  inner->SetValueAt(arity + 1, context());
  for (int i = arity + 2; i < inner->length(); ++i) {
    inner->SetValueAt(i, undefined);
  }

  inner->set_ast_id(BailoutId::FunctionEntry());
  return inner;
}
13362
13363
13364std::ostream& operator<<(std::ostream& os, const HEnvironment& env) {
13365 for (int i = 0; i < env.length(); i++) {
13366 if (i == 0) os << "parameters\n";
13367 if (i == env.parameter_count()) os << "specials\n";
13368 if (i == env.parameter_count() + env.specials_count()) os << "locals\n";
13369 if (i == env.parameter_count() + env.specials_count() + env.local_count()) {
13370 os << "expressions\n";
13371 }
13372 HValue* val = env.values()->at(i);
13373 os << i << ": ";
13374 if (val != NULL) {
13375 os << val;
13376 } else {
13377 os << "NULL";
13378 }
13379 os << "\n";
13380 }
13381 return os << "\n";
13382}
13383
13384
13385void HTracer::TraceCompilation(CompilationInfo* info) {
13386 Tag tag(this, "compilation");
Ben Murdochc5610432016-08-08 18:44:38 +010013387 std::string name;
13388 Object* source_name = info->script()->name();
13389 if (source_name->IsString()) {
13390 String* str = String::cast(source_name);
13391 if (str->length() > 0) {
13392 name.append(str->ToCString().get());
13393 name.append(":");
13394 }
13395 }
13396 base::SmartArrayPointer<char> method_name = info->GetDebugName();
13397 name.append(method_name.get());
Ben Murdoch4a90d5f2016-03-22 12:00:34 +000013398 if (info->IsOptimizing()) {
Ben Murdochc5610432016-08-08 18:44:38 +010013399 PrintStringProperty("name", name.c_str());
Ben Murdoch4a90d5f2016-03-22 12:00:34 +000013400 PrintIndent();
Ben Murdochc5610432016-08-08 18:44:38 +010013401 trace_.Add("method \"%s:%d\"\n", method_name.get(),
13402 info->optimization_id());
Ben Murdoch4a90d5f2016-03-22 12:00:34 +000013403 } else {
Ben Murdochc5610432016-08-08 18:44:38 +010013404 PrintStringProperty("name", name.c_str());
Ben Murdoch4a90d5f2016-03-22 12:00:34 +000013405 PrintStringProperty("method", "stub");
13406 }
13407 PrintLongProperty("date",
13408 static_cast<int64_t>(base::OS::TimeCurrentMillis()));
13409}
13410
13411
13412void HTracer::TraceLithium(const char* name, LChunk* chunk) {
13413 DCHECK(!chunk->isolate()->concurrent_recompilation_enabled());
13414 AllowHandleDereference allow_deref;
13415 AllowDeferredHandleDereference allow_deferred_deref;
13416 Trace(name, chunk->graph(), chunk);
13417}
13418
13419
13420void HTracer::TraceHydrogen(const char* name, HGraph* graph) {
13421 DCHECK(!graph->isolate()->concurrent_recompilation_enabled());
13422 AllowHandleDereference allow_deref;
13423 AllowDeferredHandleDereference allow_deferred_deref;
13424 Trace(name, graph, NULL);
13425}
13426
13427
// Emits one "cfg" trace section describing every basic block of |graph| —
// CFG edges, flags, dominator, phis and HIR instructions — and, when |chunk|
// is non-NULL, the lithium instructions selected for each block.  The output
// follows the c1visualizer-style format produced by the surrounding tags.
void HTracer::Trace(const char* name, HGraph* graph, LChunk* chunk) {
  Tag tag(this, "cfg");
  PrintStringProperty("name", name);
  const ZoneList<HBasicBlock*>* blocks = graph->blocks();
  for (int i = 0; i < blocks->length(); i++) {
    HBasicBlock* current = blocks->at(i);
    Tag block_tag(this, "block");
    PrintBlockProperty("name", current->block_id());
    // Bytecode indices are not tracked here; emit -1 placeholders.
    PrintIntProperty("from_bci", -1);
    PrintIntProperty("to_bci", -1);

    if (!current->predecessors()->is_empty()) {
      PrintIndent();
      trace_.Add("predecessors");
      for (int j = 0; j < current->predecessors()->length(); ++j) {
        trace_.Add(" \"B%d\"", current->predecessors()->at(j)->block_id());
      }
      trace_.Add("\n");
    } else {
      PrintEmptyProperty("predecessors");
    }

    if (current->end()->SuccessorCount() == 0) {
      PrintEmptyProperty("successors");
    } else {
      PrintIndent();
      trace_.Add("successors");
      for (HSuccessorIterator it(current->end()); !it.Done(); it.Advance()) {
        trace_.Add(" \"B%d\"", it.Current()->block_id());
      }
      trace_.Add("\n");
    }

    // No exception-handler blocks are modeled.
    PrintEmptyProperty("xhandlers");

    {
      PrintIndent();
      trace_.Add("flags");
      if (current->IsLoopSuccessorDominator()) {
        trace_.Add(" \"dom-loop-succ\"");
      }
      if (current->IsUnreachable()) {
        trace_.Add(" \"dead\"");
      }
      if (current->is_osr_entry()) {
        trace_.Add(" \"osr\"");
      }
      trace_.Add("\n");
    }

    if (current->dominator() != NULL) {
      PrintBlockProperty("dominator", current->dominator()->block_id());
    }

    PrintIntProperty("loop_depth", current->LoopNestingDepth());

    if (chunk != NULL) {
      // Report the block's first/last lithium instructions as lifetime
      // positions so they line up with the register-allocator trace.
      int first_index = current->first_instruction_index();
      int last_index = current->last_instruction_index();
      PrintIntProperty(
          "first_lir_id",
          LifetimePosition::FromInstructionIndex(first_index).Value());
      PrintIntProperty(
          "last_lir_id",
          LifetimePosition::FromInstructionIndex(last_index).Value());
    }

    {
      // Phis are reported as the block's local state.
      Tag states_tag(this, "states");
      Tag locals_tag(this, "locals");
      int total = current->phis()->length();
      PrintIntProperty("size", current->phis()->length());
      PrintStringProperty("method", "None");
      for (int j = 0; j < total; ++j) {
        HPhi* phi = current->phis()->at(j);
        PrintIndent();
        std::ostringstream os;
        os << phi->merged_index() << " " << NameOf(phi) << " " << *phi << "\n";
        trace_.Add(os.str().c_str());
      }
    }

    {
      Tag HIR_tag(this, "HIR");
      for (HInstructionIterator it(current); !it.Done(); it.Advance()) {
        HInstruction* instruction = it.Current();
        int uses = instruction->UseCount();
        PrintIndent();
        std::ostringstream os;
        os << "0 " << uses << " " << NameOf(instruction) << " " << *instruction;
        // Append source-position info when position tracking is enabled and
        // this instruction carries a non-trivial position.
        if (graph->info()->is_tracking_positions() &&
            instruction->has_position() && instruction->position().raw() != 0) {
          const SourcePosition pos = instruction->position();
          os << " pos:";
          if (pos.inlining_id() != 0) os << pos.inlining_id() << "_";
          os << pos.position();
        }
        os << " <|@\n";
        trace_.Add(os.str().c_str());
      }
    }


    if (chunk != NULL) {
      Tag LIR_tag(this, "LIR");
      int first_index = current->first_instruction_index();
      int last_index = current->last_instruction_index();
      if (first_index != -1 && last_index != -1) {
        const ZoneList<LInstruction*>* instructions = chunk->instructions();
        for (int i = first_index; i <= last_index; ++i) {
          LInstruction* linstr = instructions->at(i);
          // Gaps in the instruction list are represented as NULL entries.
          if (linstr != NULL) {
            PrintIndent();
            trace_.Add("%d ",
                       LifetimePosition::FromInstructionIndex(i).Value());
            linstr->PrintTo(&trace_);
            std::ostringstream os;
            os << " [hir:" << NameOf(linstr->hydrogen_value()) << "] <|@\n";
            trace_.Add(os.str().c_str());
          }
        }
      }
    }
  }
}
13553
13554
13555void HTracer::TraceLiveRanges(const char* name, LAllocator* allocator) {
13556 Tag tag(this, "intervals");
13557 PrintStringProperty("name", name);
13558
13559 const Vector<LiveRange*>* fixed_d = allocator->fixed_double_live_ranges();
13560 for (int i = 0; i < fixed_d->length(); ++i) {
13561 TraceLiveRange(fixed_d->at(i), "fixed", allocator->zone());
13562 }
13563
13564 const Vector<LiveRange*>* fixed = allocator->fixed_live_ranges();
13565 for (int i = 0; i < fixed->length(); ++i) {
13566 TraceLiveRange(fixed->at(i), "fixed", allocator->zone());
13567 }
13568
13569 const ZoneList<LiveRange*>* live_ranges = allocator->live_ranges();
13570 for (int i = 0; i < live_ranges->length(); ++i) {
13571 TraceLiveRange(live_ranges->at(i), "object", allocator->zone());
13572 }
13573}
13574
13575
// Emits one trace line for |range|: its id and type, the assigned register
// or spill slot (if any), parent-range id and allocation hint, the covered
// use intervals, and use positions where a register is beneficial.  Empty or
// NULL ranges produce no output.
void HTracer::TraceLiveRange(LiveRange* range, const char* type,
                             Zone* zone) {
  if (range != NULL && !range->IsEmpty()) {
    PrintIndent();
    trace_.Add("%d %s", range->id(), type);
    if (range->HasRegisterAssigned()) {
      LOperand* op = range->CreateAssignedOperand(zone);
      int assigned_reg = op->index();
      if (op->IsDoubleRegister()) {
        trace_.Add(" \"%s\"",
                   DoubleRegister::from_code(assigned_reg).ToString());
      } else {
        DCHECK(op->IsRegister());
        trace_.Add(" \"%s\"", Register::from_code(assigned_reg).ToString());
      }
    } else if (range->IsSpilled()) {
      // The spill operand lives on the top-level range of a split family.
      LOperand* op = range->TopLevel()->GetSpillOperand();
      if (op->IsDoubleStackSlot()) {
        trace_.Add(" \"double_stack:%d\"", op->index());
      } else {
        DCHECK(op->IsStackSlot());
        trace_.Add(" \"stack:%d\"", op->index());
      }
    }
    // Child ranges reference their parent; top-level ranges reference
    // themselves.
    int parent_index = -1;
    if (range->IsChild()) {
      parent_index = range->parent()->id();
    } else {
      parent_index = range->id();
    }
    // The hint is the virtual register of the first unallocated hint
    // operand, or -1 when there is none.
    LOperand* op = range->FirstHint();
    int hint_index = -1;
    if (op != NULL && op->IsUnallocated()) {
      hint_index = LUnallocated::cast(op)->virtual_register();
    }
    trace_.Add(" %d %d", parent_index, hint_index);
    // Print half-open [start, end[ intervals while they are covered by the
    // range.
    UseInterval* cur_interval = range->first_interval();
    while (cur_interval != NULL && range->Covers(cur_interval->start())) {
      trace_.Add(" [%d, %d[",
                 cur_interval->start().Value(),
                 cur_interval->end().Value());
      cur_interval = cur_interval->next();
    }

    UsePosition* current_pos = range->first_pos();
    while (current_pos != NULL) {
      // With --trace-all-uses every use is printed, not only the
      // register-beneficial ones.
      if (current_pos->RegisterIsBeneficial() || FLAG_trace_all_uses) {
        trace_.Add(" %d M", current_pos->pos().Value());
      }
      current_pos = current_pos->next();
    }

    trace_.Add(" \"\"\n");
  }
}
13631
13632
13633void HTracer::FlushToFile() {
13634 AppendChars(filename_.start(), trace_.ToCString().get(), trace_.length(),
13635 false);
13636 trace_.Reset();
13637}
13638
13639
13640void HStatistics::Initialize(CompilationInfo* info) {
13641 if (!info->has_shared_info()) return;
13642 source_size_ += info->shared_info()->SourceSize();
13643}
13644
13645
// Prints the accumulated per-phase compilation times and code sizes as a
// table, followed by totals for graph creation / optimization / code
// generation, a comparison against full codegen, and figures normalized per
// kB of source.
void HStatistics::Print() {
  PrintF(
      "\n"
      "----------------------------------------"
      "----------------------------------------\n"
      "--- Hydrogen timing results:\n"
      "----------------------------------------"
      "----------------------------------------\n");
  // Total of all per-phase times, used as the denominator for percentages.
  base::TimeDelta sum;
  for (int i = 0; i < times_.length(); ++i) {
    sum += times_[i];
  }

  for (int i = 0; i < names_.length(); ++i) {
    PrintF("%33s", names_[i]);
    double ms = times_[i].InMillisecondsF();
    double percent = times_[i].PercentOf(sum);
    PrintF(" %8.3f ms / %4.1f %% ", ms, percent);

    size_t size = sizes_[i];
    double size_percent = static_cast<double>(size) * 100 / total_size_;
    PrintF(" %9zu bytes / %4.1f %%\n", size, size_percent);
  }

  PrintF(
      "----------------------------------------"
      "----------------------------------------\n");
  base::TimeDelta total = create_graph_ + optimize_graph_ + generate_code_;
  PrintF("%33s %8.3f ms / %4.1f %% \n", "Create graph",
         create_graph_.InMillisecondsF(), create_graph_.PercentOf(total));
  PrintF("%33s %8.3f ms / %4.1f %% \n", "Optimize graph",
         optimize_graph_.InMillisecondsF(), optimize_graph_.PercentOf(total));
  PrintF("%33s %8.3f ms / %4.1f %% \n", "Generate and install code",
         generate_code_.InMillisecondsF(), generate_code_.PercentOf(total));
  PrintF(
      "----------------------------------------"
      "----------------------------------------\n");
  PrintF("%33s %8.3f ms %9zu bytes\n", "Total",
         total.InMillisecondsF(), total_size_);
  PrintF("%33s (%.1f times slower than full code gen)\n", "",
         total.TimesOf(full_code_gen_));

  // Normalize by source size, guarding against division by zero when no
  // source was recorded.
  double source_size_in_kb = static_cast<double>(source_size_) / 1024;
  double normalized_time = source_size_in_kb > 0
      ? total.InMillisecondsF() / source_size_in_kb
      : 0;
  double normalized_size_in_kb =
      source_size_in_kb > 0
          ? static_cast<double>(total_size_) / 1024 / source_size_in_kb
          : 0;
  PrintF("%33s %8.3f ms %7.3f kB allocated\n",
         "Average per kB source", normalized_time, normalized_size_in_kb);
}
13699
13700
13701void HStatistics::SaveTiming(const char* name, base::TimeDelta time,
13702 size_t size) {
13703 total_size_ += size;
13704 for (int i = 0; i < names_.length(); ++i) {
13705 if (strcmp(names_[i], name) == 0) {
13706 times_[i] += time;
13707 sizes_[i] += size;
13708 return;
13709 }
13710 }
13711 names_.Add(name);
13712 times_.Add(time);
13713 sizes_.Add(size);
13714}
13715
13716
13717HPhase::~HPhase() {
13718 if (ShouldProduceTraceOutput()) {
13719 isolate()->GetHTracer()->TraceHydrogen(name(), graph_);
13720 }
13721
13722#ifdef DEBUG
13723 graph_->Verify(false); // No full verify.
13724#endif
13725}
13726
13727} // namespace internal
13728} // namespace v8