1// Copyright 2010 the V8 project authors. All rights reserved.
2// Redistribution and use in source and binary forms, with or without
3// modification, are permitted provided that the following conditions are
4// met:
5//
6// * Redistributions of source code must retain the above copyright
7// notice, this list of conditions and the following disclaimer.
8// * Redistributions in binary form must reproduce the above
9// copyright notice, this list of conditions and the following
10// disclaimer in the documentation and/or other materials provided
11// with the distribution.
12// * Neither the name of Google Inc. nor the names of its
13// contributors may be used to endorse or promote products derived
14// from this software without specific prior written permission.
15//
16// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
17// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
18// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
19// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
20// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
21// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
22// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
23// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
24// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
25// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
26// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
27
28#include "hydrogen.h"
29
30#include "codegen.h"
31#include "data-flow.h"
32#include "full-codegen.h"
33#include "hashmap.h"
34#include "lithium-allocator.h"
35#include "parser.h"
36#include "scopes.h"
37
38#if V8_TARGET_ARCH_IA32
39#include "ia32/lithium-codegen-ia32.h"
40#elif V8_TARGET_ARCH_X64
41#include "x64/lithium-codegen-x64.h"
42#elif V8_TARGET_ARCH_ARM
43#include "arm/lithium-codegen-arm.h"
44#else
45#error Unsupported target architecture.
46#endif
47
48namespace v8 {
49namespace internal {
50
51HBasicBlock::HBasicBlock(HGraph* graph)
52 : block_id_(graph->GetNextBlockID()),
53 graph_(graph),
54 phis_(4),
55 first_(NULL),
56 last_(NULL),
57 end_(NULL),
58 loop_information_(NULL),
59 predecessors_(2),
60 dominator_(NULL),
61 dominated_blocks_(4),
62 last_environment_(NULL),
63 argument_count_(-1),
64 first_instruction_index_(-1),
65 last_instruction_index_(-1),
66 deleted_phis_(4),
67 is_inline_return_target_(false) {
68}
69
70
71void HBasicBlock::AttachLoopInformation() {
72 ASSERT(!IsLoopHeader());
73 loop_information_ = new HLoopInformation(this);
74}
75
76
77void HBasicBlock::DetachLoopInformation() {
78 ASSERT(IsLoopHeader());
79 loop_information_ = NULL;
80}
81
82
83void HBasicBlock::AddPhi(HPhi* phi) {
84 ASSERT(!IsStartBlock());
85 phis_.Add(phi);
86 phi->SetBlock(this);
87}
88
89
90void HBasicBlock::RemovePhi(HPhi* phi) {
91 ASSERT(phi->block() == this);
92 ASSERT(phis_.Contains(phi));
93 ASSERT(phi->HasNoUses());
94 phi->ClearOperands();
95 phis_.RemoveElement(phi);
96 phi->SetBlock(NULL);
97}
98
99
100void HBasicBlock::AddInstruction(HInstruction* instr) {
101 ASSERT(!IsStartBlock() || !IsFinished());
102 ASSERT(!instr->IsLinked());
103 ASSERT(!IsFinished());
104 if (first_ == NULL) {
105 HBlockEntry* entry = new HBlockEntry();
106 entry->InitializeAsFirst(this);
107 first_ = entry;
108 }
109 instr->InsertAfter(GetLastInstruction());
110}
111
112
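// Returns the last instruction currently in the block, or NULL if the block
// is still empty. For finished blocks this is the instruction right before
// the control-flow end; otherwise the cached last_ pointer is advanced along
// the next() chain.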
113HInstruction* HBasicBlock::GetLastInstruction() {
114 if (end_ != NULL) return end_->previous();
115 if (first_ == NULL) return NULL;
116 if (last_ == NULL) last_ = first_;
117 while (last_->next() != NULL) last_ = last_->next();
118 return last_;
119}
120
121
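// Creates an HSimulate that records the current environment state for AST
// node 'id': the values pushed since the last simulate and the variables
// assigned since then. The environment's change history is cleared afterwards.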
122HSimulate* HBasicBlock::CreateSimulate(int id) {
123 ASSERT(HasEnvironment());
124 HEnvironment* environment = last_environment();
125 ASSERT(id == AstNode::kNoNumber ||
126 environment->closure()->shared()->VerifyBailoutId(id));
127
128 int push_count = environment->push_count();
129 int pop_count = environment->pop_count();
130
131 int length = environment->length();
132 HSimulate* instr = new HSimulate(id, pop_count, length);
133 for (int i = push_count - 1; i >= 0; --i) {
134 instr->AddPushedValue(environment->ExpressionStackAt(i));
135 }
136 for (int i = 0; i < environment->assigned_variables()->length(); ++i) {
137 int index = environment->assigned_variables()->at(i);
138 instr->AddAssignedValue(index, environment->Lookup(index));
139 }
140 environment->ClearHistory();
141 return instr;
142}
143
144
145void HBasicBlock::Finish(HControlInstruction* end) {
146 ASSERT(!IsFinished());
147 AddInstruction(end);
148 end_ = end;
149 if (end->FirstSuccessor() != NULL) {
150 end->FirstSuccessor()->RegisterPredecessor(this);
151 if (end->SecondSuccessor() != NULL) {
152 end->SecondSuccessor()->RegisterPredecessor(this);
153 }
154 }
155}
156
157
158void HBasicBlock::Goto(HBasicBlock* block, bool include_stack_check) {
159 AddSimulate(AstNode::kNoNumber);
160 HGoto* instr = new HGoto(block);
161 instr->set_include_stack_check(include_stack_check);
162 Finish(instr);
163}
164
165
166void HBasicBlock::SetInitialEnvironment(HEnvironment* env) {
167 ASSERT(!HasEnvironment());
168 ASSERT(first() == NULL);
169 UpdateEnvironment(env);
170}
171
172
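// Sets the AST id on the simulate at the end of every predecessor, so that
// all edges entering this join agree on the same bailout point.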
173void HBasicBlock::SetJoinId(int id) {
174 int length = predecessors_.length();
175 ASSERT(length > 0);
176 for (int i = 0; i < length; i++) {
177 HBasicBlock* predecessor = predecessors_[i];
178 ASSERT(predecessor->end()->IsGoto());
179 HSimulate* simulate = HSimulate::cast(predecessor->GetLastInstruction());
180 // We only need to verify the ID once.
181 ASSERT(i != 0 ||
182 predecessor->last_environment()->closure()->shared()
183 ->VerifyBailoutId(id));
184 simulate->set_ast_id(id);
185 }
186}
187
188
189bool HBasicBlock::Dominates(HBasicBlock* other) const {
190 HBasicBlock* current = other->dominator();
191 while (current != NULL) {
192 if (current == this) return true;
193 current = current->dominator();
194 }
195 return false;
196}
197
198
199void HBasicBlock::PostProcessLoopHeader(IterationStatement* stmt) {
200 ASSERT(IsLoopHeader());
201
202 SetJoinId(stmt->EntryId());
203 if (predecessors()->length() == 1) {
204 // This is a degenerate loop.
205 DetachLoopInformation();
206 return;
207 }
208
209 // Only the first entry into the loop is from outside the loop. All other
210 // entries must be back edges.
211 for (int i = 1; i < predecessors()->length(); ++i) {
212 loop_information()->RegisterBackEdge(predecessors()->at(i));
213 }
214}
215
216
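// Registers 'pred' as a predecessor and merges its outgoing environment:
// loop headers add the incoming values as phi inputs, other blocks merge
// environments edge by edge, and the very first predecessor simply provides
// a copy of its environment.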
217void HBasicBlock::RegisterPredecessor(HBasicBlock* pred) {
218 if (!predecessors_.is_empty()) {
219 // Only loop header blocks can have a predecessor added after
220 // instructions have been added to the block (they have phis for all
221 // values in the environment; these phis may be eliminated later).
222 ASSERT(IsLoopHeader() || first_ == NULL);
223 HEnvironment* incoming_env = pred->last_environment();
224 if (IsLoopHeader()) {
225 ASSERT(phis()->length() == incoming_env->length());
226 for (int i = 0; i < phis_.length(); ++i) {
227 phis_[i]->AddInput(incoming_env->values()->at(i));
228 }
229 } else {
230 last_environment()->AddIncomingEdge(this, pred->last_environment());
231 }
232 } else if (!HasEnvironment() && !IsFinished()) {
233 ASSERT(!IsLoopHeader());
234 SetInitialEnvironment(pred->last_environment()->Copy());
235 }
236
237 predecessors_.Add(pred);
238}
239
240
241void HBasicBlock::AddDominatedBlock(HBasicBlock* block) {
242 ASSERT(!dominated_blocks_.Contains(block));
243 // Keep the list of dominated blocks sorted by block id, so that if one
244 // block in the list succeeds another, the predecessor comes before the successor.
245 int index = 0;
246 while (index < dominated_blocks_.length() &&
247 dominated_blocks_[index]->block_id() < block->block_id()) {
248 ++index;
249 }
250 dominated_blocks_.InsertAt(index, block);
251}
252
253
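// Updates this block's dominator to the common dominator of its current
// dominator and 'other' by walking both chains upwards (always advancing
// the block with the larger id) until they meet.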
254void HBasicBlock::AssignCommonDominator(HBasicBlock* other) {
255 if (dominator_ == NULL) {
256 dominator_ = other;
257 other->AddDominatedBlock(this);
258 } else if (other->dominator() != NULL) {
259 HBasicBlock* first = dominator_;
260 HBasicBlock* second = other;
261
262 while (first != second) {
263 if (first->block_id() > second->block_id()) {
264 first = first->dominator();
265 } else {
266 second = second->dominator();
267 }
268 ASSERT(first != NULL && second != NULL);
269 }
270
271 if (dominator_ != first) {
272 ASSERT(dominator_->dominated_blocks_.Contains(this));
273 dominator_->dominated_blocks_.RemoveElement(this);
274 dominator_ = first;
275 first->AddDominatedBlock(this);
276 }
277 }
278}
279
280
281int HBasicBlock::PredecessorIndexOf(HBasicBlock* predecessor) const {
282 for (int i = 0; i < predecessors_.length(); ++i) {
283 if (predecessors_[i] == predecessor) return i;
284 }
285 UNREACHABLE();
286 return -1;
287}
288
289
290#ifdef DEBUG
291void HBasicBlock::Verify() {
292 // Check that every block is finished.
293 ASSERT(IsFinished());
294 ASSERT(block_id() >= 0);
295
296 // Verify that all blocks targeting a branch target have the same boolean
297 // value on top of their expression stack.
298 if (!cond().is_null()) {
299 ASSERT(predecessors()->length() > 0);
300 for (int i = 1; i < predecessors()->length(); i++) {
301 HBasicBlock* pred = predecessors()->at(i);
302 HValue* top = pred->last_environment()->Top();
303 ASSERT(top->IsConstant());
304 Object* a = *HConstant::cast(top)->handle();
305 Object* b = *cond();
306 ASSERT(a == b);
307 }
308 }
309}
310#endif
311
312
313void HLoopInformation::RegisterBackEdge(HBasicBlock* block) {
314 this->back_edges_.Add(block);
315 AddBlock(block);
316}
317
318
319HBasicBlock* HLoopInformation::GetLastBackEdge() const {
320 int max_id = -1;
321 HBasicBlock* result = NULL;
322 for (int i = 0; i < back_edges_.length(); ++i) {
323 HBasicBlock* cur = back_edges_[i];
324 if (cur->block_id() > max_id) {
325 max_id = cur->block_id();
326 result = cur;
327 }
328 }
329 return result;
330}
331
332
333void HLoopInformation::AddBlock(HBasicBlock* block) {
334 if (block == loop_header()) return;
335 if (block->parent_loop_header() == loop_header()) return;
336 if (block->parent_loop_header() != NULL) {
337 AddBlock(block->parent_loop_header());
338 } else {
339 block->set_parent_loop_header(loop_header());
340 blocks_.Add(block);
341 for (int i = 0; i < block->predecessors()->length(); ++i) {
342 AddBlock(block->predecessors()->at(i));
343 }
344 }
345}
346
347
348#ifdef DEBUG
349
350// Checks reachability of the blocks in this graph and stores a bit in
351// the BitVector "reachable()" for every block that can be reached
352// from the start block of the graph. If "dont_visit" is non-null, the given
353// block is treated as if it were not part of the graph. "visited_count()"
354// returns the number of reachable blocks.
355class ReachabilityAnalyzer BASE_EMBEDDED {
356 public:
357 ReachabilityAnalyzer(HBasicBlock* entry_block,
358 int block_count,
359 HBasicBlock* dont_visit)
360 : visited_count_(0),
361 stack_(16),
362 reachable_(block_count),
363 dont_visit_(dont_visit) {
364 PushBlock(entry_block);
365 Analyze();
366 }
367
368 int visited_count() const { return visited_count_; }
369 const BitVector* reachable() const { return &reachable_; }
370
371 private:
372 void PushBlock(HBasicBlock* block) {
373 if (block != NULL && block != dont_visit_ &&
374 !reachable_.Contains(block->block_id())) {
375 reachable_.Add(block->block_id());
376 stack_.Add(block);
377 visited_count_++;
378 }
379 }
380
381 void Analyze() {
382 while (!stack_.is_empty()) {
383 HControlInstruction* end = stack_.RemoveLast()->end();
384 PushBlock(end->FirstSuccessor());
385 PushBlock(end->SecondSuccessor());
386 }
387 }
388
389 int visited_count_;
390 ZoneList<HBasicBlock*> stack_;
391 BitVector reachable_;
392 HBasicBlock* dont_visit_;
393};
394
395
396void HGraph::Verify() const {
397 for (int i = 0; i < blocks_.length(); i++) {
398 HBasicBlock* block = blocks_.at(i);
399
400 block->Verify();
401
402 // Check that every block contains at least one node and that only the last
403 // node is a control instruction.
404 HInstruction* current = block->first();
405 ASSERT(current != NULL && current->IsBlockEntry());
406 while (current != NULL) {
407 ASSERT((current->next() == NULL) == current->IsControlInstruction());
408 ASSERT(current->block() == block);
409 current->Verify();
410 current = current->next();
411 }
412
413 // Check that successors are correctly set.
414 HBasicBlock* first = block->end()->FirstSuccessor();
415 HBasicBlock* second = block->end()->SecondSuccessor();
416 ASSERT(second == NULL || first != NULL);
417
418 // Check that the predecessor array is correct.
419 if (first != NULL) {
420 ASSERT(first->predecessors()->Contains(block));
421 if (second != NULL) {
422 ASSERT(second->predecessors()->Contains(block));
423 }
424 }
425
426 // Check that phis have correct arguments.
427 for (int j = 0; j < block->phis()->length(); j++) {
428 HPhi* phi = block->phis()->at(j);
429 phi->Verify();
430 }
431
432 // Check that all join blocks have predecessors that end with an
433 // unconditional goto and agree on their environment node id.
434 if (block->predecessors()->length() >= 2) {
435 int id = block->predecessors()->first()->last_environment()->ast_id();
436 for (int k = 0; k < block->predecessors()->length(); k++) {
437 HBasicBlock* predecessor = block->predecessors()->at(k);
438 ASSERT(predecessor->end()->IsGoto());
439 ASSERT(predecessor->last_environment()->ast_id() == id);
440 }
441 }
442 }
443
444 // Check the special property of the first block: it has no predecessors.
445 ASSERT(blocks_.at(0)->predecessors()->is_empty());
446
447 // Check that the graph is fully connected.
448 ReachabilityAnalyzer analyzer(entry_block_, blocks_.length(), NULL);
449 ASSERT(analyzer.visited_count() == blocks_.length());
450
451 // Check that entry block dominator is NULL.
452 ASSERT(entry_block_->dominator() == NULL);
453
454 // Check dominators.
455 for (int i = 0; i < blocks_.length(); ++i) {
456 HBasicBlock* block = blocks_.at(i);
457 if (block->dominator() == NULL) {
458 // Only the start block may have no dominator assigned.
459 ASSERT(i == 0);
460 } else {
461 // Assert that the block is unreachable if its dominator is not visited.
462 ReachabilityAnalyzer dominator_analyzer(entry_block_,
463 blocks_.length(),
464 block->dominator());
465 ASSERT(!dominator_analyzer.reachable()->Contains(block->block_id()));
466 }
467 }
468}
469
470#endif
471
472
473HConstant* HGraph::GetConstant(SetOncePointer<HConstant>* pointer,
474 Object* value) {
475 if (!pointer->is_set()) {
476 HConstant* constant = new HConstant(Handle<Object>(value),
477 Representation::Tagged());
478 constant->InsertAfter(GetConstantUndefined());
479 pointer->set(constant);
480 }
481 return pointer->get();
482}
483
484
485HConstant* HGraph::GetConstant1() {
486 return GetConstant(&constant_1_, Smi::FromInt(1));
487}
488
489
490HConstant* HGraph::GetConstantMinus1() {
491 return GetConstant(&constant_minus1_, Smi::FromInt(-1));
492}
493
494
495HConstant* HGraph::GetConstantTrue() {
496 return GetConstant(&constant_true_, Heap::true_value());
497}
498
499
500HConstant* HGraph::GetConstantFalse() {
501 return GetConstant(&constant_false_, Heap::false_value());
502}
503
504
505void HSubgraph::AppendOptional(HSubgraph* graph,
506 bool on_true_branch,
507 HValue* boolean_value) {
508 ASSERT(HasExit() && graph->HasExit());
509 HBasicBlock* other_block = graph_->CreateBasicBlock();
510 HBasicBlock* join_block = graph_->CreateBasicBlock();
511
512 HBasicBlock* true_branch = other_block;
513 HBasicBlock* false_branch = graph->entry_block();
514 if (on_true_branch) {
515 true_branch = graph->entry_block();
516 false_branch = other_block;
517 }
518
519 exit_block_->Finish(new HBranch(true_branch, false_branch, boolean_value));
520 other_block->Goto(join_block);
521 graph->exit_block()->Goto(join_block);
522 exit_block_ = join_block;
523}
524
525
526void HSubgraph::AppendJoin(HSubgraph* then_graph,
527 HSubgraph* else_graph,
528 AstNode* node) {
529 if (then_graph->HasExit() && else_graph->HasExit()) {
530 // We need to merge; create a new merge block.
531 HBasicBlock* join_block = graph_->CreateBasicBlock();
532 then_graph->exit_block()->Goto(join_block);
533 else_graph->exit_block()->Goto(join_block);
534 join_block->SetJoinId(node->id());
535 exit_block_ = join_block;
536 } else if (then_graph->HasExit()) {
537 exit_block_ = then_graph->exit_block_;
538 } else if (else_graph->HasExit()) {
539 exit_block_ = else_graph->exit_block_;
540 } else {
541 exit_block_ = NULL;
542 }
543}
544
545
546void HSubgraph::ResolveContinue(IterationStatement* statement) {
547 HBasicBlock* continue_block = BundleContinue(statement);
548 if (continue_block != NULL) {
549 exit_block_ = JoinBlocks(exit_block(),
550 continue_block,
551 statement->ContinueId());
552 }
553}
554
555
556HBasicBlock* HSubgraph::BundleBreak(BreakableStatement* statement) {
557 return BundleBreakContinue(statement, false, statement->ExitId());
558}
559
560
561HBasicBlock* HSubgraph::BundleContinue(IterationStatement* statement) {
562 return BundleBreakContinue(statement, true, statement->ContinueId());
563}
564
565
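// Collects all unresolved break or continue records targeting 'statement'
// into a fresh join block (created lazily) and marks them resolved. Returns
// NULL if there was nothing to bundle.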
566HBasicBlock* HSubgraph::BundleBreakContinue(BreakableStatement* statement,
567 bool is_continue,
568 int join_id) {
569 HBasicBlock* result = NULL;
570 const ZoneList<BreakContinueInfo*>* infos = break_continue_info();
571 for (int i = 0; i < infos->length(); ++i) {
572 BreakContinueInfo* info = infos->at(i);
573 if (info->is_continue() == is_continue &&
574 info->target() == statement &&
575 !info->IsResolved()) {
576 if (result == NULL) {
577 result = graph_->CreateBasicBlock();
578 }
579 info->block()->Goto(result);
580 info->Resolve();
581 }
582 }
583
584 if (result != NULL) result->SetJoinId(join_id);
585
586 return result;
587}
588
589
590HBasicBlock* HSubgraph::JoinBlocks(HBasicBlock* a, HBasicBlock* b, int id) {
591 if (a == NULL) return b;
592 if (b == NULL) return a;
593 HBasicBlock* target = graph_->CreateBasicBlock();
594 a->Goto(target);
595 b->Goto(target);
596 target->SetJoinId(id);
597 return target;
598}
599
600
601void HSubgraph::AppendEndless(HSubgraph* body, IterationStatement* statement) {
602 ConnectExitTo(body->entry_block());
603 body->ResolveContinue(statement);
604 body->ConnectExitTo(body->entry_block(), true);
605 exit_block_ = body->BundleBreak(statement);
606 body->entry_block()->PostProcessLoopHeader(statement);
607}
608
609
610void HSubgraph::AppendDoWhile(HSubgraph* body,
611 IterationStatement* statement,
612 HSubgraph* go_back,
613 HSubgraph* exit) {
614 ConnectExitTo(body->entry_block());
615 go_back->ConnectExitTo(body->entry_block(), true);
616
617 HBasicBlock* break_block = body->BundleBreak(statement);
618 exit_block_ =
619 JoinBlocks(exit->exit_block(), break_block, statement->ExitId());
620 body->entry_block()->PostProcessLoopHeader(statement);
621}
622
623
624void HSubgraph::AppendWhile(HSubgraph* condition,
625 HSubgraph* body,
626 IterationStatement* statement,
627 HSubgraph* continue_subgraph,
628 HSubgraph* exit) {
629 ConnectExitTo(condition->entry_block());
630
631 HBasicBlock* break_block = body->BundleBreak(statement);
632 exit_block_ =
633 JoinBlocks(exit->exit_block(), break_block, statement->ExitId());
634
635 if (continue_subgraph != NULL) {
636 body->ConnectExitTo(continue_subgraph->entry_block(), true);
637 continue_subgraph->entry_block()->SetJoinId(statement->EntryId());
638 exit_block_ = JoinBlocks(exit_block_,
639 continue_subgraph->exit_block(),
640 statement->ExitId());
641 } else {
642 body->ConnectExitTo(condition->entry_block(), true);
643 }
644 condition->entry_block()->PostProcessLoopHeader(statement);
645}
646
647
648void HSubgraph::Append(HSubgraph* next, BreakableStatement* stmt) {
649 exit_block_->Goto(next->entry_block());
650 exit_block_ = next->exit_block_;
651
652 if (stmt != NULL) {
653 next->entry_block()->SetJoinId(stmt->EntryId());
654 HBasicBlock* break_block = next->BundleBreak(stmt);
655 exit_block_ = JoinBlocks(exit_block(), break_block, stmt->ExitId());
656 }
657}
658
659
660void HSubgraph::FinishExit(HControlInstruction* instruction) {
661 ASSERT(HasExit());
662 exit_block_->Finish(instruction);
663 exit_block_->ClearEnvironment();
664 exit_block_ = NULL;
665}
666
667
668void HSubgraph::FinishBreakContinue(BreakableStatement* target,
669 bool is_continue) {
670 ASSERT(!exit_block_->IsFinished());
671 BreakContinueInfo* info = new BreakContinueInfo(target, exit_block_,
672 is_continue);
673 break_continue_info_.Add(info);
674 exit_block_ = NULL;
675}
676
677
678HGraph::HGraph(CompilationInfo* info)
679 : HSubgraph(this),
680 next_block_id_(0),
681 info_(info),
682 blocks_(8),
683 values_(16),
684 phi_list_(NULL) {
685 start_environment_ = new HEnvironment(NULL, info->scope(), info->closure());
686 start_environment_->set_ast_id(info->function()->id());
687}
688
689
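// Lowers the hydrogen graph to a lithium chunk, runs the register allocator
// and generates optimized code. Returns a null handle whenever a stage bails
// out or is disabled by a flag.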
690Handle<Code> HGraph::Compile() {
691 int values = GetMaximumValueID();
692 if (values > LAllocator::max_initial_value_ids()) {
693 if (FLAG_trace_bailout) PrintF("Function is too big\n");
694 return Handle<Code>::null();
695 }
696
697 LAllocator allocator(values, this);
698 LChunkBuilder builder(this, &allocator);
699 LChunk* chunk = builder.Build();
700 if (chunk == NULL) return Handle<Code>::null();
701
702 if (!FLAG_alloc_lithium) return Handle<Code>::null();
703
704 allocator.Allocate(chunk);
705
706 if (!FLAG_use_lithium) return Handle<Code>::null();
707
708 MacroAssembler assembler(NULL, 0);
709 LCodeGen generator(chunk, &assembler, info());
710
711 if (FLAG_eliminate_empty_blocks) {
712 chunk->MarkEmptyBlocks();
713 }
714
715 if (generator.GenerateCode()) {
716 if (FLAG_trace_codegen) {
717 PrintF("Crankshaft Compiler - ");
718 }
719 CodeGenerator::MakeCodePrologue(info());
720 Code::Flags flags =
721 Code::ComputeFlags(Code::OPTIMIZED_FUNCTION, NOT_IN_LOOP);
722 Handle<Code> code =
723 CodeGenerator::MakeCodeEpilogue(&assembler, flags, info());
724 generator.FinishCode(code);
725 CodeGenerator::PrintCode(code, info());
726 return code;
727 }
728 return Handle<Code>::null();
729}
730
731
732HBasicBlock* HGraph::CreateBasicBlock() {
733 HBasicBlock* result = new HBasicBlock(this);
734 blocks_.Add(result);
735 return result;
736}
737
738
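// Replaces every instruction by its canonical form: when Canonicalize()
// returns a different value the instruction's uses are redirected to it, and
// instructions that canonicalize to NULL are deleted.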
739void HGraph::Canonicalize() {
740 HPhase phase("Canonicalize", this);
741 if (FLAG_use_canonicalizing) {
742 for (int i = 0; i < blocks()->length(); ++i) {
743 HBasicBlock* b = blocks()->at(i);
744 for (HInstruction* insn = b->first(); insn != NULL; insn = insn->next()) {
745 HValue* value = insn->Canonicalize();
746 if (value != insn) {
747 if (value != NULL) {
748 insn->ReplaceAndDelete(value);
749 } else {
750 insn->Delete();
751 }
752 }
753 }
754 }
755 }
756}
757
758
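// Renumbers and reorders the blocks in reverse postorder, keeping the blocks
// of each loop contiguous after the loop header; later passes rely on this
// block-id range property.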
759void HGraph::OrderBlocks() {
760 HPhase phase("Block ordering");
761 BitVector visited(blocks_.length());
762
763 ZoneList<HBasicBlock*> reverse_result(8);
764 HBasicBlock* start = blocks_[0];
765 Postorder(start, &visited, &reverse_result, NULL);
766
767 blocks_.Clear();
768 int index = 0;
769 for (int i = reverse_result.length() - 1; i >= 0; --i) {
770 HBasicBlock* b = reverse_result[i];
771 blocks_.Add(b);
772 b->set_block_id(index++);
773 }
774}
775
776
777void HGraph::PostorderLoopBlocks(HLoopInformation* loop,
778 BitVector* visited,
779 ZoneList<HBasicBlock*>* order,
780 HBasicBlock* loop_header) {
781 for (int i = 0; i < loop->blocks()->length(); ++i) {
782 HBasicBlock* b = loop->blocks()->at(i);
783 Postorder(b->end()->SecondSuccessor(), visited, order, loop_header);
784 Postorder(b->end()->FirstSuccessor(), visited, order, loop_header);
785 if (b->IsLoopHeader() && b != loop->loop_header()) {
786 PostorderLoopBlocks(b->loop_information(), visited, order, loop_header);
787 }
788 }
789}
790
791
792void HGraph::Postorder(HBasicBlock* block,
793 BitVector* visited,
794 ZoneList<HBasicBlock*>* order,
795 HBasicBlock* loop_header) {
796 if (block == NULL || visited->Contains(block->block_id())) return;
797 if (block->parent_loop_header() != loop_header) return;
798 visited->Add(block->block_id());
799 if (block->IsLoopHeader()) {
800 PostorderLoopBlocks(block->loop_information(), visited, order, loop_header);
801 Postorder(block->end()->SecondSuccessor(), visited, order, block);
802 Postorder(block->end()->FirstSuccessor(), visited, order, block);
803 } else {
804 Postorder(block->end()->SecondSuccessor(), visited, order, loop_header);
805 Postorder(block->end()->FirstSuccessor(), visited, order, loop_header);
806 }
807 ASSERT(block->end()->FirstSuccessor() == NULL ||
808 order->Contains(block->end()->FirstSuccessor()) ||
809 block->end()->FirstSuccessor()->IsLoopHeader());
810 ASSERT(block->end()->SecondSuccessor() == NULL ||
811 order->Contains(block->end()->SecondSuccessor()) ||
812 block->end()->SecondSuccessor()->IsLoopHeader());
813 order->Add(block);
814}
815
816
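// Computes dominators in block order: each block's dominator is the common
// dominator of its predecessors, except that a loop header only considers
// its first (out-of-loop) predecessor.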
817void HGraph::AssignDominators() {
818 HPhase phase("Assign dominators", this);
819 for (int i = 0; i < blocks_.length(); ++i) {
820 if (blocks_[i]->IsLoopHeader()) {
821 blocks_[i]->AssignCommonDominator(blocks_[i]->predecessors()->first());
822 } else {
823 for (int j = 0; j < blocks_[i]->predecessors()->length(); ++j) {
824 blocks_[i]->AssignCommonDominator(blocks_[i]->predecessors()->at(j));
825 }
826 }
827 }
828}
829
830
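// Removes redundant phis by replacing their uses with the single value they
// are equivalent to, re-examining any phi whose operands changed, and also
// drops dead phis that have no uses (unless they carry the receiver).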
831void HGraph::EliminateRedundantPhis() {
832 HPhase phase("Phi elimination", this);
833 ZoneList<HValue*> uses_to_replace(2);
834
835 // Worklist of phis that can potentially be eliminated. Initialized
836 // with all phi nodes. When elimination of a phi node modifies
837 // another phi node, the modified phi node is added to the worklist.
838 ZoneList<HPhi*> worklist(blocks_.length());
839 for (int i = 0; i < blocks_.length(); ++i) {
840 worklist.AddAll(*blocks_[i]->phis());
841 }
842
843 while (!worklist.is_empty()) {
844 HPhi* phi = worklist.RemoveLast();
845 HBasicBlock* block = phi->block();
846
847 // Skip phi node if it was already replaced.
848 if (block == NULL) continue;
849
850 // Get replacement value if phi is redundant.
851 HValue* value = phi->GetRedundantReplacement();
852
853 if (value != NULL) {
854 // Iterate through uses finding the ones that should be
855 // replaced.
856 const ZoneList<HValue*>* uses = phi->uses();
857 for (int i = 0; i < uses->length(); ++i) {
858 HValue* use = uses->at(i);
859 if (!use->block()->IsStartBlock()) {
860 uses_to_replace.Add(use);
861 }
862 }
863 // Replace the uses and add the modified phis to the worklist.
864 for (int i = 0; i < uses_to_replace.length(); ++i) {
865 HValue* use = uses_to_replace[i];
866 phi->ReplaceAtUse(use, value);
867 if (use->IsPhi()) worklist.Add(HPhi::cast(use));
868 }
869 uses_to_replace.Rewind(0);
870 block->RemovePhi(phi);
871 } else if (phi->HasNoUses() &&
872 !phi->HasReceiverOperand() &&
873 FLAG_eliminate_dead_phis) {
874 // We can't eliminate phis that have the receiver as an operand
875 // because in case of throwing an error we need the correct
876 // receiver value in the environment to construct a correct
877 // stack trace.
878 block->RemovePhi(phi);
879 block->RecordDeletedPhi(phi->merged_index());
880 }
881 }
882}
883
884
885bool HGraph::CollectPhis() {
886 const ZoneList<HBasicBlock*>* blocks = graph_->blocks();
887 phi_list_ = new ZoneList<HPhi*>(blocks->length());
888 for (int i = 0; i < blocks->length(); ++i) {
889 for (int j = 0; j < blocks->at(i)->phis()->length(); j++) {
890 HPhi* phi = blocks->at(i)->phis()->at(j);
891 phi_list_->Add(phi);
892 // We don't support phi uses of arguments for now.
893 if (phi->CheckFlag(HValue::kIsArguments)) return false;
894 }
895 }
896 return true;
897}
898
899
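// Fixed-point type inference: repeatedly takes a value off the worklist,
// recomputes its inferred type, and re-queues its uses whenever the type
// actually changed.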
900void HGraph::InferTypes(ZoneList<HValue*>* worklist) {
901 BitVector in_worklist(GetMaximumValueID());
902 for (int i = 0; i < worklist->length(); ++i) {
903 ASSERT(!in_worklist.Contains(worklist->at(i)->id()));
904 in_worklist.Add(worklist->at(i)->id());
905 }
906
907 while (!worklist->is_empty()) {
908 HValue* current = worklist->RemoveLast();
909 in_worklist.Remove(current->id());
910 if (current->UpdateInferredType()) {
911 for (int j = 0; j < current->uses()->length(); j++) {
912 HValue* use = current->uses()->at(j);
913 if (!in_worklist.Contains(use->id())) {
914 in_worklist.Add(use->id());
915 worklist->Add(use);
916 }
917 }
918 }
919 }
920}
921
922
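// Range analysis over the dominator tree: derives integer ranges from
// comparisons guarding control flow and from the instructions themselves,
// and rolls the derived ranges back when leaving a dominator subtree.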
923class HRangeAnalysis BASE_EMBEDDED {
924 public:
925 explicit HRangeAnalysis(HGraph* graph) : graph_(graph), changed_ranges_(16) {}
926
927 void Analyze();
928
929 private:
930 void TraceRange(const char* msg, ...);
931 void Analyze(HBasicBlock* block);
932 void InferControlFlowRange(HBranch* branch, HBasicBlock* dest);
933 void InferControlFlowRange(Token::Value op, HValue* value, HValue* other);
934 void InferPhiRange(HPhi* phi);
935 void InferRange(HValue* value);
936 void RollBackTo(int index);
937 void AddRange(HValue* value, Range* range);
938
939 HGraph* graph_;
940 ZoneList<HValue*> changed_ranges_;
941};
942
943
944void HRangeAnalysis::TraceRange(const char* msg, ...) {
945 if (FLAG_trace_range) {
946 va_list arguments;
947 va_start(arguments, msg);
948 OS::VPrint(msg, arguments);
949 va_end(arguments);
950 }
951}
952
953
954void HRangeAnalysis::Analyze() {
955 HPhase phase("Range analysis", graph_);
956 Analyze(graph_->blocks()->at(0));
957}
958
959
960void HRangeAnalysis::Analyze(HBasicBlock* block) {
961 TraceRange("Analyzing block B%d\n", block->block_id());
962
963 int last_changed_range = changed_ranges_.length() - 1;
964
965 // Infer range based on control flow.
966 if (block->predecessors()->length() == 1) {
967 HBasicBlock* pred = block->predecessors()->first();
968 if (pred->end()->IsBranch()) {
969 InferControlFlowRange(HBranch::cast(pred->end()), block);
970 }
971 }
972
973 // Process phi instructions.
974 for (int i = 0; i < block->phis()->length(); ++i) {
975 HPhi* phi = block->phis()->at(i);
976 InferPhiRange(phi);
977 }
978
979 // Go through all instructions of the current block.
980 HInstruction* instr = block->first();
981 while (instr != block->end()) {
982 InferRange(instr);
983 instr = instr->next();
984 }
985
986 // Continue analysis in all dominated blocks.
987 for (int i = 0; i < block->dominated_blocks()->length(); ++i) {
988 Analyze(block->dominated_blocks()->at(i));
989 }
990
991 RollBackTo(last_changed_range);
992}
993
994
995void HRangeAnalysis::InferControlFlowRange(HBranch* branch, HBasicBlock* dest) {
996 ASSERT(branch->FirstSuccessor() == dest || branch->SecondSuccessor() == dest);
997 ASSERT(branch->FirstSuccessor() != dest || branch->SecondSuccessor() != dest);
998
999 if (branch->value()->IsCompare()) {
1000 HCompare* compare = HCompare::cast(branch->value());
1001 Token::Value op = compare->token();
1002 if (branch->SecondSuccessor() == dest) {
1003 op = Token::NegateCompareOp(op);
1004 }
1005 Token::Value inverted_op = Token::InvertCompareOp(op);
1006 InferControlFlowRange(op, compare->left(), compare->right());
1007 InferControlFlowRange(inverted_op, compare->right(), compare->left());
1008 }
1009}
1010
1011
1012// We know that value [op] other. Use this information to update the range on
1013// value.
1014void HRangeAnalysis::InferControlFlowRange(Token::Value op,
1015 HValue* value,
1016 HValue* other) {
1017 Range* range = other->range();
1018 if (range == NULL) range = new Range();
1019 Range* new_range = NULL;
1020
1021 TraceRange("Control flow range infer %d %s %d\n",
1022 value->id(),
1023 Token::Name(op),
1024 other->id());
1025
1026 if (op == Token::EQ || op == Token::EQ_STRICT) {
1027 // The same range has to apply for value.
1028 new_range = range->Copy();
1029 } else if (op == Token::LT || op == Token::LTE) {
1030 new_range = range->CopyClearLower();
1031 if (op == Token::LT) {
1032 new_range->AddConstant(-1);
1033 }
1034 } else if (op == Token::GT || op == Token::GTE) {
1035 new_range = range->CopyClearUpper();
1036 if (op == Token::GT) {
1037 new_range->AddConstant(1);
1038 }
1039 }
1040
1041 if (new_range != NULL && !new_range->IsMostGeneric()) {
1042 AddRange(value, new_range);
1043 }
1044}
1045
1046
1047void HRangeAnalysis::InferPhiRange(HPhi* phi) {
1048 // TODO(twuerthinger): Infer loop phi ranges.
1049 InferRange(phi);
1050}
1051
1052
1053void HRangeAnalysis::InferRange(HValue* value) {
1054 ASSERT(!value->HasRange());
1055 if (!value->representation().IsNone()) {
1056 value->ComputeInitialRange();
1057 Range* range = value->range();
1058 TraceRange("Initial inferred range of %d (%s) set to [%d,%d]\n",
1059 value->id(),
1060 value->Mnemonic(),
1061 range->lower(),
1062 range->upper());
1063 }
1064}
1065
1066
1067void HRangeAnalysis::RollBackTo(int index) {
1068 for (int i = index + 1; i < changed_ranges_.length(); ++i) {
1069 changed_ranges_[i]->RemoveLastAddedRange();
1070 }
1071 changed_ranges_.Rewind(index + 1);
1072}
1073
1074
1075void HRangeAnalysis::AddRange(HValue* value, Range* range) {
1076 Range* original_range = value->range();
1077 value->AddNewRange(range);
1078 changed_ranges_.Add(value);
1079 Range* new_range = value->range();
1080 TraceRange("Updated range of %d set to [%d,%d]\n",
1081 value->id(),
1082 new_range->lower(),
1083 new_range->upper());
1084 if (original_range != NULL) {
1085 TraceRange("Original range was [%d,%d]\n",
1086 original_range->lower(),
1087 original_range->upper());
1088 }
1089 TraceRange("New information was [%d,%d]\n",
1090 range->lower(),
1091 range->upper());
1092}
1093
1094
1095void TraceGVN(const char* msg, ...) {
1096 if (FLAG_trace_gvn) {
1097 va_list arguments;
1098 va_start(arguments, msg);
1099 OS::VPrint(msg, arguments);
1100 va_end(arguments);
1101 }
1102}
1103
1104
1105HValueMap::HValueMap(const HValueMap* other)
1106 : array_size_(other->array_size_),
1107 lists_size_(other->lists_size_),
1108 count_(other->count_),
1109 present_flags_(other->present_flags_),
1110 array_(Zone::NewArray<HValueMapListElement>(other->array_size_)),
1111 lists_(Zone::NewArray<HValueMapListElement>(other->lists_size_)),
1112 free_list_head_(other->free_list_head_) {
1113 memcpy(array_, other->array_, array_size_ * sizeof(HValueMapListElement));
1114 memcpy(lists_, other->lists_, lists_size_ * sizeof(HValueMapListElement));
1115}
1116
1117
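// Removes every value from the map whose flags depend on one of the given
// side effects, filtering both the directly indexed entries and their
// collision lists in place, and recomputes present_flags_.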
1118void HValueMap::Kill(int flags) {
1119 int depends_flags = HValue::ConvertChangesToDependsFlags(flags);
1120 if ((present_flags_ & depends_flags) == 0) return;
1121 present_flags_ = 0;
1122 for (int i = 0; i < array_size_; ++i) {
1123 HValue* value = array_[i].value;
1124 if (value != NULL) {
1125 // Clear list of collisions first, so we know if it becomes empty.
1126 int kept = kNil; // List of kept elements.
1127 int next;
1128 for (int current = array_[i].next; current != kNil; current = next) {
1129 next = lists_[current].next;
1130 if ((lists_[current].value->flags() & depends_flags) != 0) {
1131 // Drop it.
1132 count_--;
1133 lists_[current].next = free_list_head_;
1134 free_list_head_ = current;
1135 } else {
1136 // Keep it.
1137 lists_[current].next = kept;
1138 kept = current;
1139 present_flags_ |= lists_[current].value->flags();
1140 }
1141 }
1142 array_[i].next = kept;
1143
1144 // Now possibly drop directly indexed element.
1145 if ((array_[i].value->flags() & depends_flags) != 0) { // Drop it.
1146 count_--;
1147 int head = array_[i].next;
1148 if (head == kNil) {
1149 array_[i].value = NULL;
1150 } else {
1151 array_[i].value = lists_[head].value;
1152 array_[i].next = lists_[head].next;
1153 lists_[head].next = free_list_head_;
1154 free_list_head_ = head;
1155 }
1156 } else {
1157 present_flags_ |= array_[i].value->flags(); // Keep it.
1158 }
1159 }
1160 }
1161}
1162
1163
1164HValue* HValueMap::Lookup(HValue* value) const {
1165 uint32_t hash = static_cast<uint32_t>(value->Hashcode());
1166 uint32_t pos = Bound(hash);
1167 if (array_[pos].value != NULL) {
1168 if (array_[pos].value->Equals(value)) return array_[pos].value;
1169 int next = array_[pos].next;
1170 while (next != kNil) {
1171 if (lists_[next].value->Equals(value)) return lists_[next].value;
1172 next = lists_[next].next;
1173 }
1174 }
1175 return NULL;
1176}
1177
1178
1179void HValueMap::Resize(int new_size) {
1180 ASSERT(new_size > count_);
1181 // Hashing the values into the new array has no more collisions than in the
1182 // old hash map, so we can use the existing lists_ array, if we are careful.
1183
1184 // Make sure we have at least one free element.
1185 if (free_list_head_ == kNil) {
1186 ResizeLists(lists_size_ << 1);
1187 }
1188
1189 HValueMapListElement* new_array =
1190 Zone::NewArray<HValueMapListElement>(new_size);
1191 memset(new_array, 0, sizeof(HValueMapListElement) * new_size);
1192
1193 HValueMapListElement* old_array = array_;
1194 int old_size = array_size_;
1195
1196 int old_count = count_;
1197 count_ = 0;
1198 // Do not modify present_flags_. It is currently correct.
1199 array_size_ = new_size;
1200 array_ = new_array;
1201
1202 if (old_array != NULL) {
1203 // Iterate over all the elements in lists, rehashing them.
1204 for (int i = 0; i < old_size; ++i) {
1205 if (old_array[i].value != NULL) {
1206 int current = old_array[i].next;
1207 while (current != kNil) {
1208 Insert(lists_[current].value);
1209 int next = lists_[current].next;
1210 lists_[current].next = free_list_head_;
1211 free_list_head_ = current;
1212 current = next;
1213 }
1214 // Rehash the directly stored value.
1215 Insert(old_array[i].value);
1216 }
1217 }
1218 }
1219 USE(old_count);
1220 ASSERT(count_ == old_count);
1221}
1222
1223
1224void HValueMap::ResizeLists(int new_size) {
1225 ASSERT(new_size > lists_size_);
1226
1227 HValueMapListElement* new_lists =
1228 Zone::NewArray<HValueMapListElement>(new_size);
1229 memset(new_lists, 0, sizeof(HValueMapListElement) * new_size);
1230
1231 HValueMapListElement* old_lists = lists_;
1232 int old_size = lists_size_;
1233
1234 lists_size_ = new_size;
1235 lists_ = new_lists;
1236
1237 if (old_lists != NULL) {
1238 memcpy(lists_, old_lists, old_size * sizeof(HValueMapListElement));
1239 }
1240 for (int i = old_size; i < lists_size_; ++i) {
1241 lists_[i].next = free_list_head_;
1242 free_list_head_ = i;
1243 }
1244}
1245
1246
1247void HValueMap::Insert(HValue* value) {
1248 ASSERT(value != NULL);
1249 // Resize when the hashtable is half full.
1250 if (count_ >= array_size_ >> 1) Resize(array_size_ << 1);
1251 ASSERT(count_ < array_size_);
1252 count_++;
1253 uint32_t pos = Bound(static_cast<uint32_t>(value->Hashcode()));
1254 if (array_[pos].value == NULL) {
1255 array_[pos].value = value;
1256 array_[pos].next = kNil;
1257 } else {
1258 if (free_list_head_ == kNil) {
1259 ResizeLists(lists_size_ << 1);
1260 }
1261 int new_element_pos = free_list_head_;
1262 ASSERT(new_element_pos != kNil);
1263 free_list_head_ = lists_[free_list_head_].next;
1264 lists_[new_element_pos].value = value;
1265 lists_[new_element_pos].next = array_[pos].next;
1266 ASSERT(array_[pos].next == kNil || lists_[array_[pos].next].value != NULL);
1267 array_[pos].next = new_element_pos;
1268 }
1269}
1270
1271
1272class HStackCheckEliminator BASE_EMBEDDED {
1273 public:
1274 explicit HStackCheckEliminator(HGraph* graph) : graph_(graph) { }
1275
1276 void Process();
1277
1278 private:
1279 void RemoveStackCheck(HBasicBlock* block);
1280
1281 HGraph* graph_;
1282};
1283
1284
1285void HStackCheckEliminator::Process() {
1286 // For each loop block, walk the dominator tree from the backwards branch to
1287 // the loop header. If a call instruction is encountered, the backwards branch
1288 // is dominated by a call and the stack check in the backwards branch can be
1289 // removed.
1290 for (int i = 0; i < graph_->blocks()->length(); i++) {
1291 HBasicBlock* block = graph_->blocks()->at(i);
1292 if (block->IsLoopHeader()) {
1293 HBasicBlock* back_edge = block->loop_information()->GetLastBackEdge();
1294 HBasicBlock* dominator = back_edge;
1295 bool back_edge_dominated_by_call = false;
1296 while (dominator != block && !back_edge_dominated_by_call) {
1297 HInstruction* instr = dominator->first();
1298 while (instr != NULL && !back_edge_dominated_by_call) {
1299 if (instr->IsCall()) {
1300 RemoveStackCheck(back_edge);
1301 back_edge_dominated_by_call = true;
1302 }
1303 instr = instr->next();
1304 }
1305 dominator = dominator->dominator();
1306 }
1307 }
1308 }
1309}
1310
1311
1312void HStackCheckEliminator::RemoveStackCheck(HBasicBlock* block) {
1313 HInstruction* instr = block->first();
1314 while (instr != NULL) {
1315 if (instr->IsGoto()) {
1316 HGoto::cast(instr)->set_include_stack_check(false);
1317 return;
1318 }
1319 instr = instr->next();
1320 }
1321}
1322
1323
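// Global value numbering: walks the dominator tree with a map of available
// values, replaces instructions that are equal to an already computed value,
// kills map entries whenever intervening side effects could invalidate them,
// and additionally hoists loop-invariant instructions out of loops.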
1324class HGlobalValueNumberer BASE_EMBEDDED {
1325 public:
1326 explicit HGlobalValueNumberer(HGraph* graph)
1327 : graph_(graph),
1328 block_side_effects_(graph_->blocks()->length()),
1329 loop_side_effects_(graph_->blocks()->length()) {
1330 ASSERT(Heap::allow_allocation(false));
1331 block_side_effects_.AddBlock(0, graph_->blocks()->length());
1332 loop_side_effects_.AddBlock(0, graph_->blocks()->length());
1333 }
1334 ~HGlobalValueNumberer() {
1335 ASSERT(!Heap::allow_allocation(true));
1336 }
1337
1338 void Analyze();
1339
1340 private:
1341 void AnalyzeBlock(HBasicBlock* block, HValueMap* map);
1342 void ComputeBlockSideEffects();
1343 void LoopInvariantCodeMotion();
1344 void ProcessLoopBlock(HBasicBlock* block,
1345 HBasicBlock* before_loop,
1346 int loop_kills);
1347 bool ShouldMove(HInstruction* instr, HBasicBlock* loop_header);
1348
1349 HGraph* graph_;
1350
1351 // A map of block IDs to their side effects.
1352 ZoneList<int> block_side_effects_;
1353
1354 // A map of loop header block IDs to their loop's side effects.
1355 ZoneList<int> loop_side_effects_;
1356};
1357
1358
1359void HGlobalValueNumberer::Analyze() {
1360 ComputeBlockSideEffects();
1361 if (FLAG_loop_invariant_code_motion) {
1362 LoopInvariantCodeMotion();
1363 }
1364 HValueMap* map = new HValueMap();
1365 AnalyzeBlock(graph_->blocks()->at(0), map);
1366}
1367
1368
1369void HGlobalValueNumberer::ComputeBlockSideEffects() {
1370 for (int i = graph_->blocks()->length() - 1; i >= 0; --i) {
1371 // Compute side effects for the block.
1372 HBasicBlock* block = graph_->blocks()->at(i);
1373 HInstruction* instr = block->first();
1374 int id = block->block_id();
1375 int side_effects = 0;
1376 while (instr != NULL) {
1377 side_effects |= (instr->flags() & HValue::ChangesFlagsMask());
1378 instr = instr->next();
1379 }
1380 block_side_effects_[id] |= side_effects;
1381
1382 // Loop headers are part of their loop.
1383 if (block->IsLoopHeader()) {
1384 loop_side_effects_[id] |= side_effects;
1385 }
1386
1387 // Propagate loop side effects upwards.
1388 if (block->HasParentLoopHeader()) {
1389 int header_id = block->parent_loop_header()->block_id();
1390 loop_side_effects_[header_id] |=
1391 block->IsLoopHeader() ? loop_side_effects_[id] : side_effects;
1392 }
1393 }
1394}
1395
1396
1397void HGlobalValueNumberer::LoopInvariantCodeMotion() {
1398 for (int i = graph_->blocks()->length() - 1; i >= 0; --i) {
1399 HBasicBlock* block = graph_->blocks()->at(i);
1400 if (block->IsLoopHeader()) {
1401 int side_effects = loop_side_effects_[block->block_id()];
1402 TraceGVN("Try loop invariant motion for block B%d effects=0x%x\n",
1403 block->block_id(),
1404 side_effects);
1405
1406 HBasicBlock* last = block->loop_information()->GetLastBackEdge();
1407 for (int j = block->block_id(); j <= last->block_id(); ++j) {
1408 ProcessLoopBlock(graph_->blocks()->at(j), block, side_effects);
1409 }
1410 }
1411 }
1412}
1413
1414
1415void HGlobalValueNumberer::ProcessLoopBlock(HBasicBlock* block,
1416 HBasicBlock* loop_header,
1417 int loop_kills) {
1418 HBasicBlock* pre_header = loop_header->predecessors()->at(0);
1419 int depends_flags = HValue::ConvertChangesToDependsFlags(loop_kills);
1420 TraceGVN("Loop invariant motion for B%d depends_flags=0x%x\n",
1421 block->block_id(),
1422 depends_flags);
1423 HInstruction* instr = block->first();
1424 while (instr != NULL) {
1425 HInstruction* next = instr->next();
1426 if (instr->CheckFlag(HValue::kUseGVN) &&
1427 (instr->flags() & depends_flags) == 0) {
1428 TraceGVN("Checking instruction %d (%s)\n",
1429 instr->id(),
1430 instr->Mnemonic());
1431 bool inputs_loop_invariant = true;
1432 for (int i = 0; i < instr->OperandCount(); ++i) {
1433 if (instr->OperandAt(i)->IsDefinedAfter(pre_header)) {
1434 inputs_loop_invariant = false;
1435 }
1436 }
1437
1438 if (inputs_loop_invariant && ShouldMove(instr, loop_header)) {
1439 TraceGVN("Found loop invariant instruction %d\n", instr->id());
1440 // Move the instruction out of the loop.
1441 instr->Unlink();
1442 instr->InsertBefore(pre_header->end());
1443 }
1444 }
1445 instr = next;
1446 }
1447}
1448
1449// Only move instructions that postdominate the loop header (i.e. are
1450// always executed inside the loop). This is to avoid unnecessary
1451// deoptimizations assuming the loop is executed at least once.
1452// TODO(fschneider): Better type feedback should give us information
1453// about code that was never executed.
1454bool HGlobalValueNumberer::ShouldMove(HInstruction* instr,
1455 HBasicBlock* loop_header) {
1456 if (!instr->IsChange() &&
1457 FLAG_aggressive_loop_invariant_motion) return true;
1458 HBasicBlock* block = instr->block();
1459 bool result = true;
1460 if (block != loop_header) {
1461 for (int i = 1; i < loop_header->predecessors()->length(); ++i) {
1462 bool found = false;
1463 HBasicBlock* pred = loop_header->predecessors()->at(i);
1464 while (pred != loop_header) {
1465 if (pred == block) found = true;
1466 pred = pred->dominator();
1467 }
1468 if (!found) {
1469 result = false;
1470 break;
1471 }
1472 }
1473 }
1474 return result;
1475}
1476
1477
1478void HGlobalValueNumberer::AnalyzeBlock(HBasicBlock* block, HValueMap* map) {
1479 TraceGVN("Analyzing block B%d\n", block->block_id());
1480
1481 // If this is a loop header, kill everything killed by the loop.
1482 if (block->IsLoopHeader()) {
1483 map->Kill(loop_side_effects_[block->block_id()]);
1484 }
1485
1486 // Go through all instructions of the current block.
1487 HInstruction* instr = block->first();
1488 while (instr != NULL) {
1489 HInstruction* next = instr->next();
1490 int flags = (instr->flags() & HValue::ChangesFlagsMask());
1491 if (flags != 0) {
1492 ASSERT(!instr->CheckFlag(HValue::kUseGVN));
1493 // Clear all instructions in the map that are affected by side effects.
1494 map->Kill(flags);
1495 TraceGVN("Instruction %d kills\n", instr->id());
1496 } else if (instr->CheckFlag(HValue::kUseGVN)) {
1497 HValue* other = map->Lookup(instr);
1498 if (other != NULL) {
1499 ASSERT(instr->Equals(other) && other->Equals(instr));
1500 TraceGVN("Replacing value %d (%s) with value %d (%s)\n",
1501 instr->id(),
1502 instr->Mnemonic(),
1503 other->id(),
1504 other->Mnemonic());
1505 instr->ReplaceValue(other);
1506 instr->Delete();
1507 } else {
1508 map->Add(instr);
1509 }
1510 }
1511 instr = next;
1512 }
1513
1514 // Recursively continue analysis for all immediately dominated blocks.
1515 int length = block->dominated_blocks()->length();
1516 for (int i = 0; i < length; ++i) {
1517 HBasicBlock* dominated = block->dominated_blocks()->at(i);
1518 // No need to copy the map for the last child in the dominator tree.
1519 HValueMap* successor_map = (i == length - 1) ? map : map->Copy();
1520
1521 // If the dominated block is not a successor of this block, we have to
1522 // kill everything killed on any path between this block and the
1523 // dominated block. Note we rely on the block ordering.
1524 bool is_successor = false;
1525 int predecessor_count = dominated->predecessors()->length();
1526 for (int j = 0; !is_successor && j < predecessor_count; ++j) {
1527 is_successor = (dominated->predecessors()->at(j) == block);
1528 }
1529
1530 if (!is_successor) {
1531 int side_effects = 0;
1532 for (int j = block->block_id() + 1; j < dominated->block_id(); ++j) {
1533 side_effects |= block_side_effects_[j];
1534 }
1535 successor_map->Kill(side_effects);
1536 }
1537
1538 AnalyzeBlock(dominated, successor_map);
1539 }
1540}
1541
1542
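// Representation inference: iterates a worklist of values with flexible
// representation and specializes each one based on the representations of
// its inputs and on the representations its uses require.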
1543class HInferRepresentation BASE_EMBEDDED {
1544 public:
1545 explicit HInferRepresentation(HGraph* graph)
1546 : graph_(graph), worklist_(8), in_worklist_(graph->GetMaximumValueID()) {}
1547
1548 void Analyze();
1549
1550 private:
1551 Representation TryChange(HValue* current);
1552 void AddToWorklist(HValue* current);
1553 void InferBasedOnInputs(HValue* current);
1554 void AddDependantsToWorklist(HValue* current);
1555 void InferBasedOnUses(HValue* current);
1556
1557 HGraph* graph_;
1558 ZoneList<HValue*> worklist_;
1559 BitVector in_worklist_;
1560};
1561
1562
1563void HInferRepresentation::AddToWorklist(HValue* current) {
1564 if (current->representation().IsSpecialization()) return;
1565 if (!current->CheckFlag(HValue::kFlexibleRepresentation)) return;
1566 if (in_worklist_.Contains(current->id())) return;
1567 worklist_.Add(current);
1568 in_worklist_.Add(current->id());
1569}
1570
1571
1572// This method tries to specialize the representation type of the value
1573// given as a parameter. The value is asked to infer its representation type
1574// based on its inputs. If the inferred type is more specialized, then this
1575// becomes the new representation type of the node.
1576void HInferRepresentation::InferBasedOnInputs(HValue* current) {
1577 Representation r = current->representation();
1578 if (r.IsSpecialization()) return;
1579 ASSERT(current->CheckFlag(HValue::kFlexibleRepresentation));
1580 Representation inferred = current->InferredRepresentation();
1581 if (inferred.IsSpecialization()) {
1582 current->ChangeRepresentation(inferred);
1583 AddDependantsToWorklist(current);
1584 }
1585}
1586
1587
1588void HInferRepresentation::AddDependantsToWorklist(HValue* current) {
1589 for (int i = 0; i < current->uses()->length(); ++i) {
1590 AddToWorklist(current->uses()->at(i));
1591 }
1592 for (int i = 0; i < current->OperandCount(); ++i) {
1593 AddToWorklist(current->OperandAt(i));
1594 }
1595}
1596
1597
1598// This method calculates whether specializing the representation of the value
1599// given as the parameter has a benefit in terms of fewer necessary type
1600// conversions. If there is a benefit, then the representation of the value is
1601// specialized.
1602void HInferRepresentation::InferBasedOnUses(HValue* current) {
1603 Representation r = current->representation();
1604 if (r.IsSpecialization() || current->HasNoUses()) return;
1605 ASSERT(current->CheckFlag(HValue::kFlexibleRepresentation));
1606 Representation new_rep = TryChange(current);
1607 if (!new_rep.IsNone()) {
1608 if (!current->representation().Equals(new_rep)) {
1609 current->ChangeRepresentation(new_rep);
1610 AddDependantsToWorklist(current);
1611 }
1612 }
1613}
1614
1615
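// Picks a new representation for 'current' by counting the input
// representations required by its uses (including indirect uses through
// connected phis); double wins over int32, and untagged representations are
// only chosen when they are at least as common as tagged uses.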
1616Representation HInferRepresentation::TryChange(HValue* current) {
1617 // Array of use counts for each representation.
1618 int use_count[Representation::kNumRepresentations];
1619 for (int i = 0; i < Representation::kNumRepresentations; i++) {
1620 use_count[i] = 0;
1621 }
1622
1623 for (int i = 0; i < current->uses()->length(); ++i) {
1624 HValue* use = current->uses()->at(i);
1625 int index = use->LookupOperandIndex(0, current);
1626 Representation req_rep = use->RequiredInputRepresentation(index);
1627 if (req_rep.IsNone()) continue;
1628 if (use->IsPhi()) {
1629 HPhi* phi = HPhi::cast(use);
1630 phi->AddIndirectUsesTo(&use_count[0]);
1631 }
1632 use_count[req_rep.kind()]++;
1633 }
1634 int tagged_count = use_count[Representation::kTagged];
1635 int double_count = use_count[Representation::kDouble];
1636 int int32_count = use_count[Representation::kInteger32];
1637 int non_tagged_count = double_count + int32_count;
1638
1639 // If a non-loop phi has tagged uses, don't convert it to untagged.
1640 if (current->IsPhi() && !current->block()->IsLoopHeader()) {
1641 if (tagged_count > 0) return Representation::None();
1642 }
1643
1644 if (non_tagged_count >= tagged_count) {
1645 // More untagged than tagged.
1646 if (double_count > 0) {
1647 // There is at least one usage that is a double => guess that the
1648 // correct representation is double.
1649 return Representation::Double();
1650 } else if (int32_count > 0) {
1651 return Representation::Integer32();
1652 }
1653 }
1654 return Representation::None();
1655}
1656
1657
1658void HInferRepresentation::Analyze() {
1659 HPhase phase("Infer representations", graph_);
1660
1661 // (1) Initialize bit vectors and count real uses. Each phi
1662 // gets a bit-vector of length <number of phis>.
1663 const ZoneList<HPhi*>* phi_list = graph_->phi_list();
1664 int num_phis = phi_list->length();
1665 ScopedVector<BitVector*> connected_phis(num_phis);
1666 for (int i = 0; i < num_phis; i++) {
1667 phi_list->at(i)->InitRealUses(i);
1668 connected_phis[i] = new BitVector(num_phis);
1669 connected_phis[i]->Add(i);
1670 }
1671
1672 // (2) Do a fixed point iteration to find the set of connected phis.
1673 // A phi is connected to another phi if its value is used either
1674 // directly or indirectly through a transitive closure of the def-use
1675 // relation.
1676 bool change = true;
1677 while (change) {
1678 change = false;
1679 for (int i = 0; i < num_phis; i++) {
1680 HPhi* phi = phi_list->at(i);
1681 for (int j = 0; j < phi->uses()->length(); j++) {
1682 HValue* use = phi->uses()->at(j);
1683 if (use->IsPhi()) {
1684 int phi_use = HPhi::cast(use)->phi_id();
1685 if (connected_phis[i]->UnionIsChanged(*connected_phis[phi_use])) {
1686 change = true;
1687 }
1688 }
1689 }
1690 }
1691 }
1692
1693 // (3) Sum up the non-phi use counts of all connected phis.
1694 // Don't include the non-phi uses of the phi itself.
1695 for (int i = 0; i < num_phis; i++) {
1696 HPhi* phi = phi_list->at(i);
1697 for (BitVector::Iterator it(connected_phis.at(i));
1698 !it.Done();
1699 it.Advance()) {
1700 int index = it.Current();
1701 if (index != i) {
1702 HPhi* it_use = phi_list->at(it.Current());
1703 phi->AddNonPhiUsesFrom(it_use);
1704 }
1705 }
1706 }
1707
1708 for (int i = 0; i < graph_->blocks()->length(); ++i) {
1709 HBasicBlock* block = graph_->blocks()->at(i);
1710 const ZoneList<HPhi*>* phis = block->phis();
1711 for (int j = 0; j < phis->length(); ++j) {
1712 AddToWorklist(phis->at(j));
1713 }
1714
1715 HInstruction* current = block->first();
1716 while (current != NULL) {
1717 AddToWorklist(current);
1718 current = current->next();
1719 }
1720 }
1721
1722 while (!worklist_.is_empty()) {
1723 HValue* current = worklist_.RemoveLast();
1724 in_worklist_.Remove(current->id());
1725 InferBasedOnInputs(current);
1726 InferBasedOnUses(current);
1727 }
1728}
1729
1730
1731void HGraph::InitializeInferredTypes() {
1732 HPhase phase("Inferring types", this);
1733 InitializeInferredTypes(0, this->blocks_.length() - 1);
1734}
1735
1736
1737void HGraph::InitializeInferredTypes(int from_inclusive, int to_inclusive) {
1738 for (int i = from_inclusive; i <= to_inclusive; ++i) {
1739 HBasicBlock* block = blocks_[i];
1740
1741 const ZoneList<HPhi*>* phis = block->phis();
1742 for (int j = 0; j < phis->length(); j++) {
1743 phis->at(j)->UpdateInferredType();
1744 }
1745
1746 HInstruction* current = block->first();
1747 while (current != NULL) {
1748 current->UpdateInferredType();
1749 current = current->next();
1750 }
1751
1752 if (block->IsLoopHeader()) {
1753 HBasicBlock* last_back_edge =
1754 block->loop_information()->GetLastBackEdge();
1755 InitializeInferredTypes(i + 1, last_back_edge->block_id());
1756 // Skip all blocks already processed by the recursive call.
1757 i = last_back_edge->block_id();
1758 // Update the phis of the loop header now that the whole loop body is
1759 // guaranteed to have been processed.
1760 ZoneList<HValue*> worklist(block->phis()->length());
1761 for (int j = 0; j < block->phis()->length(); ++j) {
1762 worklist.Add(block->phis()->at(j));
1763 }
1764 InferTypes(&worklist);
1765 }
1766 }
1767}
1768
1769
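// Propagates minus-zero check requirements backwards from a value to the
// phis, multiplications and divisions that feed it, using 'visited' to make
// sure each value is processed only once.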
1770void HGraph::PropagateMinusZeroChecks(HValue* value, BitVector* visited) {
1771 HValue* current = value;
1772 while (current != NULL) {
1773 if (visited->Contains(current->id())) return;
1774
1775 // For phis, we must propagate the check to all of its inputs.
1776 if (current->IsPhi()) {
1777 visited->Add(current->id());
1778 HPhi* phi = HPhi::cast(current);
1779 for (int i = 0; i < phi->OperandCount(); ++i) {
1780 PropagateMinusZeroChecks(phi->OperandAt(i), visited);
1781 }
1782 break;
1783 }
1784
1785 // For multiplication and division, we must propagate to the left and
1786 // the right side.
1787 if (current->IsMul()) {
1788 HMul* mul = HMul::cast(current);
1789 mul->EnsureAndPropagateNotMinusZero(visited);
1790 PropagateMinusZeroChecks(mul->left(), visited);
1791 PropagateMinusZeroChecks(mul->right(), visited);
1792 } else if (current->IsDiv()) {
1793 HDiv* div = HDiv::cast(current);
1794 div->EnsureAndPropagateNotMinusZero(visited);
1795 PropagateMinusZeroChecks(div->left(), visited);
1796 PropagateMinusZeroChecks(div->right(), visited);
1797 }
1798
1799 current = current->EnsureAndPropagateNotMinusZero(visited);
1800 }
1801}
1802
1803
1804void HGraph::InsertRepresentationChangeForUse(HValue* value,
1805 HValue* use,
1806 Representation to,
1807 bool is_truncating) {
 1808  // Propagate flags for negative zero checks upwards from int32-to-tagged
 1809  // and int32-to-double conversions.
1810 Representation from = value->representation();
1811 if (from.IsInteger32()) {
1812 ASSERT(to.IsTagged() || to.IsDouble());
1813 BitVector visited(GetMaximumValueID());
1814 PropagateMinusZeroChecks(value, &visited);
1815 }
1816
1817 // Insert the representation change right before its use. For phi-uses we
1818 // insert at the end of the corresponding predecessor.
1819 HBasicBlock* insert_block = use->block();
1820 if (use->IsPhi()) {
1821 int index = 0;
1822 while (use->OperandAt(index) != value) ++index;
1823 insert_block = insert_block->predecessors()->at(index);
1824 }
1825
1826 HInstruction* next = (insert_block == use->block())
1827 ? HInstruction::cast(use)
1828 : insert_block->end();
1829
1830 // For constants we try to make the representation change at compile
1831 // time. When a representation change is not possible without loss of
1832 // information we treat constants like normal instructions and insert the
1833 // change instructions for them.
1834 HInstruction* new_value = NULL;
1835 if (value->IsConstant()) {
1836 HConstant* constant = HConstant::cast(value);
1837 // Try to create a new copy of the constant with the new representation.
1838 new_value = is_truncating
1839 ? constant->CopyToTruncatedInt32()
1840 : constant->CopyToRepresentation(to);
1841 }
1842
1843 if (new_value == NULL) {
1844 new_value = new HChange(value, value->representation(), to);
1845 }
1846
1847 new_value->InsertBefore(next);
1848 value->ReplaceFirstAtUse(use, new_value, to);
1849}
1850
1851
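// Comparison function used to keep the list of pending representation
// changes sorted: conversions are grouped by target representation kind,
// truncating conversions come before non-truncating ones, and ties are
// broken by increasing block ID.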
1852int CompareConversionUses(HValue* a,
1853 HValue* b,
1854 Representation a_rep,
1855 Representation b_rep) {
1856 if (a_rep.kind() > b_rep.kind()) {
1857 // Make sure specializations are separated in the result array.
1858 return 1;
1859 }
1860 // Put truncating conversions before non-truncating conversions.
1861 bool a_truncate = a->CheckFlag(HValue::kTruncatingToInt32);
1862 bool b_truncate = b->CheckFlag(HValue::kTruncatingToInt32);
1863 if (a_truncate != b_truncate) {
1864 return a_truncate ? -1 : 1;
1865 }
1866 // Sort by increasing block ID.
1867 return a->block()->block_id() - b->block()->block_id();
1868}
1869
1870
1871void HGraph::InsertRepresentationChanges(HValue* current) {
1872 Representation r = current->representation();
1873 if (r.IsNone()) return;
1874 if (current->uses()->length() == 0) return;
1875
1876 // Collect the representation changes in a sorted list. This allows
1877 // us to avoid duplicate changes without searching the list.
1878 ZoneList<HValue*> to_convert(2);
1879 ZoneList<Representation> to_convert_reps(2);
1880 for (int i = 0; i < current->uses()->length(); ++i) {
1881 HValue* use = current->uses()->at(i);
 1882    // The occurrence index is the index within the operand array of "use"
 1883    // at which "current" is used. While iterating through the use array we
 1884    // also have to step through the different occurrence indices.
1885 int occurrence_index = 0;
1886 if (use->UsesMultipleTimes(current)) {
1887 occurrence_index = current->uses()->CountOccurrences(use, 0, i - 1);
1888 if (FLAG_trace_representation) {
1889 PrintF("Instruction %d is used multiple times at %d; occurrence=%d\n",
1890 current->id(),
1891 use->id(),
1892 occurrence_index);
1893 }
1894 }
1895 int operand_index = use->LookupOperandIndex(occurrence_index, current);
1896 Representation req = use->RequiredInputRepresentation(operand_index);
1897 if (req.IsNone() || req.Equals(r)) continue;
1898 int index = 0;
1899 while (to_convert.length() > index &&
1900 CompareConversionUses(to_convert[index],
1901 use,
1902 to_convert_reps[index],
1903 req) < 0) {
1904 ++index;
1905 }
1906 if (FLAG_trace_representation) {
1907 PrintF("Inserting a representation change to %s of %d for use at %d\n",
1908 req.Mnemonic(),
1909 current->id(),
1910 use->id());
1911 }
1912 to_convert.InsertAt(index, use);
1913 to_convert_reps.InsertAt(index, req);
1914 }
1915
1916 for (int i = 0; i < to_convert.length(); ++i) {
1917 HValue* use = to_convert[i];
1918 Representation r_to = to_convert_reps[i];
1919 bool is_truncating = use->CheckFlag(HValue::kTruncatingToInt32);
1920 InsertRepresentationChangeForUse(current, use, r_to, is_truncating);
1921 }
1922
1923 if (current->uses()->is_empty()) {
1924 ASSERT(current->IsConstant());
1925 current->Delete();
1926 }
1927}
1928
1929
1930void HGraph::InsertRepresentationChanges() {
1931 HPhase phase("Insert representation changes", this);
1932
1933
1934 // Compute truncation flag for phis: Initially assume that all
1935 // int32-phis allow truncation and iteratively remove the ones that
1936 // are used in an operation that does not allow a truncating
1937 // conversion.
1938 // TODO(fschneider): Replace this with a worklist-based iteration.
1939 for (int i = 0; i < phi_list()->length(); i++) {
1940 HPhi* phi = phi_list()->at(i);
1941 if (phi->representation().IsInteger32()) {
1942 phi->SetFlag(HValue::kTruncatingToInt32);
1943 }
1944 }
1945 bool change = true;
1946 while (change) {
1947 change = false;
1948 for (int i = 0; i < phi_list()->length(); i++) {
1949 HPhi* phi = phi_list()->at(i);
1950 if (!phi->CheckFlag(HValue::kTruncatingToInt32)) continue;
1951 for (int j = 0; j < phi->uses()->length(); j++) {
1952 HValue* use = phi->uses()->at(j);
1953 if (!use->CheckFlag(HValue::kTruncatingToInt32)) {
1954 phi->ClearFlag(HValue::kTruncatingToInt32);
1955 change = true;
1956 break;
1957 }
1958 }
1959 }
1960 }
1961
1962 for (int i = 0; i < blocks_.length(); ++i) {
1963 // Process phi instructions first.
1964 for (int j = 0; j < blocks_[i]->phis()->length(); j++) {
1965 HPhi* phi = blocks_[i]->phis()->at(j);
1966 InsertRepresentationChanges(phi);
1967 }
1968
1969 // Process normal instructions.
1970 HInstruction* current = blocks_[i]->first();
1971 while (current != NULL) {
1972 InsertRepresentationChanges(current);
1973 current = current->next();
1974 }
1975 }
1976}
1977
1978
1979// Implementation of utility classes to represent an expression's context in
1980// the AST.
1981AstContext::AstContext(HGraphBuilder* owner, Expression::Context kind)
1982 : owner_(owner), kind_(kind), outer_(owner->ast_context()) {
1983 owner->set_ast_context(this); // Push.
1984#ifdef DEBUG
 1985  original_length_ = owner->environment()->length();
 1986#endif
1987}
1988
1989
1990AstContext::~AstContext() {
1991 owner_->set_ast_context(outer_); // Pop.
1992}
1993
1994
1995EffectContext::~EffectContext() {
1996 ASSERT(owner()->HasStackOverflow() ||
1997 !owner()->subgraph()->HasExit() ||
 1998         owner()->environment()->length() == original_length_);
 1999}
2000
2001
2002ValueContext::~ValueContext() {
2003 ASSERT(owner()->HasStackOverflow() ||
2004 !owner()->subgraph()->HasExit() ||
 2005         owner()->environment()->length() == original_length_ + 1);
 2006}
2007
2008
2009void EffectContext::ReturnValue(HValue* value) {
2010 // The value is simply ignored.
2011}
2012
2013
2014void ValueContext::ReturnValue(HValue* value) {
2015 // The value is tracked in the bailout environment, and communicated
2016 // through the environment as the result of the expression.
2017 owner()->Push(value);
2018}
2019
2020
2021void TestContext::ReturnValue(HValue* value) {
2022 BuildBranch(value);
2023}
2024
2025
2026void EffectContext::ReturnInstruction(HInstruction* instr, int ast_id) {
2027 owner()->AddInstruction(instr);
2028 if (instr->HasSideEffects()) owner()->AddSimulate(ast_id);
2029}
2030
2031
2032void ValueContext::ReturnInstruction(HInstruction* instr, int ast_id) {
2033 owner()->AddInstruction(instr);
2034 owner()->Push(instr);
2035 if (instr->HasSideEffects()) owner()->AddSimulate(ast_id);
2036}
2037
2038
2039void TestContext::ReturnInstruction(HInstruction* instr, int ast_id) {
2040 HGraphBuilder* builder = owner();
2041 builder->AddInstruction(instr);
2042 // We expect a simulate after every expression with side effects, though
2043 // this one isn't actually needed (and wouldn't work if it were targeted).
2044 if (instr->HasSideEffects()) {
2045 builder->Push(instr);
2046 builder->AddSimulate(ast_id);
2047 builder->Pop();
2048 }
2049 BuildBranch(instr);
2050}
2051
2052
2053void TestContext::BuildBranch(HValue* value) {
2054 // We expect the graph to be in edge-split form: there is no edge that
2055 // connects a branch node to a join node. We conservatively ensure that
2056 // property by always adding an empty block on the outgoing edges of this
2057 // branch.
2058 HGraphBuilder* builder = owner();
2059 HBasicBlock* empty_true = builder->graph()->CreateBasicBlock();
2060 HBasicBlock* empty_false = builder->graph()->CreateBasicBlock();
2061 HBranch* branch = new HBranch(empty_true, empty_false, value);
2062 builder->CurrentBlock()->Finish(branch);
2063
2064 HValue* const no_return_value = NULL;
2065 HBasicBlock* true_target = if_true();
2066 if (true_target->IsInlineReturnTarget()) {
2067 empty_true->AddLeaveInlined(no_return_value, true_target);
2068 } else {
2069 empty_true->Goto(true_target);
2070 }
2071
2072 HBasicBlock* false_target = if_false();
2073 if (false_target->IsInlineReturnTarget()) {
2074 empty_false->AddLeaveInlined(no_return_value, false_target);
2075 } else {
2076 empty_false->Goto(false_target);
2077 }
2078 builder->subgraph()->set_exit_block(NULL);
2079}
2080
2081
2082// HGraphBuilder infrastructure for bailing out and checking bailouts.
2083#define BAILOUT(reason) \
2084 do { \
2085 Bailout(reason); \
2086 return; \
2087 } while (false)
2088
2089
2090#define CHECK_BAILOUT \
2091 do { \
2092 if (HasStackOverflow()) return; \
2093 } while (false)
2094
2095
2096#define VISIT_FOR_EFFECT(expr) \
2097 do { \
2098 VisitForEffect(expr); \
2099 if (HasStackOverflow()) return; \
2100 } while (false)
2101
2102
2103#define VISIT_FOR_VALUE(expr) \
2104 do { \
2105 VisitForValue(expr); \
2106 if (HasStackOverflow()) return; \
2107 } while (false)
2108
2109
2110#define VISIT_FOR_CONTROL(expr, true_block, false_block) \
2111 do { \
2112 VisitForControl(expr, true_block, false_block); \
2113 if (HasStackOverflow()) return; \
2114 } while (false)
2115
2116
2117// 'thing' could be an expression, statement, or list of statements.
2118#define ADD_TO_SUBGRAPH(graph, thing) \
2119 do { \
2120 AddToSubgraph(graph, thing); \
2121 if (HasStackOverflow()) return; \
2122 } while (false)
2123
2124
2125class HGraphBuilder::SubgraphScope BASE_EMBEDDED {
2126 public:
2127 SubgraphScope(HGraphBuilder* builder, HSubgraph* new_subgraph)
2128 : builder_(builder) {
2129 old_subgraph_ = builder_->current_subgraph_;
2130 subgraph_ = new_subgraph;
2131 builder_->current_subgraph_ = subgraph_;
2132 }
2133
2134 ~SubgraphScope() {
2135 old_subgraph_->AddBreakContinueInfo(subgraph_);
2136 builder_->current_subgraph_ = old_subgraph_;
2137 }
2138
2139 HSubgraph* subgraph() const { return subgraph_; }
2140
2141 private:
2142 HGraphBuilder* builder_;
2143 HSubgraph* old_subgraph_;
2144 HSubgraph* subgraph_;
2145};
2146
2147
2148void HGraphBuilder::Bailout(const char* reason) {
2149 if (FLAG_trace_bailout) {
2150 SmartPointer<char> debug_name = graph()->debug_name()->ToCString();
2151 PrintF("Bailout in HGraphBuilder: @\"%s\": %s\n", *debug_name, reason);
2152 }
2153 SetStackOverflow();
2154}
2155
2156
2157void HGraphBuilder::VisitForEffect(Expression* expr) {
2158 EffectContext for_effect(this);
2159 Visit(expr);
2160}
2161
2162
2163void HGraphBuilder::VisitForValue(Expression* expr) {
2164 ValueContext for_value(this);
2165 Visit(expr);
2166}
2167
2168
2169void HGraphBuilder::VisitForControl(Expression* expr,
2170 HBasicBlock* true_block,
2171 HBasicBlock* false_block) {
2172 TestContext for_test(this, true_block, false_block);
2173 Visit(expr);
2174}
2175
2176
2177HValue* HGraphBuilder::VisitArgument(Expression* expr) {
2178 VisitForValue(expr);
2179 if (HasStackOverflow() || !subgraph()->HasExit()) return NULL;
2180 return environment()->Top();
2181}
2182
2183
2184void HGraphBuilder::VisitArgumentList(ZoneList<Expression*>* arguments) {
2185 for (int i = 0; i < arguments->length(); i++) {
2186 VisitArgument(arguments->at(i));
2187 if (HasStackOverflow() || !current_subgraph_->HasExit()) return;
2188 }
2189}
2190
2191
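// Build the Hydrogen graph for the given compilation info and run the
// optimization pipeline on it: block ordering, dominator assignment, phi
// elimination, representation inference, optional range analysis, type
// inference, canonicalization, representation changes, stack check
// elimination and, optionally, global value numbering.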
2192HGraph* HGraphBuilder::CreateGraph(CompilationInfo* info) {
2193 ASSERT(current_subgraph_ == NULL);
2194 graph_ = new HGraph(info);
2195
2196 {
2197 HPhase phase("Block building");
2198 graph_->Initialize(CreateBasicBlock(graph_->start_environment()));
2199 current_subgraph_ = graph_;
2200
2201 Scope* scope = info->scope();
2202 SetupScope(scope);
2203 VisitDeclarations(scope->declarations());
2204
2205 AddInstruction(new HStackCheck());
2206
2207 ZoneList<Statement*>* stmts = info->function()->body();
2208 HSubgraph* body = CreateGotoSubgraph(environment());
2209 AddToSubgraph(body, stmts);
2210 if (HasStackOverflow()) return NULL;
2211 current_subgraph_->Append(body, NULL);
2212 body->entry_block()->SetJoinId(info->function()->id());
2213
2214 if (graph_->HasExit()) {
2215 graph_->FinishExit(new HReturn(graph_->GetConstantUndefined()));
2216 }
2217 }
2218
2219 graph_->OrderBlocks();
2220 graph_->AssignDominators();
2221 graph_->EliminateRedundantPhis();
2222 if (!graph_->CollectPhis()) {
2223 Bailout("Phi-use of arguments object");
2224 return NULL;
2225 }
2226
2227 HInferRepresentation rep(graph_);
2228 rep.Analyze();
2229
2230 if (FLAG_use_range) {
2231 HRangeAnalysis rangeAnalysis(graph_);
2232 rangeAnalysis.Analyze();
2233 }
2234
2235 graph_->InitializeInferredTypes();
2236 graph_->Canonicalize();
2237 graph_->InsertRepresentationChanges();
2238
2239 // Eliminate redundant stack checks on backwards branches.
2240 HStackCheckEliminator sce(graph_);
2241 sce.Process();
2242
2243 // Perform common subexpression elimination and loop-invariant code motion.
2244 if (FLAG_use_gvn) {
2245 HPhase phase("Global value numbering", graph_);
2246 HGlobalValueNumberer gvn(graph_);
2247 gvn.Analyze();
2248 }
2249
2250 return graph_;
2251}
2252
2253
2254void HGraphBuilder::AddToSubgraph(HSubgraph* graph, Statement* stmt) {
2255 SubgraphScope scope(this, graph);
2256 Visit(stmt);
2257}
2258
2259
2260void HGraphBuilder::AddToSubgraph(HSubgraph* graph, Expression* expr) {
2261 SubgraphScope scope(this, graph);
2262 VisitForValue(expr);
2263}
2264
2265
2266void HGraphBuilder::AddToSubgraph(HSubgraph* graph,
2267 ZoneList<Statement*>* stmts) {
2268 SubgraphScope scope(this, graph);
2269 VisitStatements(stmts);
2270}
2271
2272
2273HInstruction* HGraphBuilder::AddInstruction(HInstruction* instr) {
2274 ASSERT(current_subgraph_->HasExit());
2275 current_subgraph_->exit_block()->AddInstruction(instr);
2276 return instr;
2277}
2278
2279
2280void HGraphBuilder::AddSimulate(int id) {
2281 ASSERT(current_subgraph_->HasExit());
2282 current_subgraph_->exit_block()->AddSimulate(id);
2283}
2284
2285
2286void HGraphBuilder::AddPhi(HPhi* instr) {
2287 ASSERT(current_subgraph_->HasExit());
2288 current_subgraph_->exit_block()->AddPhi(instr);
2289}
2290
2291
2292void HGraphBuilder::PushAndAdd(HInstruction* instr) {
2293 Push(instr);
2294 AddInstruction(instr);
2295}
2296
2297
2298void HGraphBuilder::PushArgumentsForStubCall(int argument_count) {
2299 const int kMaxStubArguments = 4;
2300 ASSERT_GE(kMaxStubArguments, argument_count);
2301 // Push the arguments on the stack.
2302 HValue* arguments[kMaxStubArguments];
2303 for (int i = argument_count - 1; i >= 0; i--) {
2304 arguments[i] = Pop();
2305 }
2306 for (int i = 0; i < argument_count; i++) {
2307 AddInstruction(new HPushArgument(arguments[i]));
2308 }
2309}
2310
2311
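// Pop the arguments of |call| off the environment stack (right to left),
// wrap each one in an HPushArgument and emit the pushes in left-to-right
// order.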
2312void HGraphBuilder::ProcessCall(HCall* call) {
2313 for (int i = call->argument_count() - 1; i >= 0; --i) {
2314 HValue* value = Pop();
2315 HPushArgument* push = new HPushArgument(value);
2316 call->SetArgumentAt(i, push);
2317 }
2318
2319 for (int i = 0; i < call->argument_count(); ++i) {
2320 AddInstruction(call->PushArgumentAt(i));
2321 }
2322}
2323
2324
2325void HGraphBuilder::SetupScope(Scope* scope) {
2326 // We don't yet handle the function name for named function expressions.
2327 if (scope->function() != NULL) BAILOUT("named function expression");
2328
2329 // We can't handle heap-allocated locals.
2330 if (scope->num_heap_slots() > 0) BAILOUT("heap allocated locals");
2331
2332 HConstant* undefined_constant =
2333 new HConstant(Factory::undefined_value(), Representation::Tagged());
2334 AddInstruction(undefined_constant);
2335 graph_->set_undefined_constant(undefined_constant);
2336
2337 // Set the initial values of parameters including "this". "This" has
2338 // parameter index 0.
2339 int count = scope->num_parameters() + 1;
2340 for (int i = 0; i < count; ++i) {
2341 HInstruction* parameter = AddInstruction(new HParameter(i));
2342 environment()->Bind(i, parameter);
2343 }
2344
2345 // Set the initial values of stack-allocated locals.
 2346  for (int i = count; i < environment()->length(); ++i) {
 2347    environment()->Bind(i, undefined_constant);
2348 }
2349
2350 // Handle the arguments and arguments shadow variables specially (they do
2351 // not have declarations).
2352 if (scope->arguments() != NULL) {
2353 HArgumentsObject* object = new HArgumentsObject;
2354 AddInstruction(object);
2355 graph()->SetArgumentsObject(object);
2356 environment()->Bind(scope->arguments(), object);
2357 environment()->Bind(scope->arguments_shadow(), object);
2358 }
2359}
2360
2361
2362void HGraphBuilder::VisitStatements(ZoneList<Statement*>* statements) {
2363 for (int i = 0; i < statements->length(); i++) {
2364 Visit(statements->at(i));
2365 if (HasStackOverflow() || !current_subgraph_->HasExit()) break;
2366 }
2367}
2368
2369
2370HBasicBlock* HGraphBuilder::CreateBasicBlock(HEnvironment* env) {
2371 HBasicBlock* b = graph()->CreateBasicBlock();
2372 b->SetInitialEnvironment(env);
2373 return b;
2374}
2375
2376
2377HSubgraph* HGraphBuilder::CreateInlinedSubgraph(HEnvironment* outer,
2378 Handle<JSFunction> target,
2379 FunctionLiteral* function) {
2380 HConstant* undefined = graph()->GetConstantUndefined();
2381 HEnvironment* inner =
2382 outer->CopyForInlining(target, function, true, undefined);
2383 HSubgraph* subgraph = new HSubgraph(graph());
2384 subgraph->Initialize(CreateBasicBlock(inner));
2385 return subgraph;
2386}
2387
2388
2389HSubgraph* HGraphBuilder::CreateGotoSubgraph(HEnvironment* env) {
2390 HSubgraph* subgraph = new HSubgraph(graph());
2391 HEnvironment* new_env = env->CopyWithoutHistory();
2392 subgraph->Initialize(CreateBasicBlock(new_env));
2393 return subgraph;
2394}
2395
2396
2397HSubgraph* HGraphBuilder::CreateEmptySubgraph() {
2398 HSubgraph* subgraph = new HSubgraph(graph());
2399 subgraph->Initialize(graph()->CreateBasicBlock());
2400 return subgraph;
2401}
2402
2403
2404HSubgraph* HGraphBuilder::CreateBranchSubgraph(HEnvironment* env) {
2405 HSubgraph* subgraph = new HSubgraph(graph());
2406 HEnvironment* new_env = env->Copy();
2407 subgraph->Initialize(CreateBasicBlock(new_env));
2408 return subgraph;
2409}
2410
2411
2412HSubgraph* HGraphBuilder::CreateLoopHeaderSubgraph(HEnvironment* env) {
2413 HSubgraph* subgraph = new HSubgraph(graph());
2414 HBasicBlock* block = graph()->CreateBasicBlock();
2415 HEnvironment* new_env = env->CopyAsLoopHeader(block);
2416 block->SetInitialEnvironment(new_env);
2417 subgraph->Initialize(block);
2418 subgraph->entry_block()->AttachLoopInformation();
2419 return subgraph;
2420}
2421
2422
2423void HGraphBuilder::VisitBlock(Block* stmt) {
2424 if (stmt->labels() != NULL) {
2425 HSubgraph* block_graph = CreateGotoSubgraph(environment());
2426 ADD_TO_SUBGRAPH(block_graph, stmt->statements());
2427 current_subgraph_->Append(block_graph, stmt);
2428 } else {
2429 VisitStatements(stmt->statements());
2430 }
2431}
2432
2433
2434void HGraphBuilder::VisitExpressionStatement(ExpressionStatement* stmt) {
2435 VisitForEffect(stmt->expression());
2436}
2437
2438
2439void HGraphBuilder::VisitEmptyStatement(EmptyStatement* stmt) {
2440}
2441
2442
2443void HGraphBuilder::VisitIfStatement(IfStatement* stmt) {
2444 if (stmt->condition()->ToBooleanIsTrue()) {
2445 AddSimulate(stmt->ThenId());
2446 Visit(stmt->then_statement());
2447 } else if (stmt->condition()->ToBooleanIsFalse()) {
2448 AddSimulate(stmt->ElseId());
2449 Visit(stmt->else_statement());
2450 } else {
2451 HSubgraph* then_graph = CreateEmptySubgraph();
2452 HSubgraph* else_graph = CreateEmptySubgraph();
2453 VISIT_FOR_CONTROL(stmt->condition(),
2454 then_graph->entry_block(),
2455 else_graph->entry_block());
2456
2457 then_graph->entry_block()->SetJoinId(stmt->ThenId());
2458 ADD_TO_SUBGRAPH(then_graph, stmt->then_statement());
2459
2460 else_graph->entry_block()->SetJoinId(stmt->ElseId());
2461 ADD_TO_SUBGRAPH(else_graph, stmt->else_statement());
2462
2463 current_subgraph_->AppendJoin(then_graph, else_graph, stmt);
2464 }
2465}
2466
2467
2468void HGraphBuilder::VisitContinueStatement(ContinueStatement* stmt) {
2469 current_subgraph_->FinishBreakContinue(stmt->target(), true);
2470}
2471
2472
2473void HGraphBuilder::VisitBreakStatement(BreakStatement* stmt) {
2474 current_subgraph_->FinishBreakContinue(stmt->target(), false);
2475}
2476
2477
2478void HGraphBuilder::VisitReturnStatement(ReturnStatement* stmt) {
2479 AstContext* context = call_context();
2480 if (context == NULL) {
2481 // Not an inlined return, so an actual one.
2482 VISIT_FOR_VALUE(stmt->expression());
2483 HValue* result = environment()->Pop();
2484 subgraph()->FinishExit(new HReturn(result));
2485 } else {
2486 // Return from an inlined function, visit the subexpression in the
2487 // expression context of the call.
2488 if (context->IsTest()) {
2489 TestContext* test = TestContext::cast(context);
2490 VisitForControl(stmt->expression(),
2491 test->if_true(),
2492 test->if_false());
2493 } else {
2494 HValue* return_value = NULL;
2495 if (context->IsEffect()) {
2496 VISIT_FOR_EFFECT(stmt->expression());
2497 return_value = graph()->GetConstantUndefined();
2498 } else {
2499 ASSERT(context->IsValue());
2500 VISIT_FOR_VALUE(stmt->expression());
2501 return_value = environment()->Pop();
2502 }
2503 subgraph()->exit_block()->AddLeaveInlined(return_value,
2504 function_return_);
2505 subgraph()->set_exit_block(NULL);
2506 }
2507 }
2508}
2509
2510
2511void HGraphBuilder::VisitWithEnterStatement(WithEnterStatement* stmt) {
2512 BAILOUT("WithEnterStatement");
2513}
2514
2515
2516void HGraphBuilder::VisitWithExitStatement(WithExitStatement* stmt) {
2517 BAILOUT("WithExitStatement");
2518}
2519
2520
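// Emit the code for a case label into |subgraph| and compare its value
// against |switch_value| using a strict-equality compare on Integer32
// inputs. Returns NULL on stack overflow.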
2521HCompare* HGraphBuilder::BuildSwitchCompare(HSubgraph* subgraph,
2522 HValue* switch_value,
2523 CaseClause* clause) {
2524 AddToSubgraph(subgraph, clause->label());
2525 if (HasStackOverflow()) return NULL;
2526 HValue* clause_value = subgraph->environment()->Pop();
2527 HCompare* compare = new HCompare(switch_value,
2528 clause_value,
2529 Token::EQ_STRICT);
2530 compare->SetInputRepresentation(Representation::Integer32());
2531 subgraph->exit_block()->AddInstruction(compare);
2532 return compare;
2533}
2534
2535
2536void HGraphBuilder::VisitSwitchStatement(SwitchStatement* stmt) {
2537 VISIT_FOR_VALUE(stmt->tag());
2538 // TODO(3168478): simulate added for tag should be enough.
2539 AddSimulate(stmt->EntryId());
2540 HValue* switch_value = Pop();
2541
2542 ZoneList<CaseClause*>* clauses = stmt->cases();
2543 int num_clauses = clauses->length();
2544 if (num_clauses == 0) return;
2545 if (num_clauses > 128) BAILOUT("SwitchStatement: too many clauses");
2546
2547 int num_smi_clauses = num_clauses;
2548 for (int i = 0; i < num_clauses; i++) {
2549 CaseClause* clause = clauses->at(i);
2550 if (clause->is_default()) continue;
2551 clause->RecordTypeFeedback(oracle());
2552 if (!clause->IsSmiCompare()) {
2553 if (i == 0) BAILOUT("SwitchStatement: no smi compares");
2554 // We will deoptimize if the first non-smi compare is reached.
2555 num_smi_clauses = i;
2556 break;
2557 }
2558 if (!clause->label()->IsSmiLiteral()) {
2559 BAILOUT("SwitchStatement: non-literal switch label");
2560 }
2561 }
2562
2563 // The single exit block of the whole switch statement.
2564 HBasicBlock* single_exit_block = graph_->CreateBasicBlock();
2565
2566 // Build a series of empty subgraphs for the comparisons.
2567 // The default clause does not have a comparison subgraph.
2568 ZoneList<HSubgraph*> compare_graphs(num_smi_clauses);
2569 for (int i = 0; i < num_smi_clauses; i++) {
2570 if (clauses->at(i)->is_default()) {
2571 compare_graphs.Add(NULL);
2572 } else {
2573 compare_graphs.Add(CreateEmptySubgraph());
2574 }
2575 }
2576
2577 HSubgraph* prev_graph = current_subgraph_;
2578 HCompare* prev_compare_inst = NULL;
2579 for (int i = 0; i < num_smi_clauses; i++) {
2580 CaseClause* clause = clauses->at(i);
2581 if (clause->is_default()) continue;
2582
2583 // Finish the previous graph by connecting it to the current.
2584 HSubgraph* subgraph = compare_graphs.at(i);
2585 if (prev_compare_inst == NULL) {
2586 ASSERT(prev_graph == current_subgraph_);
2587 prev_graph->exit_block()->Finish(new HGoto(subgraph->entry_block()));
2588 } else {
2589 HBasicBlock* empty = graph()->CreateBasicBlock();
2590 prev_graph->exit_block()->Finish(new HBranch(empty,
2591 subgraph->entry_block(),
2592 prev_compare_inst));
2593 }
2594
2595 // Build instructions for current subgraph.
2596 ASSERT(clause->IsSmiCompare());
2597 prev_compare_inst = BuildSwitchCompare(subgraph, switch_value, clause);
2598 if (HasStackOverflow()) return;
2599
2600 prev_graph = subgraph;
2601 }
2602
2603 // Finish last comparison if there was at least one comparison.
2604 // last_false_block is the (empty) false-block of the last comparison. If
2605 // there are no comparisons at all (a single default clause), it is just
2606 // the last block of the current subgraph.
2607 HBasicBlock* last_false_block = current_subgraph_->exit_block();
2608 if (prev_graph != current_subgraph_) {
2609 last_false_block = graph()->CreateBasicBlock();
2610 HBasicBlock* empty = graph()->CreateBasicBlock();
2611 prev_graph->exit_block()->Finish(new HBranch(empty,
2612 last_false_block,
2613 prev_compare_inst));
2614 }
2615
2616 // If we have a non-smi compare clause, we deoptimize after trying
2617 // all the previous compares.
2618 if (num_smi_clauses < num_clauses) {
2619 last_false_block->Finish(new HDeoptimize);
2620 }
2621
2622 // Build statement blocks, connect them to their comparison block and
2623 // to the previous statement block, if there is a fall-through.
2624 HSubgraph* previous_subgraph = NULL;
2625 for (int i = 0; i < num_clauses; i++) {
2626 CaseClause* clause = clauses->at(i);
 2627    // The subgraph for the clause's statements is only created when it is
 2628    // reachable, either from the corresponding compare or as a fall-through
 2629    // from previous statements.
2630 HSubgraph* subgraph = NULL;
2631
2632 if (i < num_smi_clauses) {
2633 if (clause->is_default()) {
2634 if (!last_false_block->IsFinished()) {
2635 // Default clause: Connect it to the last false block.
2636 subgraph = CreateEmptySubgraph();
2637 last_false_block->Finish(new HGoto(subgraph->entry_block()));
2638 }
2639 } else {
2640 ASSERT(clause->IsSmiCompare());
2641 // Connect with the corresponding comparison.
2642 subgraph = CreateEmptySubgraph();
2643 HBasicBlock* empty =
2644 compare_graphs.at(i)->exit_block()->end()->FirstSuccessor();
2645 empty->Finish(new HGoto(subgraph->entry_block()));
2646 }
2647 }
2648
2649 // Check for fall-through from previous statement block.
2650 if (previous_subgraph != NULL && previous_subgraph->HasExit()) {
2651 if (subgraph == NULL) subgraph = CreateEmptySubgraph();
2652 previous_subgraph->exit_block()->
2653 Finish(new HGoto(subgraph->entry_block()));
2654 }
2655
2656 if (subgraph != NULL) {
2657 ADD_TO_SUBGRAPH(subgraph, clause->statements());
2658 HBasicBlock* break_block = subgraph->BundleBreak(stmt);
2659 if (break_block != NULL) {
2660 break_block->Finish(new HGoto(single_exit_block));
2661 }
2662 }
2663
2664 previous_subgraph = subgraph;
2665 }
2666
2667 // If the last statement block has a fall-through, connect it to the
2668 // single exit block.
2669 if (previous_subgraph != NULL && previous_subgraph->HasExit()) {
2670 previous_subgraph->exit_block()->Finish(new HGoto(single_exit_block));
2671 }
2672
2673 // If there is no default clause finish the last comparison's false target.
2674 if (!last_false_block->IsFinished()) {
2675 last_false_block->Finish(new HGoto(single_exit_block));
2676 }
2677
2678 if (single_exit_block->HasPredecessor()) {
2679 current_subgraph_->set_exit_block(single_exit_block);
2680 } else {
2681 current_subgraph_->set_exit_block(NULL);
2682 }
2683}
2684
2685bool HGraph::HasOsrEntryAt(IterationStatement* statement) {
2686 return statement->OsrEntryId() == info()->osr_ast_id();
2687}
2688
2689
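// If |statement| is the on-stack-replacement entry of the function being
// compiled, split off a separate OSR entry block: it binds an
// HUnknownOSRValue for every environment slot, records an HOsrEntry, and
// joins the normal path at a fresh loop predecessor block.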
2690void HSubgraph::PreProcessOsrEntry(IterationStatement* statement) {
2691 if (!graph()->HasOsrEntryAt(statement)) return;
2692
2693 HBasicBlock* non_osr_entry = graph()->CreateBasicBlock();
2694 HBasicBlock* osr_entry = graph()->CreateBasicBlock();
2695 HValue* true_value = graph()->GetConstantTrue();
2696 HBranch* branch = new HBranch(non_osr_entry, osr_entry, true_value);
2697 exit_block()->Finish(branch);
2698
2699 HBasicBlock* loop_predecessor = graph()->CreateBasicBlock();
2700 non_osr_entry->Goto(loop_predecessor);
2701
2702 int osr_entry_id = statement->OsrEntryId();
2703 // We want the correct environment at the OsrEntry instruction. Build
2704 // it explicitly. The expression stack should be empty.
 2705  int count = osr_entry->last_environment()->length();
 2706  ASSERT(count == (osr_entry->last_environment()->parameter_count() +
2707 osr_entry->last_environment()->local_count()));
2708 for (int i = 0; i < count; ++i) {
2709 HUnknownOSRValue* unknown = new HUnknownOSRValue;
2710 osr_entry->AddInstruction(unknown);
2711 osr_entry->last_environment()->Bind(i, unknown);
2712 }
2713
2714 osr_entry->AddSimulate(osr_entry_id);
2715 osr_entry->AddInstruction(new HOsrEntry(osr_entry_id));
2716 osr_entry->Goto(loop_predecessor);
2717 loop_predecessor->SetJoinId(statement->EntryId());
2718 set_exit_block(loop_predecessor);
2719}
2720
2721
2722void HGraphBuilder::VisitDoWhileStatement(DoWhileStatement* stmt) {
2723 ASSERT(subgraph()->HasExit());
2724 subgraph()->PreProcessOsrEntry(stmt);
2725
2726 HSubgraph* body_graph = CreateLoopHeaderSubgraph(environment());
2727 ADD_TO_SUBGRAPH(body_graph, stmt->body());
2728 body_graph->ResolveContinue(stmt);
2729
2730 if (!body_graph->HasExit() || stmt->cond()->ToBooleanIsTrue()) {
2731 current_subgraph_->AppendEndless(body_graph, stmt);
2732 } else {
2733 HSubgraph* go_back = CreateEmptySubgraph();
2734 HSubgraph* exit = CreateEmptySubgraph();
2735 {
2736 SubgraphScope scope(this, body_graph);
2737 VISIT_FOR_CONTROL(stmt->cond(),
2738 go_back->entry_block(),
2739 exit->entry_block());
2740 go_back->entry_block()->SetJoinId(stmt->BackEdgeId());
2741 exit->entry_block()->SetJoinId(stmt->ExitId());
2742 }
2743 current_subgraph_->AppendDoWhile(body_graph, stmt, go_back, exit);
2744 }
2745}
2746
2747
2748bool HGraphBuilder::ShouldPeel(HSubgraph* cond, HSubgraph* body) {
2749 return FLAG_use_peeling;
2750}
2751
2752
2753void HGraphBuilder::VisitWhileStatement(WhileStatement* stmt) {
2754 ASSERT(subgraph()->HasExit());
2755 subgraph()->PreProcessOsrEntry(stmt);
2756
2757 HSubgraph* cond_graph = NULL;
2758 HSubgraph* body_graph = NULL;
2759 HSubgraph* exit_graph = NULL;
2760
2761 // If the condition is constant true, do not generate a condition subgraph.
2762 if (stmt->cond()->ToBooleanIsTrue()) {
2763 body_graph = CreateLoopHeaderSubgraph(environment());
2764 ADD_TO_SUBGRAPH(body_graph, stmt->body());
2765 } else {
2766 cond_graph = CreateLoopHeaderSubgraph(environment());
2767 body_graph = CreateEmptySubgraph();
2768 exit_graph = CreateEmptySubgraph();
2769 {
2770 SubgraphScope scope(this, cond_graph);
2771 VISIT_FOR_CONTROL(stmt->cond(),
2772 body_graph->entry_block(),
2773 exit_graph->entry_block());
2774 body_graph->entry_block()->SetJoinId(stmt->BodyId());
2775 exit_graph->entry_block()->SetJoinId(stmt->ExitId());
2776 }
2777 ADD_TO_SUBGRAPH(body_graph, stmt->body());
2778 }
2779
2780 body_graph->ResolveContinue(stmt);
2781
2782 if (cond_graph != NULL) {
2783 AppendPeeledWhile(stmt, cond_graph, body_graph, exit_graph);
2784 } else {
2785 // TODO(fschneider): Implement peeling for endless loops as well.
2786 current_subgraph_->AppendEndless(body_graph, stmt);
2787 }
2788}
2789
2790
2791void HGraphBuilder::AppendPeeledWhile(IterationStatement* stmt,
2792 HSubgraph* cond_graph,
2793 HSubgraph* body_graph,
2794 HSubgraph* exit_graph) {
2795 HSubgraph* loop = NULL;
2796 if (body_graph->HasExit() && stmt != peeled_statement_ &&
2797 ShouldPeel(cond_graph, body_graph)) {
2798 // Save the last peeled iteration statement to prevent infinite recursion.
2799 IterationStatement* outer_peeled_statement = peeled_statement_;
2800 peeled_statement_ = stmt;
2801 loop = CreateGotoSubgraph(body_graph->environment());
2802 ADD_TO_SUBGRAPH(loop, stmt);
2803 peeled_statement_ = outer_peeled_statement;
2804 }
2805 current_subgraph_->AppendWhile(cond_graph, body_graph, stmt, loop,
2806 exit_graph);
2807}
2808
2809
2810void HGraphBuilder::VisitForStatement(ForStatement* stmt) {
2811 // Only visit the init statement in the peeled part of the loop.
2812 if (stmt->init() != NULL && peeled_statement_ != stmt) {
2813 Visit(stmt->init());
2814 CHECK_BAILOUT;
2815 }
2816 ASSERT(subgraph()->HasExit());
2817 subgraph()->PreProcessOsrEntry(stmt);
2818
2819 HSubgraph* cond_graph = NULL;
2820 HSubgraph* body_graph = NULL;
2821 HSubgraph* exit_graph = NULL;
2822 if (stmt->cond() != NULL) {
2823 cond_graph = CreateLoopHeaderSubgraph(environment());
2824 body_graph = CreateEmptySubgraph();
2825 exit_graph = CreateEmptySubgraph();
2826 {
2827 SubgraphScope scope(this, cond_graph);
2828 VISIT_FOR_CONTROL(stmt->cond(),
2829 body_graph->entry_block(),
2830 exit_graph->entry_block());
2831 body_graph->entry_block()->SetJoinId(stmt->BodyId());
2832 exit_graph->entry_block()->SetJoinId(stmt->ExitId());
2833 }
2834 } else {
2835 body_graph = CreateLoopHeaderSubgraph(environment());
2836 }
2837 ADD_TO_SUBGRAPH(body_graph, stmt->body());
2838
2839 HSubgraph* next_graph = NULL;
2840 body_graph->ResolveContinue(stmt);
2841
2842 if (stmt->next() != NULL && body_graph->HasExit()) {
2843 next_graph = CreateGotoSubgraph(body_graph->environment());
2844 ADD_TO_SUBGRAPH(next_graph, stmt->next());
2845 body_graph->Append(next_graph, NULL);
2846 next_graph->entry_block()->SetJoinId(stmt->ContinueId());
2847 }
2848
2849 if (cond_graph != NULL) {
2850 AppendPeeledWhile(stmt, cond_graph, body_graph, exit_graph);
2851 } else {
2852 current_subgraph_->AppendEndless(body_graph, stmt);
2853 }
2854}
2855
2856
2857void HGraphBuilder::VisitForInStatement(ForInStatement* stmt) {
2858 BAILOUT("ForInStatement");
2859}
2860
2861
2862void HGraphBuilder::VisitTryCatchStatement(TryCatchStatement* stmt) {
2863 BAILOUT("TryCatchStatement");
2864}
2865
2866
2867void HGraphBuilder::VisitTryFinallyStatement(TryFinallyStatement* stmt) {
2868 BAILOUT("TryFinallyStatement");
2869}
2870
2871
2872void HGraphBuilder::VisitDebuggerStatement(DebuggerStatement* stmt) {
2873 BAILOUT("DebuggerStatement");
2874}
2875
2876
2877void HGraphBuilder::VisitFunctionLiteral(FunctionLiteral* expr) {
2878 Handle<SharedFunctionInfo> shared_info =
2879 Compiler::BuildFunctionInfo(expr, graph_->info()->script());
2880 CHECK_BAILOUT;
2881 HFunctionLiteral* instr =
2882 new HFunctionLiteral(shared_info, expr->pretenure());
2883 ast_context()->ReturnInstruction(instr, expr->id());
2884}
2885
2886
2887void HGraphBuilder::VisitSharedFunctionInfoLiteral(
2888 SharedFunctionInfoLiteral* expr) {
2889 BAILOUT("SharedFunctionInfoLiteral");
2890}
2891
2892
2893void HGraphBuilder::VisitConditional(Conditional* expr) {
2894 HSubgraph* then_graph = CreateEmptySubgraph();
2895 HSubgraph* else_graph = CreateEmptySubgraph();
2896 VISIT_FOR_CONTROL(expr->condition(),
2897 then_graph->entry_block(),
2898 else_graph->entry_block());
2899
2900 then_graph->entry_block()->SetJoinId(expr->ThenId());
2901 ADD_TO_SUBGRAPH(then_graph, expr->then_expression());
2902
2903 else_graph->entry_block()->SetJoinId(expr->ElseId());
2904 ADD_TO_SUBGRAPH(else_graph, expr->else_expression());
2905
2906 current_subgraph_->AppendJoin(then_graph, else_graph, expr);
2907 ast_context()->ReturnValue(Pop());
2908}
2909
2910
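// Look up |var| as a property of the global object and bail out unless it
// is an ordinary data property whose cell already exists and, for stores,
// is not read-only.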
2911void HGraphBuilder::LookupGlobalPropertyCell(Variable* var,
2912 LookupResult* lookup,
2913 bool is_store) {
2914 if (var->is_this()) {
2915 BAILOUT("global this reference");
2916 }
2917 if (!graph()->info()->has_global_object()) {
2918 BAILOUT("no global object to optimize VariableProxy");
2919 }
2920 Handle<GlobalObject> global(graph()->info()->global_object());
2921 global->Lookup(*var->name(), lookup);
2922 if (!lookup->IsProperty()) {
2923 BAILOUT("global variable cell not yet introduced");
2924 }
2925 if (lookup->type() != NORMAL) {
2926 BAILOUT("global variable has accessors");
2927 }
2928 if (is_store && lookup->IsReadOnly()) {
2929 BAILOUT("read-only global variable");
2930 }
2931}
2932
2933
2934void HGraphBuilder::VisitVariableProxy(VariableProxy* expr) {
2935 Variable* variable = expr->AsVariable();
2936 if (variable == NULL) {
2937 BAILOUT("reference to rewritten variable");
2938 } else if (variable->IsStackAllocated()) {
2939 if (environment()->Lookup(variable)->CheckFlag(HValue::kIsArguments)) {
2940 BAILOUT("unsupported context for arguments object");
2941 }
2942 ast_context()->ReturnValue(environment()->Lookup(variable));
2943 } else if (variable->is_global()) {
2944 LookupResult lookup;
2945 LookupGlobalPropertyCell(variable, &lookup, false);
2946 CHECK_BAILOUT;
2947
2948 Handle<GlobalObject> global(graph()->info()->global_object());
2949 // TODO(3039103): Handle global property load through an IC call when access
2950 // checks are enabled.
2951 if (global->IsAccessCheckNeeded()) {
2952 BAILOUT("global object requires access check");
2953 }
2954 Handle<JSGlobalPropertyCell> cell(global->GetPropertyCell(&lookup));
2955 bool check_hole = !lookup.IsDontDelete() || lookup.IsReadOnly();
2956 HLoadGlobal* instr = new HLoadGlobal(cell, check_hole);
2957 ast_context()->ReturnInstruction(instr, expr->id());
2958 } else {
2959 BAILOUT("reference to non-stack-allocated/non-global variable");
2960 }
2961}
2962
2963
2964void HGraphBuilder::VisitLiteral(Literal* expr) {
2965 HConstant* instr = new HConstant(expr->handle(), Representation::Tagged());
2966 ast_context()->ReturnInstruction(instr, expr->id());
2967}
2968
2969
2970void HGraphBuilder::VisitRegExpLiteral(RegExpLiteral* expr) {
2971 HRegExpLiteral* instr = new HRegExpLiteral(expr->pattern(),
2972 expr->flags(),
2973 expr->literal_index());
2974 ast_context()->ReturnInstruction(instr, expr->id());
2975}
2976
2977
2978void HGraphBuilder::VisitObjectLiteral(ObjectLiteral* expr) {
2979 HObjectLiteral* literal = (new HObjectLiteral(expr->constant_properties(),
2980 expr->fast_elements(),
2981 expr->literal_index(),
2982 expr->depth()));
2983 // The object is expected in the bailout environment during computation
2984 // of the property values and is the value of the entire expression.
2985 PushAndAdd(literal);
2986
2987 expr->CalculateEmitStore();
2988
2989 for (int i = 0; i < expr->properties()->length(); i++) {
2990 ObjectLiteral::Property* property = expr->properties()->at(i);
2991 if (property->IsCompileTimeValue()) continue;
2992
2993 Literal* key = property->key();
2994 Expression* value = property->value();
2995
2996 switch (property->kind()) {
2997 case ObjectLiteral::Property::MATERIALIZED_LITERAL:
2998 ASSERT(!CompileTimeValue::IsCompileTimeValue(value));
2999 // Fall through.
3000 case ObjectLiteral::Property::COMPUTED:
3001 if (key->handle()->IsSymbol()) {
3002 if (property->emit_store()) {
3003 VISIT_FOR_VALUE(value);
3004 HValue* value = Pop();
3005 Handle<String> name = Handle<String>::cast(key->handle());
3006 AddInstruction(new HStoreNamedGeneric(literal, name, value));
3007 AddSimulate(key->id());
3008 } else {
3009 VISIT_FOR_EFFECT(value);
3010 }
3011 break;
3012 }
3013 // Fall through.
3014 case ObjectLiteral::Property::PROTOTYPE:
3015 case ObjectLiteral::Property::SETTER:
3016 case ObjectLiteral::Property::GETTER:
3017 BAILOUT("Object literal with complex property");
3018 default: UNREACHABLE();
3019 }
3020 }
3021 ast_context()->ReturnValue(Pop());
3022}
3023
3024
3025void HGraphBuilder::VisitArrayLiteral(ArrayLiteral* expr) {
3026 ZoneList<Expression*>* subexprs = expr->values();
3027 int length = subexprs->length();
3028
3029 HArrayLiteral* literal = new HArrayLiteral(expr->constant_elements(),
3030 length,
3031 expr->literal_index(),
3032 expr->depth());
3033 // The array is expected in the bailout environment during computation
3034 // of the property values and is the value of the entire expression.
3035 PushAndAdd(literal);
3036
3037 HLoadElements* elements = NULL;
3038
3039 for (int i = 0; i < length; i++) {
3040 Expression* subexpr = subexprs->at(i);
3041 // If the subexpression is a literal or a simple materialized literal it
3042 // is already set in the cloned array.
3043 if (CompileTimeValue::IsCompileTimeValue(subexpr)) continue;
3044
3045 VISIT_FOR_VALUE(subexpr);
3046 HValue* value = Pop();
3047 if (!Smi::IsValid(i)) BAILOUT("Non-smi key in array literal");
3048
3049 // Load the elements array before the first store.
3050 if (elements == NULL) {
3051 elements = new HLoadElements(literal);
3052 AddInstruction(elements);
3053 }
3054
3055 HValue* key = AddInstruction(new HConstant(Handle<Object>(Smi::FromInt(i)),
3056 Representation::Integer32()));
3057 AddInstruction(new HStoreKeyedFastElement(elements, key, value));
3058 AddSimulate(expr->GetIdForElement(i));
3059 }
3060 ast_context()->ReturnValue(Pop());
3061}
3062
3063
3064void HGraphBuilder::VisitCatchExtensionObject(CatchExtensionObject* expr) {
3065 BAILOUT("CatchExtensionObject");
3066}
3067
3068
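// Build a type switch on the receiver's map: a chain of
// HCompareMapAndBranch checks, one per map, each branching to the
// corresponding call subgraph, with the last subgraph as the default case.
// All subgraph exits are joined in a new block, which is returned (or NULL
// if no subgraph has an exit).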
3069HBasicBlock* HGraphBuilder::BuildTypeSwitch(ZoneMapList* maps,
3070 ZoneList<HSubgraph*>* subgraphs,
3071 HValue* receiver,
3072 int join_id) {
3073 ASSERT(subgraphs->length() == (maps->length() + 1));
3074
3075 // Build map compare subgraphs for all but the first map.
3076 ZoneList<HSubgraph*> map_compare_subgraphs(maps->length() - 1);
3077 for (int i = maps->length() - 1; i > 0; --i) {
3078 HSubgraph* subgraph = CreateBranchSubgraph(environment());
3079 SubgraphScope scope(this, subgraph);
3080 HSubgraph* else_subgraph =
3081 (i == (maps->length() - 1))
3082 ? subgraphs->last()
3083 : map_compare_subgraphs.last();
3084 current_subgraph_->exit_block()->Finish(
3085 new HCompareMapAndBranch(receiver,
3086 maps->at(i),
3087 subgraphs->at(i)->entry_block(),
3088 else_subgraph->entry_block()));
3089 map_compare_subgraphs.Add(subgraph);
3090 }
3091
3092 // Generate first map check to end the current block.
3093 AddInstruction(new HCheckNonSmi(receiver));
3094 HSubgraph* else_subgraph =
3095 (maps->length() == 1) ? subgraphs->at(1) : map_compare_subgraphs.last();
3096 current_subgraph_->exit_block()->Finish(
3097 new HCompareMapAndBranch(receiver,
3098 Handle<Map>(maps->first()),
3099 subgraphs->first()->entry_block(),
3100 else_subgraph->entry_block()));
3101
3102 // Join all the call subgraphs in a new basic block and make
3103 // this basic block the current basic block.
3104 HBasicBlock* join_block = graph_->CreateBasicBlock();
3105 for (int i = 0; i < subgraphs->length(); ++i) {
 3106    HSubgraph* subgraph = subgraphs->at(i);
3107 if (subgraph->HasExit()) {
3108 // In an effect context the value of the type switch is not needed.
3109 // There is no need to merge it at the join block only to discard it.
3110 HBasicBlock* subgraph_exit = subgraph->exit_block();
3111 if (ast_context()->IsEffect()) {
3112 subgraph_exit->last_environment()->Drop(1);
3113 }
3114 subgraph_exit->Goto(join_block);
 3115    }
3116 }
3117
3118 if (join_block->predecessors()->is_empty()) return NULL;
3119 join_block->SetJoinId(join_id);
3120 return join_block;
3121}
3122
3123
3124// Sets the lookup result and returns true if the store can be inlined.
3125static bool ComputeStoredField(Handle<Map> type,
3126 Handle<String> name,
3127 LookupResult* lookup) {
3128 type->LookupInDescriptors(NULL, *name, lookup);
3129 if (!lookup->IsPropertyOrTransition()) return false;
3130 if (lookup->type() == FIELD) return true;
3131 return (lookup->type() == MAP_TRANSITION) &&
3132 (type->unused_property_fields() > 0);
3133}
3134
3135
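// Compute the index of the stored field. Negative indices denote in-object
// properties, non-negative ones index the out-of-object property array
// (see BuildStoreNamedField below).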
3136static int ComputeStoredFieldIndex(Handle<Map> type,
3137 Handle<String> name,
3138 LookupResult* lookup) {
3139 ASSERT(lookup->type() == FIELD || lookup->type() == MAP_TRANSITION);
3140 if (lookup->type() == FIELD) {
3141 return lookup->GetLocalFieldIndexFromMap(*type);
3142 } else {
3143 Map* transition = lookup->GetTransitionMapFromMap(*type);
3144 return transition->PropertyIndexFor(*name) - type->inobject_properties();
3145 }
3146}
3147
3148
3149HInstruction* HGraphBuilder::BuildStoreNamedField(HValue* object,
3150 Handle<String> name,
3151 HValue* value,
3152 Handle<Map> type,
3153 LookupResult* lookup,
3154 bool smi_and_map_check) {
3155 if (smi_and_map_check) {
3156 AddInstruction(new HCheckNonSmi(object));
3157 AddInstruction(new HCheckMap(object, type));
3158 }
3159
3160 int index = ComputeStoredFieldIndex(type, name, lookup);
3161 bool is_in_object = index < 0;
3162 int offset = index * kPointerSize;
3163 if (index < 0) {
3164 // Negative property indices are in-object properties, indexed
3165 // from the end of the fixed part of the object.
3166 offset += type->instance_size();
3167 } else {
3168 offset += FixedArray::kHeaderSize;
3169 }
3170 HStoreNamedField* instr =
3171 new HStoreNamedField(object, name, value, is_in_object, offset);
3172 if (lookup->type() == MAP_TRANSITION) {
3173 Handle<Map> transition(lookup->GetTransitionMapFromMap(*type));
3174 instr->set_transition(transition);
3175 // TODO(fschneider): Record the new map type of the object in the IR to
3176 // enable elimination of redundant checks after the transition store.
3177 instr->SetFlag(HValue::kChangesMaps);
3178 }
3179 return instr;
3180}
3181
3182
3183HInstruction* HGraphBuilder::BuildStoreNamedGeneric(HValue* object,
3184 Handle<String> name,
3185 HValue* value) {
3186 return new HStoreNamedGeneric(object, name, value);
3187}
3188
3189
3190HInstruction* HGraphBuilder::BuildStoreNamed(HValue* object,
3191 HValue* value,
3192 Expression* expr) {
3193 Property* prop = (expr->AsProperty() != NULL)
3194 ? expr->AsProperty()
3195 : expr->AsAssignment()->target()->AsProperty();
3196 Literal* key = prop->key()->AsLiteral();
3197 Handle<String> name = Handle<String>::cast(key->handle());
3198 ASSERT(!name.is_null());
3199
3200 LookupResult lookup;
3201 ZoneMapList* types = expr->GetReceiverTypes();
3202 bool is_monomorphic = expr->IsMonomorphic() &&
3203 ComputeStoredField(types->first(), name, &lookup);
3204
3205 return is_monomorphic
3206 ? BuildStoreNamedField(object, name, value, types->first(), &lookup,
3207 true) // Needs smi and map check.
3208 : BuildStoreNamedGeneric(object, name, value);
3209}
3210
3211
3212void HGraphBuilder::HandlePolymorphicStoreNamedField(Assignment* expr,
3213 HValue* object,
3214 HValue* value,
3215 ZoneMapList* types,
3216 Handle<String> name) {
3217 int number_of_types = Min(types->length(), kMaxStorePolymorphism);
3218 ZoneMapList maps(number_of_types);
3219 ZoneList<HSubgraph*> subgraphs(number_of_types + 1);
3220 bool needs_generic = (types->length() > kMaxStorePolymorphism);
3221
3222 // Build subgraphs for each of the specific maps.
3223 //
3224 // TODO(ager): We should recognize when the prototype chains for
3225 // different maps are identical. In that case we can avoid
3226 // repeatedly generating the same prototype map checks.
3227 for (int i = 0; i < number_of_types; ++i) {
3228 Handle<Map> map = types->at(i);
3229 LookupResult lookup;
3230 if (ComputeStoredField(map, name, &lookup)) {
3231 maps.Add(map);
3232 HSubgraph* subgraph = CreateBranchSubgraph(environment());
3233 SubgraphScope scope(this, subgraph);
3234 HInstruction* instr =
3235 BuildStoreNamedField(object, name, value, map, &lookup, false);
3236 Push(value);
3237 instr->set_position(expr->position());
3238 AddInstruction(instr);
3239 subgraphs.Add(subgraph);
3240 } else {
3241 needs_generic = true;
3242 }
3243 }
3244
3245 // If none of the properties were named fields we generate a
3246 // generic store.
3247 if (maps.length() == 0) {
3248 HInstruction* instr = new HStoreNamedGeneric(object, name, value);
3249 Push(value);
3250 instr->set_position(expr->position());
3251 AddInstruction(instr);
 3252    if (instr->HasSideEffects()) AddSimulate(expr->AssignmentId());
3253 ast_context()->ReturnValue(Pop());
 3254  } else {
3255 // Build subgraph for generic store through IC.
3256 {
3257 HSubgraph* subgraph = CreateBranchSubgraph(environment());
3258 SubgraphScope scope(this, subgraph);
3259 if (!needs_generic && FLAG_deoptimize_uncommon_cases) {
3260 subgraph->FinishExit(new HDeoptimize());
3261 } else {
3262 HInstruction* instr = new HStoreNamedGeneric(object, name, value);
3263 Push(value);
3264 instr->set_position(expr->position());
3265 AddInstruction(instr);
3266 }
3267 subgraphs.Add(subgraph);
3268 }
3269
3270 HBasicBlock* new_exit_block =
 3271        BuildTypeSwitch(&maps, &subgraphs, object, expr->id());
 3272    subgraph()->set_exit_block(new_exit_block);
 3273    // In an effect context, we did not materialize the value in the
3274 // predecessor environments so there's no need to handle it here.
3275 if (subgraph()->HasExit() && !ast_context()->IsEffect()) {
3276 ast_context()->ReturnValue(Pop());
3277 }
 3278  }
 3279}
3280
3281
3282void HGraphBuilder::HandlePropertyAssignment(Assignment* expr) {
3283 Property* prop = expr->target()->AsProperty();
3284 ASSERT(prop != NULL);
3285 expr->RecordTypeFeedback(oracle());
3286 VISIT_FOR_VALUE(prop->obj());
3287
3288 HValue* value = NULL;
3289 HInstruction* instr = NULL;
3290
3291 if (prop->key()->IsPropertyName()) {
3292 // Named store.
3293 VISIT_FOR_VALUE(expr->value());
3294 value = Pop();
3295 HValue* object = Pop();
3296
3297 Literal* key = prop->key()->AsLiteral();
3298 Handle<String> name = Handle<String>::cast(key->handle());
3299 ASSERT(!name.is_null());
3300
3301 ZoneMapList* types = expr->GetReceiverTypes();
3302 LookupResult lookup;
3303
3304 if (expr->IsMonomorphic()) {
3305 instr = BuildStoreNamed(object, value, expr);
3306
3307 } else if (types != NULL && types->length() > 1) {
3308 HandlePolymorphicStoreNamedField(expr, object, value, types, name);
3309 return;
3310
3311 } else {
3312 instr = new HStoreNamedGeneric(object, name, value);
3313 }
3314
3315 } else {
3316 // Keyed store.
3317 VISIT_FOR_VALUE(prop->key());
3318 VISIT_FOR_VALUE(expr->value());
3319 value = Pop();
3320 HValue* key = Pop();
3321 HValue* object = Pop();
3322
3323 bool is_fast_elements = expr->IsMonomorphic() &&
3324 expr->GetMonomorphicReceiverType()->has_fast_elements();
3325
3326 instr = is_fast_elements
3327 ? BuildStoreKeyedFastElement(object, key, value, expr)
3328 : BuildStoreKeyedGeneric(object, key, value);
3329 }
3330
3331 Push(value);
3332 instr->set_position(expr->position());
3333 AddInstruction(instr);
3334 if (instr->HasSideEffects()) AddSimulate(expr->AssignmentId());
3335 ast_context()->ReturnValue(Pop());
3336}
3337
3338
 3339// Because not every expression has a position and there is no common
3340// superclass of Assignment and CountOperation, we cannot just pass the
3341// owning expression instead of position and ast_id separately.
3342void HGraphBuilder::HandleGlobalVariableAssignment(Variable* var,
3343 HValue* value,
3344 int position,
3345 int ast_id) {
3346 LookupResult lookup;
3347 LookupGlobalPropertyCell(var, &lookup, true);
3348 CHECK_BAILOUT;
3349
3350 Handle<GlobalObject> global(graph()->info()->global_object());
3351 Handle<JSGlobalPropertyCell> cell(global->GetPropertyCell(&lookup));
3352 HInstruction* instr = new HStoreGlobal(value, cell);
3353 instr->set_position(position);
3354 AddInstruction(instr);
3355 if (instr->HasSideEffects()) AddSimulate(ast_id);
3356}
3357
3358
3359void HGraphBuilder::HandleCompoundAssignment(Assignment* expr) {
3360 Expression* target = expr->target();
3361 VariableProxy* proxy = target->AsVariableProxy();
3362 Variable* var = proxy->AsVariable();
3363 Property* prop = target->AsProperty();
3364 ASSERT(var == NULL || prop == NULL);
3365
3366 // We have a second position recorded in the FullCodeGenerator to have
3367 // type feedback for the binary operation.
3368 BinaryOperation* operation = expr->binary_operation();
3369 operation->RecordTypeFeedback(oracle());
3370
3371 if (var != NULL) {
3372 if (!var->is_global() && !var->IsStackAllocated()) {
3373 BAILOUT("non-stack/non-global in compound assignment");
3374 }
3375
3376 VISIT_FOR_VALUE(operation);
3377
3378 if (var->is_global()) {
3379 HandleGlobalVariableAssignment(var,
3380 Top(),
3381 expr->position(),
3382 expr->AssignmentId());
3383 } else {
3384 Bind(var, Top());
3385 }
3386 ast_context()->ReturnValue(Pop());
3387
3388 } else if (prop != NULL) {
3389 prop->RecordTypeFeedback(oracle());
3390
3391 if (prop->key()->IsPropertyName()) {
3392 // Named property.
3393 VISIT_FOR_VALUE(prop->obj());
3394 HValue* obj = Top();
3395
3396 HInstruction* load = NULL;
3397 if (prop->IsMonomorphic()) {
3398 Handle<String> name = prop->key()->AsLiteral()->AsPropertyName();
3399 Handle<Map> map = prop->GetReceiverTypes()->first();
3400 load = BuildLoadNamed(obj, prop, map, name);
3401 } else {
3402 load = BuildLoadNamedGeneric(obj, prop);
3403 }
3404 PushAndAdd(load);
3405 if (load->HasSideEffects()) AddSimulate(expr->CompoundLoadId());
3406
3407 VISIT_FOR_VALUE(expr->value());
3408 HValue* right = Pop();
3409 HValue* left = Pop();
3410
3411 HInstruction* instr = BuildBinaryOperation(operation, left, right);
3412 PushAndAdd(instr);
3413 if (instr->HasSideEffects()) AddSimulate(operation->id());
3414
3415 HInstruction* store = BuildStoreNamed(obj, instr, prop);
3416 AddInstruction(store);
3417 // Drop the simulated receiver and value. Return the value.
3418 Drop(2);
3419 Push(instr);
3420 if (store->HasSideEffects()) AddSimulate(expr->AssignmentId());
3421 ast_context()->ReturnValue(Pop());
3422
3423 } else {
3424 // Keyed property.
3425 VISIT_FOR_VALUE(prop->obj());
3426 VISIT_FOR_VALUE(prop->key());
3427 HValue* obj = environment()->ExpressionStackAt(1);
3428 HValue* key = environment()->ExpressionStackAt(0);
3429
3430 bool is_fast_elements = prop->IsMonomorphic() &&
3431 prop->GetMonomorphicReceiverType()->has_fast_elements();
3432
3433 HInstruction* load = is_fast_elements
3434 ? BuildLoadKeyedFastElement(obj, key, prop)
3435 : BuildLoadKeyedGeneric(obj, key);
3436 PushAndAdd(load);
3437 if (load->HasSideEffects()) AddSimulate(expr->CompoundLoadId());
3438
3439 VISIT_FOR_VALUE(expr->value());
3440 HValue* right = Pop();
3441 HValue* left = Pop();
3442
3443 HInstruction* instr = BuildBinaryOperation(operation, left, right);
3444 PushAndAdd(instr);
3445 if (instr->HasSideEffects()) AddSimulate(operation->id());
3446
3447 HInstruction* store = is_fast_elements
3448 ? BuildStoreKeyedFastElement(obj, key, instr, prop)
3449 : BuildStoreKeyedGeneric(obj, key, instr);
3450 AddInstruction(store);
3451 // Drop the simulated receiver, key, and value. Return the value.
3452 Drop(3);
3453 Push(instr);
3454 if (store->HasSideEffects()) AddSimulate(expr->AssignmentId());
3455 ast_context()->ReturnValue(Pop());
3456 }
3457
3458 } else {
3459 BAILOUT("invalid lhs in compound assignment");
3460 }
3461}
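// Informal sketch of what the handler above produces for a compound
// assignment such as obj.x += y (example not taken from the source): the
// current value is loaded, the binary operation is applied, and the result
// is stored back, with an AddSimulate after each step that has side effects
// so the deoptimizer can resume at the matching point in unoptimized code.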
3462
3463
3464void HGraphBuilder::VisitAssignment(Assignment* expr) {
3465 VariableProxy* proxy = expr->target()->AsVariableProxy();
3466 Variable* var = proxy->AsVariable();
3467 Property* prop = expr->target()->AsProperty();
3468 ASSERT(var == NULL || prop == NULL);
3469
3470 if (expr->is_compound()) {
3471 HandleCompoundAssignment(expr);
3472 return;
3473 }
3474
3475 if (var != NULL) {
3476 if (proxy->IsArguments()) BAILOUT("assignment to arguments");
3477
3478 // Handle the assignment.
3479 if (var->is_global()) {
3480 VISIT_FOR_VALUE(expr->value());
3481 HandleGlobalVariableAssignment(var,
3482 Top(),
3483 expr->position(),
3484 expr->AssignmentId());
3485 } else {
3486 // We allow reference to the arguments object only in assignments
3487 // to local variables to make sure that the arguments object does
3488 // not escape and is not modified.
3489 VariableProxy* rhs = expr->value()->AsVariableProxy();
3490 if (rhs != NULL &&
3491 rhs->var()->IsStackAllocated() &&
3492 environment()->Lookup(rhs->var())->CheckFlag(HValue::kIsArguments)) {
3493 Push(environment()->Lookup(rhs->var()));
3494 } else {
3495 VISIT_FOR_VALUE(expr->value());
3496 }
3497 Bind(proxy->var(), Top());
3498 }
3499 // Return the value.
3500 ast_context()->ReturnValue(Pop());
3501
3502 } else if (prop != NULL) {
3503 HandlePropertyAssignment(expr);
3504 } else {
3505 BAILOUT("unsupported invalid lhs");
3506 }
3507}
3508
3509
3510void HGraphBuilder::VisitThrow(Throw* expr) {
3511 // We don't optimize functions with invalid left-hand sides in
3512 // assignments, count operations, or for-in. Consequently throw can
3513 // currently only occur in an effect context.
3514 ASSERT(ast_context()->IsEffect());
3515 VISIT_FOR_VALUE(expr->exception());
3516
3517 HValue* value = environment()->Pop();
3518 HControlInstruction* instr = new HThrow(value);
3519 instr->set_position(expr->position());
3520 current_subgraph_->FinishExit(instr);
3521}
3522
3523
3524void HGraphBuilder::HandlePolymorphicLoadNamedField(Property* expr,
3525 HValue* object,
3526 ZoneMapList* types,
3527 Handle<String> name) {
3528 int number_of_types = Min(types->length(), kMaxLoadPolymorphism);
3529 ZoneMapList maps(number_of_types);
3530 ZoneList<HSubgraph*> subgraphs(number_of_types + 1);
3531 bool needs_generic = (types->length() > kMaxLoadPolymorphism);
3532
3533 // Build subgraphs for each of the specific maps.
3534 //
3535 // TODO(ager): We should recognize when the prototype chains for
3536 // different maps are identical. In that case we can avoid
3537 // repeatedly generating the same prototype map checks.
3538 for (int i = 0; i < number_of_types; ++i) {
3539 Handle<Map> map = types->at(i);
3540 LookupResult lookup;
3541 map->LookupInDescriptors(NULL, *name, &lookup);
3542 if (lookup.IsProperty() && lookup.type() == FIELD) {
3543 maps.Add(map);
3544 HSubgraph* subgraph = CreateBranchSubgraph(environment());
3545 SubgraphScope scope(this, subgraph);
3546 HLoadNamedField* instr =
3547 BuildLoadNamedField(object, expr, map, &lookup, false);
3548 instr->set_position(expr->position());
3549 instr->ClearFlag(HValue::kUseGVN); // Don't do GVN on polymorphic loads.
3550 PushAndAdd(instr);
3551 subgraphs.Add(subgraph);
3552 } else {
3553 needs_generic = true;
3554 }
3555 }
3556
3557 // If none of the properties were named fields we generate a
3558 // generic load.
3559 if (maps.length() == 0) {
3560 HInstruction* instr = BuildLoadNamedGeneric(object, expr);
3561 instr->set_position(expr->position());
3562 ast_context()->ReturnInstruction(instr, expr->id());
3563 } else {
3564 // Build subgraph for generic load through IC.
3565 {
3566 HSubgraph* subgraph = CreateBranchSubgraph(environment());
3567 SubgraphScope scope(this, subgraph);
3568 if (!needs_generic && FLAG_deoptimize_uncommon_cases) {
3569 subgraph->FinishExit(new HDeoptimize());
3570 } else {
3571 HInstruction* instr = BuildLoadNamedGeneric(object, expr);
3572 instr->set_position(expr->position());
3573 PushAndAdd(instr);
3574 }
3575 subgraphs.Add(subgraph);
3576 }
3577
3578 HBasicBlock* new_exit_block =
3579 BuildTypeSwitch(&maps, &subgraphs, object, expr->id());
3580 subgraph()->set_exit_block(new_exit_block);
3581 // In an effect context, we did not materialize the value in the
3582 // predecessor environments, so there's no need to handle it here.
3583 if (subgraph()->HasExit() && !ast_context()->IsEffect()) {
3584 ast_context()->ReturnValue(Pop());
3585 }
3586 }
3587}
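// Rough shape of the graph built above (a description, not a guarantee of
// the exact block layout): one branch subgraph per receiver map that maps
// the name to a real field, plus a final subgraph that either deoptimizes
// (when all maps were handled and FLAG_deoptimize_uncommon_cases is set) or
// falls back to a generic IC load; BuildTypeSwitch, defined elsewhere,
// joins them by dispatching on the receiver's map.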
3588
3589
3590HLoadNamedField* HGraphBuilder::BuildLoadNamedField(HValue* object,
3591 Property* expr,
3592 Handle<Map> type,
3593 LookupResult* lookup,
3594 bool smi_and_map_check) {
3595 if (smi_and_map_check) {
3596 AddInstruction(new HCheckNonSmi(object));
3597 AddInstruction(new HCheckMap(object, type));
3598 }
3599
3600 int index = lookup->GetLocalFieldIndexFromMap(*type);
3601 if (index < 0) {
3602 // Negative property indices are in-object properties, indexed
3603 // from the end of the fixed part of the object.
3604 int offset = (index * kPointerSize) + type->instance_size();
3605 return new HLoadNamedField(object, true, offset);
3606 } else {
3607 // Non-negative property indices are in the properties array.
3608 int offset = (index * kPointerSize) + FixedArray::kHeaderSize;
3609 return new HLoadNamedField(object, false, offset);
3610 }
3611}
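// Worked example of the offset computation above, assuming a 32-bit build
// (kPointerSize == 4) and an instance_size() of 0x30: an in-object index of
// -1 gives offset -4 + 0x30 = 0x2c, the last word of the object itself,
// while a properties-array index of 2 gives FixedArray::kHeaderSize + 8.
// The boolean passed to HLoadNamedField records which base the offset is
// relative to.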
3612
3613
3614HInstruction* HGraphBuilder::BuildLoadNamedGeneric(HValue* obj,
3615 Property* expr) {
3616 ASSERT(expr->key()->IsPropertyName());
3617 Handle<Object> name = expr->key()->AsLiteral()->handle();
3618 return new HLoadNamedGeneric(obj, name);
3619}
3620
3621
3622HInstruction* HGraphBuilder::BuildLoadNamed(HValue* obj,
3623 Property* expr,
3624 Handle<Map> map,
3625 Handle<String> name) {
3626 LookupResult lookup;
3627 map->LookupInDescriptors(NULL, *name, &lookup);
3628 if (lookup.IsProperty() && lookup.type() == FIELD) {
3629 return BuildLoadNamedField(obj,
3630 expr,
3631 map,
3632 &lookup,
3633 true);
3634 } else if (lookup.IsProperty() && lookup.type() == CONSTANT_FUNCTION) {
3635 AddInstruction(new HCheckNonSmi(obj));
3636 AddInstruction(new HCheckMap(obj, map));
3637 Handle<JSFunction> function(lookup.GetConstantFunctionFromMap(*map));
3638 return new HConstant(function, Representation::Tagged());
3639 } else {
3640 return BuildLoadNamedGeneric(obj, expr);
3641 }
3642}
3643
3644
3645HInstruction* HGraphBuilder::BuildLoadKeyedGeneric(HValue* object,
3646 HValue* key) {
3647 return new HLoadKeyedGeneric(object, key);
3648}
3649
3650
3651HInstruction* HGraphBuilder::BuildLoadKeyedFastElement(HValue* object,
3652 HValue* key,
3653 Property* expr) {
3654 ASSERT(!expr->key()->IsPropertyName() && expr->IsMonomorphic());
3655 AddInstruction(new HCheckNonSmi(object));
3656 Handle<Map> map = expr->GetMonomorphicReceiverType();
3657 ASSERT(map->has_fast_elements());
3658 AddInstruction(new HCheckMap(object, map));
3659 bool is_array = (map->instance_type() == JS_ARRAY_TYPE);
3660 HLoadElements* elements = new HLoadElements(object);
3661 HInstruction* length = NULL;
3662 if (is_array) {
3663 length = AddInstruction(new HJSArrayLength(object));
3664 AddInstruction(new HBoundsCheck(key, length));
3665 AddInstruction(elements);
3666 } else {
3667 AddInstruction(elements);
3668 length = AddInstruction(new HFixedArrayLength(elements));
3669 AddInstruction(new HBoundsCheck(key, length));
3670 }
3671 return new HLoadKeyedFastElement(elements, key);
3672}
3673
3674
3675HInstruction* HGraphBuilder::BuildStoreKeyedGeneric(HValue* object,
3676 HValue* key,
3677 HValue* value) {
3678 return new HStoreKeyedGeneric(object, key, value);
3679}
3680
3681
3682HInstruction* HGraphBuilder::BuildStoreKeyedFastElement(HValue* object,
3683 HValue* key,
3684 HValue* val,
3685 Expression* expr) {
3686 ASSERT(expr->IsMonomorphic());
3687 AddInstruction(new HCheckNonSmi(object));
3688 Handle<Map> map = expr->GetMonomorphicReceiverType();
3689 ASSERT(map->has_fast_elements());
3690 AddInstruction(new HCheckMap(object, map));
3691 HInstruction* elements = AddInstruction(new HLoadElements(object));
3692 AddInstruction(new HCheckMap(elements, Factory::fixed_array_map()));
3693 bool is_array = (map->instance_type() == JS_ARRAY_TYPE);
3694 HInstruction* length = NULL;
3695 if (is_array) {
3696 length = AddInstruction(new HJSArrayLength(object));
3697 } else {
3698 length = AddInstruction(new HFixedArrayLength(elements));
3699 }
3700 AddInstruction(new HBoundsCheck(key, length));
3701 return new HStoreKeyedFastElement(elements, key, val);
3702}
3703
3704
3705bool HGraphBuilder::TryArgumentsAccess(Property* expr) {
3706 VariableProxy* proxy = expr->obj()->AsVariableProxy();
3707 if (proxy == NULL) return false;
3708 if (!proxy->var()->IsStackAllocated()) return false;
3709 if (!environment()->Lookup(proxy->var())->CheckFlag(HValue::kIsArguments)) {
3710 return false;
3711 }
3712
3713 HInstruction* result = NULL;
3714 if (expr->key()->IsPropertyName()) {
3715 Handle<String> name = expr->key()->AsLiteral()->AsPropertyName();
3716 if (!name->IsEqualTo(CStrVector("length"))) return false;
3717 HInstruction* elements = AddInstruction(new HArgumentsElements);
3718 result = new HArgumentsLength(elements);
3719 } else {
3720 VisitForValue(expr->key());
3721 if (HasStackOverflow()) return false;
3722 HValue* key = Pop();
3723 HInstruction* elements = AddInstruction(new HArgumentsElements);
3724 HInstruction* length = AddInstruction(new HArgumentsLength(elements));
3725 AddInstruction(new HBoundsCheck(key, length));
3726 result = new HAccessArgumentsAt(elements, length, key);
3727 }
3728 ast_context()->ReturnInstruction(result, expr->id());
3729 return true;
3730}
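// Example of the accesses the fast path above covers (hypothetical JS):
//   function f() { return arguments.length + arguments[0]; }
// 'arguments.length' becomes HArgumentsElements + HArgumentsLength, and a
// keyed access becomes a bounds-checked HAccessArgumentsAt, so the values
// are read from the frame without materializing an arguments object.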
3731
3732
3733void HGraphBuilder::VisitProperty(Property* expr) {
3734 expr->RecordTypeFeedback(oracle());
3735
3736 if (TryArgumentsAccess(expr)) return;
3737 CHECK_BAILOUT;
3738
3739 VISIT_FOR_VALUE(expr->obj());
3740
3741 HInstruction* instr = NULL;
3742 if (expr->IsArrayLength()) {
3743 HValue* array = Pop();
3744 AddInstruction(new HCheckNonSmi(array));
3745 AddInstruction(new HCheckInstanceType(array, JS_ARRAY_TYPE, JS_ARRAY_TYPE));
3746 instr = new HJSArrayLength(array);
3747
3748 } else if (expr->IsFunctionPrototype()) {
3749 HValue* function = Pop();
3750 AddInstruction(new HCheckNonSmi(function));
3751 instr = new HLoadFunctionPrototype(function);
3752
3753 } else if (expr->key()->IsPropertyName()) {
3754 Handle<String> name = expr->key()->AsLiteral()->AsPropertyName();
3755 ZoneMapList* types = expr->GetReceiverTypes();
3756
3757 HValue* obj = Pop();
3758 if (expr->IsMonomorphic()) {
3759 instr = BuildLoadNamed(obj, expr, types->first(), name);
3760 } else if (types != NULL && types->length() > 1) {
3761 HandlePolymorphicLoadNamedField(expr, obj, types, name);
3762 return;
3763
3764 } else {
3765 instr = BuildLoadNamedGeneric(obj, expr);
3766 }
3767
3768 } else {
3769 VISIT_FOR_VALUE(expr->key());
3770
3771 HValue* key = Pop();
3772 HValue* obj = Pop();
3773
3774 bool is_fast_elements = expr->IsMonomorphic() &&
3775 expr->GetMonomorphicReceiverType()->has_fast_elements();
3776
3777 instr = is_fast_elements
3778 ? BuildLoadKeyedFastElement(obj, key, expr)
3779 : BuildLoadKeyedGeneric(obj, key);
3780 }
3781 instr->set_position(expr->position());
3782 ast_context()->ReturnInstruction(instr, expr->id());
3783}
3784
3785
3786void HGraphBuilder::AddCheckConstantFunction(Call* expr,
3787 HValue* receiver,
3788 Handle<Map> receiver_map,
3789 bool smi_and_map_check) {
3790 // Constant functions have the nice property that the map will change if they
3791 // are overwritten. Therefore it is enough to check the map of the holder and
3792 // its prototypes.
3793 if (smi_and_map_check) {
3794 AddInstruction(new HCheckNonSmi(receiver));
3795 AddInstruction(new HCheckMap(receiver, receiver_map));
3796 }
3797 if (!expr->holder().is_null()) {
3798 AddInstruction(new HCheckPrototypeMaps(receiver,
3799 expr->holder(),
3800 receiver_map));
3801 }
3802}
3803
3804
3805void HGraphBuilder::HandlePolymorphicCallNamed(Call* expr,
3806 HValue* receiver,
3807 ZoneMapList* types,
3808 Handle<String> name) {
3809 int argument_count = expr->arguments()->length() + 1; // Plus receiver.
3810 int number_of_types = Min(types->length(), kMaxCallPolymorphism);
3811 ZoneMapList maps(number_of_types);
3812 ZoneList<HSubgraph*> subgraphs(number_of_types + 1);
3813 bool needs_generic = (types->length() > kMaxCallPolymorphism);
3814
3815 // Build subgraphs for each of the specific maps.
3816 //
3817 // TODO(ager): We should recognize when the prototype chains for different
3818 // maps are identical. In that case we can avoid repeatedly generating the
3819 // same prototype map checks.
3820 for (int i = 0; i < number_of_types; ++i) {
3821 Handle<Map> map = types->at(i);
3822 if (expr->ComputeTarget(map, name)) {
3823 maps.Add(map);
3824 HSubgraph* subgraph = CreateBranchSubgraph(environment());
3825 SubgraphScope scope(this, subgraph);
3826 AddCheckConstantFunction(expr, receiver, map, false);
3827 if (FLAG_trace_inlining && FLAG_polymorphic_inlining) {
3828 PrintF("Trying to inline the polymorphic call to %s\n",
3829 *name->ToCString());
3830 }
3831 if (!FLAG_polymorphic_inlining || !TryInline(expr)) {
3832 // Check for bailout, as trying to inline might fail due to bailout
3833 // during hydrogen processing.
3834 CHECK_BAILOUT;
3835 HCall* call = new HCallConstantFunction(expr->target(), argument_count);
3836 call->set_position(expr->position());
3837 ProcessCall(call);
3838 PushAndAdd(call);
3839 }
3840 subgraphs.Add(subgraph);
3841 } else {
3842 needs_generic = true;
3843 }
3844 }
3845
3846 // If we couldn't compute the target for any of the maps just perform an
3847 // IC call.
3848 if (maps.length() == 0) {
3849 HCall* call = new HCallNamed(name, argument_count);
3850 call->set_position(expr->position());
3851 ProcessCall(call);
3852 ast_context()->ReturnInstruction(call, expr->id());
3853 } else {
3854 // Build subgraph for generic call through IC.
3855 {
3856 HSubgraph* subgraph = CreateBranchSubgraph(environment());
3857 SubgraphScope scope(this, subgraph);
3858 if (!needs_generic && FLAG_deoptimize_uncommon_cases) {
3859 subgraph->FinishExit(new HDeoptimize());
3860 } else {
3861 HCall* call = new HCallNamed(name, argument_count);
3862 call->set_position(expr->position());
3863 ProcessCall(call);
3864 PushAndAdd(call);
3865 }
3866 subgraphs.Add(subgraph);
3867 }
3868
3869 HBasicBlock* new_exit_block =
3870 BuildTypeSwitch(&maps, &subgraphs, receiver, expr->id());
3871 subgraph()->set_exit_block(new_exit_block);
3872 // In an effect context, we did not materialize the value in the
3873 // predecessor environments, so there's no need to handle it here.
3874 if (new_exit_block != NULL && !ast_context()->IsEffect()) {
3875 ast_context()->ReturnValue(Pop());
3876 }
3877 }
3878}
3879
3880
3881void HGraphBuilder::TraceInline(Handle<JSFunction> target, bool result) {
3882 SmartPointer<char> callee = target->shared()->DebugName()->ToCString();
3883 SmartPointer<char> caller =
3884 graph()->info()->function()->debug_name()->ToCString();
3885 if (result) {
3886 PrintF("Inlined %s called from %s.\n", *callee, *caller);
3887 } else {
3888 PrintF("Do not inline %s called from %s.\n", *callee, *caller);
3889 }
3890}
3891
3892
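// Informal summary of the bail-out conditions checked in TryInline below,
// paraphrased from the code itself rather than an exhaustive specification:
// inlining must be enabled, the target must be small enough and marked
// inlineable, it must share the caller's context and use neither 'with'
// scopes nor heap-allocated slots, inlining depth is capped, recursion is
// rejected, the arguments object must be unused, the arity must match, and
// every statement in the body must itself be inlineable.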
3893bool HGraphBuilder::TryInline(Call* expr) {
3894 if (!FLAG_use_inlining) return false;
3895
3896 // Precondition: call is monomorphic and we have found a target with the
3897 // appropriate arity.
3898 Handle<JSFunction> target = expr->target();
3899
3900 // Do a quick check on source code length to avoid parsing large
3901 // inlining candidates.
3902 if (FLAG_limit_inlining && target->shared()->SourceSize() > kMaxSourceSize) {
3903 if (FLAG_trace_inlining) TraceInline(target, false);
3904 return false;
3905 }
3906
3907 // Target must be inlineable.
3908 if (!target->IsInlineable()) return false;
3909
3910 // No context change required.
3911 CompilationInfo* outer_info = graph()->info();
3912 if (target->context() != outer_info->closure()->context() ||
3913 outer_info->scope()->contains_with() ||
3914 outer_info->scope()->num_heap_slots() > 0) {
3915 return false;
3916 }
3917
3918 // Don't inline deeper than two calls.
3919 HEnvironment* env = environment();
3920 if (env->outer() != NULL && env->outer()->outer() != NULL) return false;
3921
3922 // Don't inline recursive functions.
3923 if (target->shared() == outer_info->closure()->shared()) return false;
3924
3925 // We don't want to add more than a certain number of nodes from inlining.
3926 if (FLAG_limit_inlining && inlined_count_ > kMaxInlinedNodes) {
3927 if (FLAG_trace_inlining) TraceInline(target, false);
3928 return false;
3929 }
3930
3931 int count_before = AstNode::Count();
3932
3933 // Parse and allocate variables.
3934 Handle<SharedFunctionInfo> shared(target->shared());
3935 CompilationInfo inner_info(shared);
3936 if (!ParserApi::Parse(&inner_info) ||
3937 !Scope::Analyze(&inner_info)) {
3938 return false;
3939 }
3940 FunctionLiteral* function = inner_info.function();
3941
3942 // Count the number of AST nodes added by inlining this call.
3943 int nodes_added = AstNode::Count() - count_before;
3944 if (FLAG_limit_inlining && nodes_added > kMaxInlinedSize) {
3945 if (FLAG_trace_inlining) TraceInline(target, false);
3946 return false;
3947 }
3948
3949 // Check if we can handle all declarations in the inlined functions.
3950 VisitDeclarations(inner_info.scope()->declarations());
3951 if (HasStackOverflow()) {
3952 ClearStackOverflow();
3953 return false;
3954 }
3955
3956 // Don't inline functions that use the arguments object or that
3957 // have a mismatching number of parameters.
3958 int arity = expr->arguments()->length();
3959 if (function->scope()->arguments() != NULL ||
3960 arity != target->shared()->formal_parameter_count()) {
3961 return false;
3962 }
3963
3964 // All statements in the body must be inlineable.
3965 for (int i = 0, count = function->body()->length(); i < count; ++i) {
3966 if (!function->body()->at(i)->IsInlineable()) return false;
3967 }
3968
3969 // Generate the deoptimization data for the unoptimized version of
3970 // the target function if we don't already have it.
3971 if (!shared->has_deoptimization_support()) {
3972 // Note that we compile here using the same AST that we will use for
3973 // generating the optimized inline code.
3974 inner_info.EnableDeoptimizationSupport();
3975 if (!FullCodeGenerator::MakeCode(&inner_info)) return false;
3976 shared->EnableDeoptimizationSupport(*inner_info.code());
3977 Compiler::RecordFunctionCompilation(
3978 Logger::FUNCTION_TAG,
3979 Handle<String>(shared->DebugName()),
3980 shared->start_position(),
3981 &inner_info);
3982 }
3983
3984 // Save the pending call context and type feedback oracle. Set up new ones
3985 // for the inlined function.
3986 ASSERT(shared->has_deoptimization_support());
3987 AstContext* saved_call_context = call_context();
3988 HBasicBlock* saved_function_return = function_return();
3989 TypeFeedbackOracle* saved_oracle = oracle();
3990 // On-stack replacement cannot target inlined functions. Since we don't
3991 // use a separate CompilationInfo structure for the inlined function, we
3992 // save and restore the AST ID in the original compilation info.
3993 int saved_osr_ast_id = graph()->info()->osr_ast_id();
3994
3995 TestContext* test_context = NULL;
3996 if (ast_context()->IsTest()) {
3997 // Inlined body is treated as if it occurs in an 'inlined' call context
3998 // with true and false blocks that will forward to the real ones.
3999 HBasicBlock* if_true = graph()->CreateBasicBlock();
4000 HBasicBlock* if_false = graph()->CreateBasicBlock();
4001 if_true->MarkAsInlineReturnTarget();
4002 if_false->MarkAsInlineReturnTarget();
4003 // AstContext constructor pushes on the context stack.
4004 test_context = new TestContext(this, if_true, if_false);
4005 function_return_ = NULL;
4006 } else {
4007 // Inlined body is treated as if it occurs in the original call context.
4008 function_return_ = graph()->CreateBasicBlock();
4009 function_return_->MarkAsInlineReturnTarget();
4010 }
4011 call_context_ = ast_context();
4012 TypeFeedbackOracle new_oracle(Handle<Code>(shared->code()));
4013 oracle_ = &new_oracle;
4014 graph()->info()->SetOsrAstId(AstNode::kNoNumber);
4015
4016 HSubgraph* body = CreateInlinedSubgraph(env, target, function);
4017 body->exit_block()->AddInstruction(new HEnterInlined(target, function));
4018 AddToSubgraph(body, function->body());
4019 if (HasStackOverflow()) {
4020 // Bail out if the inlined function did, as we cannot residualize a call
4021 // instead.
4022 delete test_context;
4023 call_context_ = saved_call_context;
4024 function_return_ = saved_function_return;
4025 oracle_ = saved_oracle;
4026 graph()->info()->SetOsrAstId(saved_osr_ast_id);
4027 return false;
4028 }
4029
4030 // Update inlined nodes count.
4031 inlined_count_ += nodes_added;
4032
4033 if (FLAG_trace_inlining) TraceInline(target, true);
4034
4035 if (body->HasExit()) {
4036 // Add a return of undefined if control can fall off the body. In a
4037 // test context, undefined is false.
4038 HValue* return_value = graph()->GetConstantUndefined();
4039 if (test_context == NULL) {
4040 ASSERT(function_return_ != NULL);
4041 body->exit_block()->AddLeaveInlined(return_value, function_return_);
4042 } else {
4043 // The graph builder assumes control can reach both branches of a
4044 // test, so we materialize the undefined value and test it rather than
4045 // simply jumping to the false target.
4046 //
4047 // TODO(3168478): refactor to avoid this.
4048 HBasicBlock* empty_true = graph()->CreateBasicBlock();
4049 HBasicBlock* empty_false = graph()->CreateBasicBlock();
4050 HBranch* branch =
4051 new HBranch(empty_true, empty_false, return_value);
4052 body->exit_block()->Finish(branch);
4053
4054 HValue* const no_return_value = NULL;
4055 empty_true->AddLeaveInlined(no_return_value, test_context->if_true());
4056 empty_false->AddLeaveInlined(no_return_value, test_context->if_false());
4057 }
4058 body->set_exit_block(NULL);
4059 }
4060
4061 // Record the environment at the inlined function call.
4062 AddSimulate(expr->ReturnId());
4063
4064 // Jump to the function entry (without re-recording the environment).
4065 subgraph()->exit_block()->Finish(new HGoto(body->entry_block()));
4066
4067 // Fix up the function exits.
4068 if (test_context != NULL) {
4069 HBasicBlock* if_true = test_context->if_true();
4070 HBasicBlock* if_false = test_context->if_false();
4071 if_true->SetJoinId(expr->id());
4072 if_false->SetJoinId(expr->id());
4073 ASSERT(ast_context() == test_context);
4074 delete test_context; // Destructor pops from expression context stack.
4075
4076 // Forward to the real test context.
4077 HValue* const no_return_value = NULL;
4078 HBasicBlock* true_target = TestContext::cast(ast_context())->if_true();
4079 if (true_target->IsInlineReturnTarget()) {
4080 if_true->AddLeaveInlined(no_return_value, true_target);
4081 } else {
4082 if_true->Goto(true_target);
4083 }
4084
4085 HBasicBlock* false_target = TestContext::cast(ast_context())->if_false();
4086 if (false_target->IsInlineReturnTarget()) {
4087 if_false->AddLeaveInlined(no_return_value, false_target);
4088 } else {
4089 if_false->Goto(false_target);
4090 }
4091
4092 // TODO(kmillikin): Come up with a better way to handle this. It is too
4093 // subtle. NULL here indicates that the enclosing context has no control
4094 // flow to handle.
4095 subgraph()->set_exit_block(NULL);
4096
4097 } else {
4098 function_return_->SetJoinId(expr->id());
4099 subgraph()->set_exit_block(function_return_);
4100 }
4101
4102 call_context_ = saved_call_context;
4103 function_return_ = saved_function_return;
4104 oracle_ = saved_oracle;
4105 graph()->info()->SetOsrAstId(saved_osr_ast_id);
4106
4107 return true;
4108}
4109
4110
4111void HBasicBlock::AddLeaveInlined(HValue* return_value, HBasicBlock* target) {
4112 ASSERT(target->IsInlineReturnTarget());
4113 AddInstruction(new HLeaveInlined);
4114 HEnvironment* outer = last_environment()->outer();
4115 if (return_value != NULL) outer->Push(return_value);
4116 UpdateEnvironment(outer);
4117 Goto(target);
4118}
4119
4120
4121bool HGraphBuilder::TryMathFunctionInline(Call* expr) {
4122 // Try to inline calls like Math.* as operations in the calling function.
4123 if (!expr->target()->shared()->IsBuiltinMathFunction()) return false;
4124 BuiltinFunctionId id = expr->target()->shared()->builtin_function_id();
4125 int argument_count = expr->arguments()->length() + 1; // Plus receiver.
4126 switch (id) {
4127 case kMathRound:
4128 case kMathFloor:
4129 case kMathAbs:
4130 case kMathSqrt:
4131 case kMathLog:
4132 case kMathSin:
4133 case kMathCos:
4134 if (argument_count == 2) {
4135 HValue* argument = Pop();
4136 Drop(1); // Receiver.
4137 HUnaryMathOperation* op = new HUnaryMathOperation(argument, id);
4138 op->set_position(expr->position());
4139 ast_context()->ReturnInstruction(op, expr->id());
4140 return true;
4141 }
4142 break;
4143 case kMathPow:
4144 if (argument_count == 3) {
4145 HValue* right = Pop();
4146 HValue* left = Pop();
4147 Pop(); // Pop receiver.
4148 HInstruction* result = NULL;
4149 // Use sqrt() if exponent is 0.5 or -0.5.
4150 if (right->IsConstant() && HConstant::cast(right)->HasDoubleValue()) {
4151 double exponent = HConstant::cast(right)->DoubleValue();
4152 if (exponent == 0.5) {
4153 result = new HUnaryMathOperation(left, kMathPowHalf);
4154 ast_context()->ReturnInstruction(result, expr->id());
4155 return true;
4156 } else if (exponent == -0.5) {
4157 HConstant* double_one =
4158 new HConstant(Handle<Object>(Smi::FromInt(1)),
4159 Representation::Double());
4160 AddInstruction(double_one);
4161 HUnaryMathOperation* square_root =
4162 new HUnaryMathOperation(left, kMathPowHalf);
4163 AddInstruction(square_root);
4164 // MathPowHalf doesn't have side effects so there's no need for
4165 // an environment simulation here.
4166 ASSERT(!square_root->HasSideEffects());
4167 result = new HDiv(double_one, square_root);
4168 ast_context()->ReturnInstruction(result, expr->id());
4169 return true;
4170 } else if (exponent == 2.0) {
4171 result = new HMul(left, left);
4172 ast_context()->ReturnInstruction(result, expr->id());
4173 return true;
4174 }
4175 } else if (right->IsConstant() &&
4176 HConstant::cast(right)->HasInteger32Value() &&
4177 HConstant::cast(right)->Integer32Value() == 2) {
4178 result = new HMul(left, left);
4179 ast_context()->ReturnInstruction(result, expr->id());
4180 return true;
4181 }
4182
4183 result = new HPower(left, right);
4184 ast_context()->ReturnInstruction(result, expr->id());
4185 return true;
4186 }
4187 break;
4188 default:
4189 // Not yet supported for inlining.
4190 break;
4191 }
4192 return false;
4193}
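// Illustrative strength reductions performed above (sketch, not exhaustive):
//   Math.sqrt(x), Math.floor(x), ...  -> a single HUnaryMathOperation
//   Math.pow(x, 0.5)                  -> HUnaryMathOperation(kMathPowHalf)
//   Math.pow(x, -0.5)                 -> 1.0 / HUnaryMathOperation(kMathPowHalf)
//   Math.pow(x, 2)                    -> HMul(x, x)
// Other exponents use HPower; unknown builtins fall back to a regular call.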
4194
4195
4196bool HGraphBuilder::TryCallApply(Call* expr) {
4197 Expression* callee = expr->expression();
4198 Property* prop = callee->AsProperty();
4199 ASSERT(prop != NULL);
4200
4201 if (graph()->info()->scope()->arguments() == NULL) return false;
4202
4203 Handle<String> name = prop->key()->AsLiteral()->AsPropertyName();
4204 if (!name->IsEqualTo(CStrVector("apply"))) return false;
4205
4206 ZoneList<Expression*>* args = expr->arguments();
4207 if (args->length() != 2) return false;
4208
4209 VariableProxy* arg_two = args->at(1)->AsVariableProxy();
4210 if (arg_two == NULL || !arg_two->var()->IsStackAllocated()) return false;
4211 HValue* arg_two_value = environment()->Lookup(arg_two->var());
4212 if (!arg_two_value->CheckFlag(HValue::kIsArguments)) return false;
4213
4214 if (!expr->IsMonomorphic()) return false;
4215
4216 // Found pattern f.apply(receiver, arguments).
4217 VisitForValue(prop->obj());
4218 if (HasStackOverflow()) return false;
4219 HValue* function = Pop();
4220 VisitForValue(args->at(0));
4221 if (HasStackOverflow()) return false;
4222 HValue* receiver = Pop();
4223 HInstruction* elements = AddInstruction(new HArgumentsElements);
4224 HInstruction* length = AddInstruction(new HArgumentsLength(elements));
4225 AddCheckConstantFunction(expr,
4226 function,
4227 expr->GetReceiverTypes()->first(),
4228 true);
4229 HInstruction* result =
4230 new HApplyArguments(function, receiver, length, elements);
4231 result->set_position(expr->position());
4232 ast_context()->ReturnInstruction(result, expr->id());
4233 return true;
4234}
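// The pattern recognized above corresponds to JS of the form (sketch):
//   function outer() { return f.apply(this, arguments); }
// where 'arguments' is a stack-allocated binding; the call is lowered to
// HApplyArguments, taking the length and elements directly from the frame.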
4235
4236
4237void HGraphBuilder::VisitCall(Call* expr) {
4238 Expression* callee = expr->expression();
4239 int argument_count = expr->arguments()->length() + 1; // Plus receiver.
4240 HCall* call = NULL;
4241
4242 Property* prop = callee->AsProperty();
4243 if (prop != NULL) {
4244 if (!prop->key()->IsPropertyName()) {
4245 // Keyed function call.
4246 VisitArgument(prop->obj());
4247 CHECK_BAILOUT;
4248
4249 VISIT_FOR_VALUE(prop->key());
4250 // Push the receiver and key in the order the non-optimized code generator expects.
4251 HValue* key = Pop();
4252 HValue* receiver = Pop();
4253 Push(key);
4254 Push(receiver);
4255
4256 VisitArgumentList(expr->arguments());
4257 CHECK_BAILOUT;
4258
4259 call = new HCallKeyed(key, argument_count);
4260 call->set_position(expr->position());
4261 ProcessCall(call);
4262 Drop(1); // Key.
4263 ast_context()->ReturnInstruction(call, expr->id());
4264 return;
4265 }
4266
4267 // Named function call.
4268 expr->RecordTypeFeedback(oracle());
4269
4270 if (TryCallApply(expr)) return;
4271 CHECK_BAILOUT;
4272
4273 HValue* receiver = VisitArgument(prop->obj());
4274 CHECK_BAILOUT;
4275 VisitArgumentList(expr->arguments());
4276 CHECK_BAILOUT;
4277
4278 Handle<String> name = prop->key()->AsLiteral()->AsPropertyName();
4279
4280 expr->RecordTypeFeedback(oracle());
4281 ZoneMapList* types = expr->GetReceiverTypes();
4282
4283 if (expr->IsMonomorphic()) {
4284 AddCheckConstantFunction(expr, receiver, types->first(), true);
4285
4286 if (TryMathFunctionInline(expr)) {
4287 return;
4288 } else if (TryInline(expr)) {
4289 if (subgraph()->HasExit()) {
4290 HValue* return_value = Pop();
4291 // If we inlined a function in a test context then we need to emit
4292 // a simulate here to shadow the ones at the end of the
4293 // predecessor blocks. Those environments contain the return
4294 // value on top and do not correspond to any actual state of the
4295 // unoptimized code.
4296 if (ast_context()->IsEffect()) AddSimulate(expr->id());
4297 ast_context()->ReturnValue(return_value);
4298 }
4299 return;
4300 } else {
4301 // Check for bailout, as the TryInline call in the if condition above
4302 // might return false due to bailout during hydrogen processing.
4303 CHECK_BAILOUT;
4304 call = new HCallConstantFunction(expr->target(), argument_count);
4305 }
4306
4307 } else if (types != NULL && types->length() > 1) {
4308 HandlePolymorphicCallNamed(expr, receiver, types, name);
4309 return;
4310
4311 } else {
4312 call = new HCallNamed(name, argument_count);
4313 }
4314
4315 } else {
4316 Variable* var = expr->expression()->AsVariableProxy()->AsVariable();
4317 bool global_call = (var != NULL) && var->is_global() && !var->is_this();
4318
4319 if (!global_call) {
4320 ++argument_count;
4321 VisitArgument(expr->expression());
4322 CHECK_BAILOUT;
4323 }
4324
4325 if (global_call) {
4326 // If there is a global property cell for the name at compile time and
4327 // access check is not enabled we assume that the function will not change
4328 // and generate optimized code for calling the function.
4329 CompilationInfo* info = graph()->info();
4330 bool known_global_function = info->has_global_object() &&
4331 !info->global_object()->IsAccessCheckNeeded() &&
4332 expr->ComputeGlobalTarget(Handle<GlobalObject>(info->global_object()),
4333 var->name());
4334 if (known_global_function) {
4335 // Push the global object instead of the global receiver because
4336 // code generated by the full code generator expects it.
4337 PushAndAdd(new HGlobalObject);
4338 VisitArgumentList(expr->arguments());
4339 CHECK_BAILOUT;
4340
4341 VISIT_FOR_VALUE(expr->expression());
4342 HValue* function = Pop();
4343 AddInstruction(new HCheckFunction(function, expr->target()));
4344
4345 // Replace the global object with the global receiver.
4346 HGlobalReceiver* global_receiver = new HGlobalReceiver;
4347 // Index of the receiver from the top of the expression stack.
4348 const int receiver_index = argument_count - 1;
4349 AddInstruction(global_receiver);
4350 ASSERT(environment()->ExpressionStackAt(receiver_index)->
4351 IsGlobalObject());
4352 environment()->SetExpressionStackAt(receiver_index, global_receiver);
4353
4354 if (TryInline(expr)) {
4355 if (subgraph()->HasExit()) {
4356 HValue* return_value = Pop();
4357 // If we inlined a function in a test context then we need to
4358 // emit a simulate here to shadow the ones at the end of the
4359 // predecessor blocks. Those environments contain the return
4360 // value on top and do not correspond to any actual state of the
4361 // unoptimized code.
4362 if (ast_context()->IsEffect()) AddSimulate(expr->id());
4363 ast_context()->ReturnValue(return_value);
4364 }
4365 return;
4366 }
4367 // Check for bailout, as trying to inline might fail due to bailout
4368 // during hydrogen processing.
4369 CHECK_BAILOUT;
4370
4371 call = new HCallKnownGlobal(expr->target(), argument_count);
4372 } else {
4373 PushAndAdd(new HGlobalObject);
4374 VisitArgumentList(expr->arguments());
4375 CHECK_BAILOUT;
4376
4377 call = new HCallGlobal(var->name(), argument_count);
4378 }
4379
4380 } else {
4381 PushAndAdd(new HGlobalReceiver);
4382 VisitArgumentList(expr->arguments());
4383 CHECK_BAILOUT;
4384
4385 call = new HCallFunction(argument_count);
4386 }
4387 }
4388
4389 call->set_position(expr->position());
4390 ProcessCall(call);
4391 ast_context()->ReturnInstruction(call, expr->id());
4392}
4393
4394
4395void HGraphBuilder::VisitCallNew(CallNew* expr) {
4396 // The constructor function is also used as the receiver argument to the
4397 // JS construct call builtin.
4398 VisitArgument(expr->expression());
4399 CHECK_BAILOUT;
4400 VisitArgumentList(expr->arguments());
4401 CHECK_BAILOUT;
4402
4403 int argument_count = expr->arguments()->length() + 1; // Plus constructor.
4404 HCall* call = new HCallNew(argument_count);
4405 call->set_position(expr->position());
4406 ProcessCall(call);
4407 ast_context()->ReturnInstruction(call, expr->id());
4408}
4409
4410
4411// Support for generating inlined runtime functions.
4412
4413// Lookup table for generators for runtime calls that are generated inline.
4414// Elements of the table are member pointers to functions of HGraphBuilder.
4415#define INLINE_FUNCTION_GENERATOR_ADDRESS(Name, argc, ressize) \
4416 &HGraphBuilder::Generate##Name,
4417
4418const HGraphBuilder::InlineFunctionGenerator
4419 HGraphBuilder::kInlineFunctionGenerators[] = {
4420 INLINE_FUNCTION_LIST(INLINE_FUNCTION_GENERATOR_ADDRESS)
4421 INLINE_RUNTIME_FUNCTION_LIST(INLINE_FUNCTION_GENERATOR_ADDRESS)
4422};
4423#undef INLINE_FUNCTION_GENERATOR_ADDRESS
4424
4425
4426void HGraphBuilder::VisitCallRuntime(CallRuntime* expr) {
4427 Handle<String> name = expr->name();
4428 if (name->IsEqualTo(CStrVector("_Log"))) {
4429 ast_context()->ReturnValue(graph()->GetConstantUndefined());
4430 return;
4431 }
4432
4433 Runtime::Function* function = expr->function();
4434 if (expr->is_jsruntime()) {
4435 BAILOUT("call to a JavaScript runtime function");
4436 }
4437 ASSERT(function != NULL);
4438
4439 VisitArgumentList(expr->arguments());
4440 CHECK_BAILOUT;
4441
4442 int argument_count = expr->arguments()->length();
4443 if (function->intrinsic_type == Runtime::INLINE) {
4444 ASSERT(name->length() > 0);
4445 ASSERT(name->Get(0) == '_');
4446 // Call to an inline function.
4447 int lookup_index = static_cast<int>(function->function_id) -
4448 static_cast<int>(Runtime::kFirstInlineFunction);
4449 ASSERT(lookup_index >= 0);
4450 ASSERT(static_cast<size_t>(lookup_index) <
4451 ARRAY_SIZE(kInlineFunctionGenerators));
4452 InlineFunctionGenerator generator = kInlineFunctionGenerators[lookup_index];
4453
4454 // Call the inline code generator using the pointer-to-member.
4455 (this->*generator)(argument_count, expr->id());
4456 } else {
4457 ASSERT(function->intrinsic_type == Runtime::RUNTIME);
4458 HCall* call = new HCallRuntime(name, expr->function(), argument_count);
4459 call->set_position(RelocInfo::kNoPosition);
4460 ProcessCall(call);
4461 ast_context()->ReturnInstruction(call, expr->id());
4462 }
4463}
4464
4465
4466void HGraphBuilder::VisitUnaryOperation(UnaryOperation* expr) {
4467 Token::Value op = expr->op();
4468 if (op == Token::VOID) {
4469 VISIT_FOR_EFFECT(expr->expression());
4470 ast_context()->ReturnValue(graph()->GetConstantUndefined());
4471 } else if (op == Token::DELETE) {
4472 Property* prop = expr->expression()->AsProperty();
4473 Variable* var = expr->expression()->AsVariableProxy()->AsVariable();
4474 if (prop == NULL && var == NULL) {
4475 // Result of deleting non-property, non-variable reference is true.
4476 // Evaluate the subexpression for side effects.
4477 VISIT_FOR_EFFECT(expr->expression());
4478 ast_context()->ReturnValue(graph()->GetConstantTrue());
4479 } else if (var != NULL &&
4480 !var->is_global() &&
4481 var->AsSlot() != NULL &&
4482 var->AsSlot()->type() != Slot::LOOKUP) {
4483 // Result of deleting non-global, non-dynamic variables is false.
4484 // The subexpression does not have side effects.
4485 ast_context()->ReturnValue(graph()->GetConstantFalse());
4486 } else if (prop != NULL) {
4487 VISIT_FOR_VALUE(prop->obj());
4488 VISIT_FOR_VALUE(prop->key());
4489 HValue* key = Pop();
4490 HValue* obj = Pop();
4491 ast_context()->ReturnInstruction(new HDeleteProperty(obj, key),
4492 expr->id());
4493 } else if (var->is_global()) {
4494 BAILOUT("delete with global variable");
4495 } else {
4496 BAILOUT("delete with non-global variable");
4497 }
4498 } else if (op == Token::NOT) {
4499 if (ast_context()->IsTest()) {
4500 TestContext* context = TestContext::cast(ast_context());
4501 VisitForControl(expr->expression(),
4502 context->if_false(),
4503 context->if_true());
4504 } else {
4505 HSubgraph* true_graph = CreateEmptySubgraph();
4506 HSubgraph* false_graph = CreateEmptySubgraph();
4507 VISIT_FOR_CONTROL(expr->expression(),
4508 false_graph->entry_block(),
4509 true_graph->entry_block());
4510 true_graph->entry_block()->SetJoinId(expr->expression()->id());
4511 true_graph->environment()->Push(graph_->GetConstantTrue());
4512
4513 false_graph->entry_block()->SetJoinId(expr->expression()->id());
4514 false_graph->environment()->Push(graph_->GetConstantFalse());
4515
4516 current_subgraph_->AppendJoin(true_graph, false_graph, expr);
4517 ast_context()->ReturnValue(Pop());
4518 }
4519 } else if (op == Token::BIT_NOT || op == Token::SUB) {
4520 VISIT_FOR_VALUE(expr->expression());
4521 HValue* value = Pop();
4522 HInstruction* instr = NULL;
4523 switch (op) {
4524 case Token::BIT_NOT:
4525 instr = new HBitNot(value);
4526 break;
4527 case Token::SUB:
4528 instr = new HMul(graph_->GetConstantMinus1(), value);
4529 break;
4530 default:
4531 UNREACHABLE();
4532 break;
4533 }
4534 ast_context()->ReturnInstruction(instr, expr->id());
4535 } else if (op == Token::TYPEOF) {
4536 VISIT_FOR_VALUE(expr->expression());
4537 HValue* value = Pop();
4538 ast_context()->ReturnInstruction(new HTypeof(value), expr->id());
4539 } else {
4540 BAILOUT("Value: unsupported unary operation");
4541 }
4542}
4543
4544
4545void HGraphBuilder::VisitIncrementOperation(IncrementOperation* expr) {
4546 // IncrementOperation is never visited by the visitor. It only
4547 // occurs as a subexpression of CountOperation.
4548 UNREACHABLE();
4549}
4550
4551
4552HInstruction* HGraphBuilder::BuildIncrement(HValue* value, bool increment) {
4553 HConstant* delta = increment
4554 ? graph_->GetConstant1()
4555 : graph_->GetConstantMinus1();
4556 HInstruction* instr = new HAdd(value, delta);
4557 AssumeRepresentation(instr, Representation::Integer32());
4558 return instr;
4559}
4560
4561
4562void HGraphBuilder::VisitCountOperation(CountOperation* expr) {
4563 IncrementOperation* increment = expr->increment();
4564 Expression* target = increment->expression();
4565 VariableProxy* proxy = target->AsVariableProxy();
4566 Variable* var = proxy->AsVariable();
4567 Property* prop = target->AsProperty();
4568 ASSERT(var == NULL || prop == NULL);
4569 bool inc = expr->op() == Token::INC;
4570
4571 if (var != NULL) {
4572 if (!var->is_global() && !var->IsStackAllocated()) {
4573 BAILOUT("non-stack/non-global variable in count operation");
4574 }
4575
4576 VISIT_FOR_VALUE(target);
4577
4578 // Match the full code generator stack by simulating an extra stack
4579 // element for postfix operations in a non-effect context.
4580 bool has_extra = expr->is_postfix() && !ast_context()->IsEffect();
4581 HValue* before = has_extra ? Top() : Pop();
4582 HInstruction* after = BuildIncrement(before, inc);
4583 AddInstruction(after);
4584 Push(after);
4585
4586 if (var->is_global()) {
4587 HandleGlobalVariableAssignment(var,
4588 after,
4589 expr->position(),
4590 expr->AssignmentId());
4591 } else {
4592 ASSERT(var->IsStackAllocated());
4593 Bind(var, after);
4594 }
4595 Drop(has_extra ? 2 : 1);
4596 ast_context()->ReturnValue(expr->is_postfix() ? before : after);
4597
4598 } else if (prop != NULL) {
4599 prop->RecordTypeFeedback(oracle());
4600
4601 if (prop->key()->IsPropertyName()) {
4602 // Named property.
4603
4604 // Match the full code generator stack by simulating an extra stack
4605 // element for postfix operations in a non-effect context.
4606 bool has_extra = expr->is_postfix() && !ast_context()->IsEffect();
4607 if (has_extra) Push(graph_->GetConstantUndefined());
4608
4609 VISIT_FOR_VALUE(prop->obj());
4610 HValue* obj = Top();
4611
4612 HInstruction* load = NULL;
4613 if (prop->IsMonomorphic()) {
4614 Handle<String> name = prop->key()->AsLiteral()->AsPropertyName();
4615 Handle<Map> map = prop->GetReceiverTypes()->first();
4616 load = BuildLoadNamed(obj, prop, map, name);
4617 } else {
4618 load = BuildLoadNamedGeneric(obj, prop);
4619 }
4620 PushAndAdd(load);
4621 if (load->HasSideEffects()) AddSimulate(increment->id());
4622
4623 HValue* before = Pop();
4624 // There is no deoptimization to after the increment, so we don't need
4625 // to simulate the expression stack after this instruction.
4626 HInstruction* after = BuildIncrement(before, inc);
4627 AddInstruction(after);
4628
4629 HInstruction* store = BuildStoreNamed(obj, after, prop);
4630 AddInstruction(store);
4631
4632 // Overwrite the receiver in the bailout environment with the result
4633 // of the operation, and the placeholder with the original value if
4634 // necessary.
4635 environment()->SetExpressionStackAt(0, after);
4636 if (has_extra) environment()->SetExpressionStackAt(1, before);
4637 if (store->HasSideEffects()) AddSimulate(expr->AssignmentId());
4638 Drop(has_extra ? 2 : 1);
4639
4640 ast_context()->ReturnValue(expr->is_postfix() ? before : after);
4641
4642 } else {
4643 // Keyed property.
4644
4645 // Match the full code generator stack by simulating an extra stack element
4646 // for postfix operations in a non-effect context.
4647 bool has_extra = expr->is_postfix() && !ast_context()->IsEffect();
4648 if (has_extra) Push(graph_->GetConstantUndefined());
4649
4650 VISIT_FOR_VALUE(prop->obj());
4651 VISIT_FOR_VALUE(prop->key());
4652 HValue* obj = environment()->ExpressionStackAt(1);
4653 HValue* key = environment()->ExpressionStackAt(0);
4654
4655 bool is_fast_elements = prop->IsMonomorphic() &&
4656 prop->GetMonomorphicReceiverType()->has_fast_elements();
4657
4658 HInstruction* load = is_fast_elements
4659 ? BuildLoadKeyedFastElement(obj, key, prop)
4660 : BuildLoadKeyedGeneric(obj, key);
4661 PushAndAdd(load);
4662 if (load->HasSideEffects()) AddSimulate(increment->id());
4663
4664 HValue* before = Pop();
4665 // There is no deoptimization to after the increment, so we don't need
4666 // to simulate the expression stack after this instruction.
4667 HInstruction* after = BuildIncrement(before, inc);
4668 AddInstruction(after);
4669
4670 HInstruction* store = is_fast_elements
4671 ? BuildStoreKeyedFastElement(obj, key, after, prop)
4672 : new HStoreKeyedGeneric(obj, key, after);
4673 AddInstruction(store);
4674
4675 // Drop the key from the bailout environment. Overwrite the receiver
4676 // with the result of the operation, and the placeholder with the
4677 // original value if necessary.
4678 Drop(1);
4679 environment()->SetExpressionStackAt(0, after);
4680 if (has_extra) environment()->SetExpressionStackAt(1, before);
4681 if (store->HasSideEffects()) AddSimulate(expr->AssignmentId());
4682 Drop(has_extra ? 2 : 1);
4683
4684 ast_context()->ReturnValue(expr->is_postfix() ? before : after);
4685 }
4686
4687 } else {
4688 BAILOUT("invalid lhs in count operation");
4689 }
4690}
4691
4692
4693HInstruction* HGraphBuilder::BuildBinaryOperation(BinaryOperation* expr,
4694 HValue* left,
4695 HValue* right) {
4696 HInstruction* instr = NULL;
4697 switch (expr->op()) {
4698 case Token::ADD:
4699 instr = new HAdd(left, right);
4700 break;
4701 case Token::SUB:
4702 instr = new HSub(left, right);
4703 break;
4704 case Token::MUL:
4705 instr = new HMul(left, right);
4706 break;
4707 case Token::MOD:
4708 instr = new HMod(left, right);
4709 break;
4710 case Token::DIV:
4711 instr = new HDiv(left, right);
4712 break;
4713 case Token::BIT_XOR:
4714 instr = new HBitXor(left, right);
4715 break;
4716 case Token::BIT_AND:
4717 instr = new HBitAnd(left, right);
4718 break;
4719 case Token::BIT_OR:
4720 instr = new HBitOr(left, right);
4721 break;
4722 case Token::SAR:
4723 instr = new HSar(left, right);
4724 break;
4725 case Token::SHR:
4726 instr = new HShr(left, right);
4727 break;
4728 case Token::SHL:
4729 instr = new HShl(left, right);
4730 break;
4731 default:
4732 UNREACHABLE();
4733 }
4734 TypeInfo info = oracle()->BinaryType(expr, TypeFeedbackOracle::RESULT);
4735 // If we hit an uninitialized binary op stub we will get type info
4736 // for a smi operation. If one of the operands is a constant string
4737 // do not generate code assuming it is a smi operation.
4738 if (info.IsSmi() &&
4739 ((left->IsConstant() && HConstant::cast(left)->HasStringValue()) ||
4740 (right->IsConstant() && HConstant::cast(right)->HasStringValue()))) {
4741 return instr;
4742 }
4743 if (FLAG_trace_representation) {
4744 PrintF("Info: %s/%s\n", info.ToString(), ToRepresentation(info).Mnemonic());
4745 }
4746 AssumeRepresentation(instr, ToRepresentation(info));
4747 return instr;
4748}
4749
4750
4751// Check for the form (%_ClassOf(foo) === 'BarClass').
4752static bool IsClassOfTest(CompareOperation* expr) {
4753 if (expr->op() != Token::EQ_STRICT) return false;
4754 CallRuntime* call = expr->left()->AsCallRuntime();
4755 if (call == NULL) return false;
4756 Literal* literal = expr->right()->AsLiteral();
4757 if (literal == NULL) return false;
4758 if (!literal->handle()->IsString()) return false;
4759 if (!call->name()->IsEqualTo(CStrVector("_ClassOf"))) return false;
4760 ASSERT(call->arguments()->length() == 1);
4761 return true;
4762}
4763
4764
4765void HGraphBuilder::VisitBinaryOperation(BinaryOperation* expr) {
4766 if (expr->op() == Token::COMMA) {
4767 VISIT_FOR_EFFECT(expr->left());
4768 // Visit the right subexpression in the same AST context as the entire
4769 // expression.
4770 Visit(expr->right());
4771
4772 } else if (expr->op() == Token::AND || expr->op() == Token::OR) {
4773 bool is_logical_and = (expr->op() == Token::AND);
4774 if (ast_context()->IsTest()) {
4775 TestContext* context = TestContext::cast(ast_context());
4776 // Translate left subexpression.
4777 HBasicBlock* eval_right = graph()->CreateBasicBlock();
4778 if (is_logical_and) {
4779 VISIT_FOR_CONTROL(expr->left(), eval_right, context->if_false());
4780 } else {
4781 VISIT_FOR_CONTROL(expr->left(), context->if_true(), eval_right);
4782 }
4783 eval_right->SetJoinId(expr->RightId());
4784
4785 // Translate right subexpression by visiting it in the same AST
4786 // context as the entire expression.
4787 subgraph()->set_exit_block(eval_right);
4788 Visit(expr->right());
4789
4790 } else {
4791 VISIT_FOR_VALUE(expr->left());
4792 ASSERT(current_subgraph_->HasExit());
4793
4794 HValue* left = Top();
4795 HEnvironment* environment_copy = environment()->Copy();
4796 environment_copy->Pop();
4797 HSubgraph* right_subgraph;
4798 right_subgraph = CreateBranchSubgraph(environment_copy);
4799 ADD_TO_SUBGRAPH(right_subgraph, expr->right());
4800 current_subgraph_->AppendOptional(right_subgraph, is_logical_and, left);
4801 current_subgraph_->exit_block()->SetJoinId(expr->id());
4802 ast_context()->ReturnValue(Pop());
4803 }
4804
4805 } else {
4806 VISIT_FOR_VALUE(expr->left());
4807 VISIT_FOR_VALUE(expr->right());
4808
4809 HValue* right = Pop();
4810 HValue* left = Pop();
4811 HInstruction* instr = BuildBinaryOperation(expr, left, right);
4812 instr->set_position(expr->position());
4813 ast_context()->ReturnInstruction(instr, expr->id());
4814 }
4815}
4816
4817
4818void HGraphBuilder::AssumeRepresentation(HValue* value, Representation r) {
4819 if (value->CheckFlag(HValue::kFlexibleRepresentation)) {
4820 if (FLAG_trace_representation) {
4821 PrintF("Assume representation for %s to be %s (%d)\n",
4822 value->Mnemonic(),
4823 r.Mnemonic(),
4824 graph_->GetMaximumValueID());
4825 }
4826 value->ChangeRepresentation(r);
4827 // The representation of the value is dictated by type feedback.
4828 value->ClearFlag(HValue::kFlexibleRepresentation);
4829 } else if (FLAG_trace_representation) {
4830 PrintF("No representation assumed\n");
4831 }
4832}
4833
4834
4835Representation HGraphBuilder::ToRepresentation(TypeInfo info) {
4836 if (info.IsSmi()) return Representation::Integer32();
4837 if (info.IsInteger32()) return Representation::Integer32();
4838 if (info.IsDouble()) return Representation::Double();
4839 if (info.IsNumber()) return Representation::Double();
4840 return Representation::Tagged();
4841}
4842
4843
4844void HGraphBuilder::VisitCompareOperation(CompareOperation* expr) {
4845 if (IsClassOfTest(expr)) {
4846 CallRuntime* call = expr->left()->AsCallRuntime();
4847 VISIT_FOR_VALUE(call->arguments()->at(0));
4848 HValue* value = Pop();
4849 Literal* literal = expr->right()->AsLiteral();
4850 Handle<String> rhs = Handle<String>::cast(literal->handle());
4851 HInstruction* instr = new HClassOfTest(value, rhs);
4852 instr->set_position(expr->position());
4853 ast_context()->ReturnInstruction(instr, expr->id());
4854 return;
4855 }
4856
4857 // Check for the pattern: typeof <expression> == <string literal>.
4858 UnaryOperation* left_unary = expr->left()->AsUnaryOperation();
4859 Literal* right_literal = expr->right()->AsLiteral();
4860 if ((expr->op() == Token::EQ || expr->op() == Token::EQ_STRICT) &&
4861 left_unary != NULL && left_unary->op() == Token::TYPEOF &&
4862 right_literal != NULL && right_literal->handle()->IsString()) {
4863 VISIT_FOR_VALUE(left_unary->expression());
4864 HValue* left = Pop();
4865 HInstruction* instr = new HTypeofIs(left,
4866 Handle<String>::cast(right_literal->handle()));
4867 instr->set_position(expr->position());
4868 ast_context()->ReturnInstruction(instr, expr->id());
4869 return;
4870 }
4871
4872 VISIT_FOR_VALUE(expr->left());
4873 VISIT_FOR_VALUE(expr->right());
4874
4875 HValue* right = Pop();
4876 HValue* left = Pop();
4877 Token::Value op = expr->op();
4878
4879 TypeInfo info = oracle()->CompareType(expr, TypeFeedbackOracle::RESULT);
4880 HInstruction* instr = NULL;
4881 if (op == Token::INSTANCEOF) {
Ben Murdoch086aeea2011-05-13 15:57:08 +01004882 // Check to see if the rhs of the instanceof is a global function not
4883 // residing in new space. If it is we assume that the function will stay the
4884 // same.
4885 Handle<JSFunction> target = Handle<JSFunction>::null();
4886 Variable* var = expr->right()->AsVariableProxy()->AsVariable();
4887 bool global_function = (var != NULL) && var->is_global() && !var->is_this();
4888 CompilationInfo* info = graph()->info();
4889 if (global_function &&
4890 info->has_global_object() &&
4891 !info->global_object()->IsAccessCheckNeeded()) {
4892 Handle<String> name = var->name();
4893 Handle<GlobalObject> global(info->global_object());
4894 LookupResult lookup;
4895 global->Lookup(*name, &lookup);
4896 if (lookup.IsProperty() &&
4897 lookup.type() == NORMAL &&
4898 lookup.GetValue()->IsJSFunction()) {
4899 Handle<JSFunction> candidate(JSFunction::cast(lookup.GetValue()));
4900 // If the function is in new space we assume it's more likely to
4901 // change and thus prefer the general IC code.
4902 if (!Heap::InNewSpace(*candidate)) {
4903 target = candidate;
4904 }
4905 }
4906 }
4907
4908 // If the target is not null we have found a known global function that is
4909 // assumed to stay the same for this instanceof.
4910 if (target.is_null()) {
4911 instr = new HInstanceOf(left, right);
4912 } else {
4913 AddInstruction(new HCheckFunction(right, target));
4914 instr = new HInstanceOfKnownGlobal(left, target);
4915 }
Ben Murdochb0fe1622011-05-05 13:52:32 +01004916 } else if (op == Token::IN) {
4917 BAILOUT("Unsupported comparison: in");
4918 } else if (info.IsNonPrimitive()) {
4919 switch (op) {
4920 case Token::EQ:
4921 case Token::EQ_STRICT: {
Steve Block9fac8402011-05-12 15:51:54 +01004922 AddInstruction(new HCheckNonSmi(left));
Ben Murdochb0fe1622011-05-05 13:52:32 +01004923 AddInstruction(HCheckInstanceType::NewIsJSObjectOrJSFunction(left));
Steve Block9fac8402011-05-12 15:51:54 +01004924 AddInstruction(new HCheckNonSmi(right));
Ben Murdochb0fe1622011-05-05 13:52:32 +01004925 AddInstruction(HCheckInstanceType::NewIsJSObjectOrJSFunction(right));
4926 instr = new HCompareJSObjectEq(left, right);
4927 break;
4928 }
4929 default:
4930 BAILOUT("Unsupported non-primitive compare");
4931 break;
4932 }
4933 } else {
4934 HCompare* compare = new HCompare(left, right, op);
4935 Representation r = ToRepresentation(info);
4936 compare->SetInputRepresentation(r);
4937 instr = compare;
4938 }
4939 instr->set_position(expr->position());
4940 ast_context()->ReturnInstruction(instr, expr->id());
4941}
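// Recap of the special cases above (informal): %_ClassOf equality tests,
// 'typeof x == "literal"' patterns, instanceof against a known global
// function, and comparisons known to involve non-primitives each get a
// dedicated instruction; 'in' bails out; everything else becomes an HCompare
// whose input representation is taken from the recorded type feedback.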
4942
4943
4944void HGraphBuilder::VisitCompareToNull(CompareToNull* expr) {
4945 VISIT_FOR_VALUE(expr->expression());
4946
4947 HValue* value = Pop();
4948 HIsNull* compare = new HIsNull(value, expr->is_strict());
4949 ast_context()->ReturnInstruction(compare, expr->id());
4950}
4951
4952
4953void HGraphBuilder::VisitThisFunction(ThisFunction* expr) {
4954 BAILOUT("ThisFunction");
4955}
4956
4957
4958void HGraphBuilder::VisitDeclaration(Declaration* decl) {
4959 // We allow only declarations that do not require code generation.
4960 // The following all require code generation: global variables and
4961 // functions, variables with slot type LOOKUP, declarations with
4962 // mode CONST, and functions.
4963 Variable* var = decl->proxy()->var();
4964 Slot* slot = var->AsSlot();
4965 if (var->is_global() ||
4966 (slot != NULL && slot->type() == Slot::LOOKUP) ||
4967 decl->mode() == Variable::CONST ||
4968 decl->fun() != NULL) {
4969 BAILOUT("unsupported declaration");
4970 }
4971}
4972
4973
4974// Generators for inline runtime functions.
4975// Support for types.
4976void HGraphBuilder::GenerateIsSmi(int argument_count, int ast_id) {
4977 ASSERT(argument_count == 1);
4978 HValue* value = Pop();
4979 HIsSmi* result = new HIsSmi(value);
4980 ast_context()->ReturnInstruction(result, ast_id);
4981}
4982
4983
4984void HGraphBuilder::GenerateIsSpecObject(int argument_count, int ast_id) {
4985 ASSERT(argument_count == 1);
4986 HValue* value = Pop();
4987 HHasInstanceType* result =
4988 new HHasInstanceType(value, FIRST_JS_OBJECT_TYPE, LAST_TYPE);
4989 ast_context()->ReturnInstruction(result, ast_id);
4990}
4991
4992
4993void HGraphBuilder::GenerateIsFunction(int argument_count, int ast_id) {
4994 ASSERT(argument_count == 1);
4995 HValue* value = Pop();
4996 HHasInstanceType* result = new HHasInstanceType(value, JS_FUNCTION_TYPE);
4997 ast_context()->ReturnInstruction(result, ast_id);
4998}
4999
5000
5001void HGraphBuilder::GenerateHasCachedArrayIndex(int argument_count,
5002 int ast_id) {
5003 ASSERT(argument_count == 1);
5004 HValue* value = Pop();
5005 HHasCachedArrayIndex* result = new HHasCachedArrayIndex(value);
5006 ast_context()->ReturnInstruction(result, ast_id);
5007}
5008
5009
5010void HGraphBuilder::GenerateIsArray(int argument_count, int ast_id) {
5011 ASSERT(argument_count == 1);
5012 HValue* value = Pop();
5013 HHasInstanceType* result = new HHasInstanceType(value, JS_ARRAY_TYPE);
5014 ast_context()->ReturnInstruction(result, ast_id);
5015}
5016
5017
5018void HGraphBuilder::GenerateIsRegExp(int argument_count, int ast_id) {
5019 ASSERT(argument_count == 1);
5020 HValue* value = Pop();
5021 HHasInstanceType* result = new HHasInstanceType(value, JS_REGEXP_TYPE);
5022 ast_context()->ReturnInstruction(result, ast_id);
5023}
5024
5025
5026void HGraphBuilder::GenerateIsObject(int argument_count, int ast_id) {
5027 ASSERT(argument_count == 1);
5028
5029 HValue* value = Pop();
5030 HIsObject* test = new HIsObject(value);
5031 ast_context()->ReturnInstruction(test, ast_id);
5032}
5033
5034
5035void HGraphBuilder::GenerateIsNonNegativeSmi(int argument_count,
5036 int ast_id) {
5037 BAILOUT("inlined runtime function: IsNonNegativeSmi");
5038}
5039
5040
5041void HGraphBuilder::GenerateIsUndetectableObject(int argument_count,
5042 int ast_id) {
5043 BAILOUT("inlined runtime function: IsUndetectableObject");
5044}
5045
5046
5047void HGraphBuilder::GenerateIsStringWrapperSafeForDefaultValueOf(
5048 int argument_count,
5049 int ast_id) {
5050 BAILOUT("inlined runtime function: IsStringWrapperSafeForDefaultValueOf");
5051}
5052
5053
5054// Support for construct call checks.
5055void HGraphBuilder::GenerateIsConstructCall(int argument_count, int ast_id) {
5056 BAILOUT("inlined runtime function: IsConstructCall");
5057}
5058
5059
5060// Support for arguments.length and arguments[?].
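// Added note: an optimized function that reads "arguments.length" or
// "arguments[i]" is lowered to the HArgumentsElements / HArgumentsLength /
// HAccessArgumentsAt instructions below, so no arguments object has to be
// materialized for these uses.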
5061void HGraphBuilder::GenerateArgumentsLength(int argument_count, int ast_id) {
5062 ASSERT(argument_count == 0);
5063 HInstruction* elements = AddInstruction(new HArgumentsElements);
5064 HArgumentsLength* result = new HArgumentsLength(elements);
5065 ast_context()->ReturnInstruction(result, ast_id);
5066}
5067
5068
5069void HGraphBuilder::GenerateArguments(int argument_count, int ast_id) {
5070 ASSERT(argument_count == 1);
5071 HValue* index = Pop();
5072 HInstruction* elements = AddInstruction(new HArgumentsElements);
5073 HInstruction* length = AddInstruction(new HArgumentsLength(elements));
5074 HAccessArgumentsAt* result = new HAccessArgumentsAt(elements, length, index);
5075 ast_context()->ReturnInstruction(result, ast_id);
5076}
5077
5078
5079// Support for accessing the class and value fields of an object.
5080void HGraphBuilder::GenerateClassOf(int argument_count, int ast_id) {
5081  // The special form recognized by IsClassOfTest is handled before we get
5082  // here and does not cause a bailout.
5083 BAILOUT("inlined runtime function: ClassOf");
5084}
5085
5086
5087void HGraphBuilder::GenerateValueOf(int argument_count, int ast_id) {
5088 ASSERT(argument_count == 1);
5089 HValue* value = Pop();
5090 HValueOf* result = new HValueOf(value);
5091 ast_context()->ReturnInstruction(result, ast_id);
5092}
5093
5094
5095void HGraphBuilder::GenerateSetValueOf(int argument_count, int ast_id) {
5096 BAILOUT("inlined runtime function: SetValueOf");
5097}
5098
5099
5100// Fast support for charCodeAt(n).
5101void HGraphBuilder::GenerateStringCharCodeAt(int argument_count, int ast_id) {
5102 BAILOUT("inlined runtime function: StringCharCodeAt");
5103}
5104
5105
5106// Fast support for String.fromCharCode.
5107void HGraphBuilder::GenerateStringCharFromCode(int argument_count,
5108 int ast_id) {
5109 BAILOUT("inlined runtime function: StringCharFromCode");
5110}
5111
5112
5113// Fast support for string.charAt(n) and string[n].
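// Added note: unlike the two string intrinsics above, this one is supported;
// its two pushed arguments (presumably the string and the index, per the
// ASSERT below) are handed to the StringCharAt code stub.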
5114void HGraphBuilder::GenerateStringCharAt(int argument_count, int ast_id) {
5115 ASSERT_EQ(2, argument_count);
5116 PushArgumentsForStubCall(argument_count);
5117 HCallStub* result = new HCallStub(CodeStub::StringCharAt, argument_count);
5118 ast_context()->ReturnInstruction(result, ast_id);
5119}
5120
5121
5122// Fast support for object equality testing.
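// Added note: this intrinsic lowers to HCompareJSObjectEq, i.e. a reference
// identity comparison of the two popped values rather than a full equality
// check.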
5123void HGraphBuilder::GenerateObjectEquals(int argument_count, int ast_id) {
5124 ASSERT(argument_count == 2);
5125 HValue* right = Pop();
5126 HValue* left = Pop();
5127 HCompareJSObjectEq* result = new HCompareJSObjectEq(left, right);
5128 ast_context()->ReturnInstruction(result, ast_id);
5129}
5130
5131
5132void HGraphBuilder::GenerateLog(int argument_count, int ast_id) {
5133 UNREACHABLE(); // We caught this in VisitCallRuntime.
5134}
5135
5136
5137// Fast support for Math.random().
5138void HGraphBuilder::GenerateRandomHeapNumber(int argument_count, int ast_id) {
5139 BAILOUT("inlined runtime function: RandomHeapNumber");
5140}
5141
5142
5143// Fast support for StringAdd.
5144void HGraphBuilder::GenerateStringAdd(int argument_count, int ast_id) {
5145 ASSERT_EQ(2, argument_count);
5146 PushArgumentsForStubCall(argument_count);
5147 HCallStub* result = new HCallStub(CodeStub::StringAdd, argument_count);
5148 ast_context()->ReturnInstruction(result, ast_id);
5149}
5150
5151
5152// Fast support for SubString.
5153void HGraphBuilder::GenerateSubString(int argument_count, int ast_id) {
5154 ASSERT_EQ(3, argument_count);
5155 PushArgumentsForStubCall(argument_count);
5156 HCallStub* result = new HCallStub(CodeStub::SubString, argument_count);
5157 ast_context()->ReturnInstruction(result, ast_id);
5158}
5159
5160
5161// Fast support for StringCompare.
5162void HGraphBuilder::GenerateStringCompare(int argument_count, int ast_id) {
5163 ASSERT_EQ(2, argument_count);
5164 PushArgumentsForStubCall(argument_count);
5165 HCallStub* result = new HCallStub(CodeStub::StringCompare, argument_count);
5166 ast_context()->ReturnInstruction(result, ast_id);
5167}
5168
5169
5170// Support for direct calls from JavaScript to native RegExp code.
5171void HGraphBuilder::GenerateRegExpExec(int argument_count, int ast_id) {
5172 ASSERT_EQ(4, argument_count);
5173 PushArgumentsForStubCall(argument_count);
5174 HCallStub* result = new HCallStub(CodeStub::RegExpExec, argument_count);
5175 ast_context()->ReturnInstruction(result, ast_id);
5176}
5177
5178
5179// Construct a RegExp exec result with two in-object properties.
5180void HGraphBuilder::GenerateRegExpConstructResult(int argument_count,
5181 int ast_id) {
5182 ASSERT_EQ(3, argument_count);
5183 PushArgumentsForStubCall(argument_count);
5184 HCallStub* result =
5185 new HCallStub(CodeStub::RegExpConstructResult, argument_count);
5186 ast_context()->ReturnInstruction(result, ast_id);
5187}
5188
5189
5190// Support for fast native caches.
5191void HGraphBuilder::GenerateGetFromCache(int argument_count, int ast_id) {
5192 BAILOUT("inlined runtime function: GetFromCache");
5193}
5194
5195
5196// Fast support for number to string.
5197void HGraphBuilder::GenerateNumberToString(int argument_count, int ast_id) {
5198 ASSERT_EQ(1, argument_count);
5199 PushArgumentsForStubCall(argument_count);
5200 HCallStub* result = new HCallStub(CodeStub::NumberToString, argument_count);
5201 ast_context()->ReturnInstruction(result, ast_id);
5202}
5203
5204
5205// Fast swapping of elements. Takes three expressions, the object and two
5206// indices. This should only be used if the indices are known to be
5207// non-negative and within bounds of the elements array at the call site.
5208void HGraphBuilder::GenerateSwapElements(int argument_count, int ast_id) {
5209 BAILOUT("inlined runtime function: SwapElements");
5210}
5211
5212
5213// Fast call for custom callbacks.
5214void HGraphBuilder::GenerateCallFunction(int argument_count, int ast_id) {
5215 BAILOUT("inlined runtime function: CallFunction");
5216}
5217
5218
5219// Fast call to math functions.
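// Added note: of the math intrinsics, only the pow intrinsic gets a dedicated
// HIR instruction (HPower); sin, cos and log below go through the
// TranscendentalCache stub, and sqrt currently bails out.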
5220void HGraphBuilder::GenerateMathPow(int argument_count, int ast_id) {
5221 ASSERT_EQ(2, argument_count);
5222 HValue* right = Pop();
5223 HValue* left = Pop();
5224 HPower* result = new HPower(left, right);
5225 ast_context()->ReturnInstruction(result, ast_id);
5226}
5227
5228
5229void HGraphBuilder::GenerateMathSin(int argument_count, int ast_id) {
5230 ASSERT_EQ(1, argument_count);
5231 PushArgumentsForStubCall(argument_count);
5232 HCallStub* result =
5233 new HCallStub(CodeStub::TranscendentalCache, argument_count);
5234 result->set_transcendental_type(TranscendentalCache::SIN);
5235 ast_context()->ReturnInstruction(result, ast_id);
5236}
5237
5238
5239void HGraphBuilder::GenerateMathCos(int argument_count, int ast_id) {
5240 ASSERT_EQ(1, argument_count);
5241 PushArgumentsForStubCall(argument_count);
5242 HCallStub* result =
5243 new HCallStub(CodeStub::TranscendentalCache, argument_count);
5244 result->set_transcendental_type(TranscendentalCache::COS);
5245 ast_context()->ReturnInstruction(result, ast_id);
5246}
5247
5248
5249void HGraphBuilder::GenerateMathLog(int argument_count, int ast_id) {
5250 ASSERT_EQ(1, argument_count);
5251 PushArgumentsForStubCall(argument_count);
5252 HCallStub* result =
5253 new HCallStub(CodeStub::TranscendentalCache, argument_count);
5254 result->set_transcendental_type(TranscendentalCache::LOG);
5255 ast_context()->ReturnInstruction(result, ast_id);
5256}
5257
5258
5259void HGraphBuilder::GenerateMathSqrt(int argument_count, int ast_id) {
5260 BAILOUT("inlined runtime function: MathSqrt");
5261}
5262
5263
5264// Check whether two RegExps are equivalent.
5265void HGraphBuilder::GenerateIsRegExpEquivalent(int argument_count,
5266 int ast_id) {
5267 BAILOUT("inlined runtime function: IsRegExpEquivalent");
5268}
5269
5270
5271void HGraphBuilder::GenerateGetCachedArrayIndex(int argument_count,
5272 int ast_id) {
5273 BAILOUT("inlined runtime function: GetCachedArrayIndex");
5274}
5275
5276
5277void HGraphBuilder::GenerateFastAsciiArrayJoin(int argument_count,
5278 int ast_id) {
5279 BAILOUT("inlined runtime function: FastAsciiArrayJoin");
5280}
5281
5282
5283#undef BAILOUT
5284#undef CHECK_BAILOUT
5285#undef VISIT_FOR_EFFECT
5286#undef VISIT_FOR_VALUE
5287#undef ADD_TO_SUBGRAPH
5288
5289
5290HEnvironment::HEnvironment(HEnvironment* outer,
5291 Scope* scope,
5292 Handle<JSFunction> closure)
5293 : closure_(closure),
5294 values_(0),
5295 assigned_variables_(4),
5296 parameter_count_(0),
5297 local_count_(0),
5298 outer_(outer),
5299 pop_count_(0),
5300 push_count_(0),
5301 ast_id_(AstNode::kNoNumber) {
5302 Initialize(scope->num_parameters() + 1, scope->num_stack_slots(), 0);
5303}
5304
5305
5306HEnvironment::HEnvironment(const HEnvironment* other)
5307 : values_(0),
5308 assigned_variables_(0),
5309 parameter_count_(0),
5310 local_count_(0),
5311 outer_(NULL),
5312 pop_count_(0),
5313 push_count_(0),
5314 ast_id_(other->ast_id()) {
5315 Initialize(other);
5316}
5317
5318
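// Added illustration: values_ is laid out as
//   [receiver, parameters..., locals..., expression stack...].
// For a closure with 2 parameters and 3 stack-allocated locals,
// parameter_count_ is 3 (the receiver is included), indices 3..5 hold the
// locals, and any index >= 6 belongs to the expression stack (compare
// HasExpressionAt below).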
5319void HEnvironment::Initialize(int parameter_count,
5320 int local_count,
5321 int stack_height) {
5322 parameter_count_ = parameter_count;
5323 local_count_ = local_count;
5324
5325 // Avoid reallocating the temporaries' backing store on the first Push.
5326 int total = parameter_count + local_count + stack_height;
5327 values_.Initialize(total + 4);
5328 for (int i = 0; i < total; ++i) values_.Add(NULL);
5329}
5330
5331
5332void HEnvironment::Initialize(const HEnvironment* other) {
5333 closure_ = other->closure();
5334 values_.AddAll(other->values_);
5335 assigned_variables_.AddAll(other->assigned_variables_);
5336 parameter_count_ = other->parameter_count_;
5337 local_count_ = other->local_count_;
5338 if (other->outer_ != NULL) outer_ = other->outer_->Copy(); // Deep copy.
5339 pop_count_ = other->pop_count_;
5340 push_count_ = other->push_count_;
5341 ast_id_ = other->ast_id_;
5342}
5343
5344
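// Added explanatory comment: when merging a new incoming edge into a join
// block, a slot whose value differs from the value already recorded gets a
// phi; the phi is seeded with the old value once per predecessor registered
// so far and then extended with the value from the new edge.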
5345void HEnvironment::AddIncomingEdge(HBasicBlock* block, HEnvironment* other) {
5346 ASSERT(!block->IsLoopHeader());
5347 ASSERT(values_.length() == other->values_.length());
5348
5349 int length = values_.length();
5350 for (int i = 0; i < length; ++i) {
5351 HValue* value = values_[i];
5352 if (value != NULL && value->IsPhi() && value->block() == block) {
5353 // There is already a phi for the i'th value.
5354 HPhi* phi = HPhi::cast(value);
5355 // Assert index is correct and that we haven't missed an incoming edge.
5356 ASSERT(phi->merged_index() == i);
5357 ASSERT(phi->OperandCount() == block->predecessors()->length());
5358 phi->AddInput(other->values_[i]);
5359 } else if (values_[i] != other->values_[i]) {
5360      // There is a fresh value on the incoming edge; a phi is needed.
5361 ASSERT(values_[i] != NULL && other->values_[i] != NULL);
5362 HPhi* phi = new HPhi(i);
5363 HValue* old_value = values_[i];
5364 for (int j = 0; j < block->predecessors()->length(); j++) {
5365 phi->AddInput(old_value);
5366 }
5367 phi->AddInput(other->values_[i]);
5368 this->values_[i] = phi;
5369 block->AddPhi(phi);
5370 }
5371 }
5372}
5373
5374
5375void HEnvironment::Bind(int index, HValue* value) {
5376 ASSERT(value != NULL);
5377 if (!assigned_variables_.Contains(index)) {
5378 assigned_variables_.Add(index);
5379 }
5380 values_[index] = value;
5381}
5382
5383
5384bool HEnvironment::HasExpressionAt(int index) const {
5385 return index >= parameter_count_ + local_count_;
5386}
5387
5388
5389bool HEnvironment::ExpressionStackIsEmpty() const {
5390 int first_expression = parameter_count() + local_count();
5391 ASSERT(length() >= first_expression);
5392 return length() == first_expression;
5393}
5394
5395
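// Added worked example: with push_count_ == 0, writing the value one slot
// below the top (index_from_top == 1, so count == 2) records pop_count_ += 2
// and push_count_ = 2, which is equivalent in the simulated history to
// popping two values and pushing them back with the replacement in place.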
5396void HEnvironment::SetExpressionStackAt(int index_from_top, HValue* value) {
5397 int count = index_from_top + 1;
5398 int index = values_.length() - count;
5399 ASSERT(HasExpressionAt(index));
5400 // The push count must include at least the element in question or else
5401 // the new value will not be included in this environment's history.
5402 if (push_count_ < count) {
5403 // This is the same effect as popping then re-pushing 'count' elements.
5404 pop_count_ += (count - push_count_);
5405 push_count_ = count;
5406 }
5407 values_[index] = value;
5408}
5409
5410
5411void HEnvironment::Drop(int count) {
5412 for (int i = 0; i < count; ++i) {
5413 Pop();
5414  }
5415}
5416
5417
5418HEnvironment* HEnvironment::Copy() const {
5419 return new HEnvironment(this);
5420}
5421
5422
5423HEnvironment* HEnvironment::CopyWithoutHistory() const {
5424 HEnvironment* result = Copy();
5425 result->ClearHistory();
5426 return result;
5427}
5428
5429
5430HEnvironment* HEnvironment::CopyAsLoopHeader(HBasicBlock* loop_header) const {
5431 HEnvironment* new_env = Copy();
5432 for (int i = 0; i < values_.length(); ++i) {
5433 HPhi* phi = new HPhi(i);
5434 phi->AddInput(values_[i]);
5435 new_env->values_[i] = phi;
5436 loop_header->AddPhi(phi);
5437 }
5438 new_env->ClearHistory();
5439 return new_env;
5440}
5441
5442
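// Added note: used when a call is inlined; the inner environment receives the
// caller's receiver and argument values in the callee's parameter slots, and
// the callee's stack-allocated locals start out as undefined.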
5443HEnvironment* HEnvironment::CopyForInlining(Handle<JSFunction> target,
5444 FunctionLiteral* function,
5445 bool is_speculative,
5446 HConstant* undefined) const {
5447 // Outer environment is a copy of this one without the arguments.
5448 int arity = function->scope()->num_parameters();
5449 HEnvironment* outer = Copy();
5450 outer->Drop(arity + 1); // Including receiver.
5451 outer->ClearHistory();
5452 HEnvironment* inner = new HEnvironment(outer, function->scope(), target);
5453 // Get the argument values from the original environment.
5454 if (is_speculative) {
5455 for (int i = 0; i <= arity; ++i) { // Include receiver.
5456 HValue* push = ExpressionStackAt(arity - i);
5457 inner->SetValueAt(i, push);
5458 }
5459 } else {
5460 for (int i = 0; i <= arity; ++i) { // Include receiver.
5461 inner->SetValueAt(i, ExpressionStackAt(arity - i));
5462 }
5463 }
5464
5465 // Initialize the stack-allocated locals to undefined.
5466 int local_base = arity + 1;
5467 int local_count = function->scope()->num_stack_slots();
5468 for (int i = 0; i < local_count; ++i) {
5469 inner->SetValueAt(local_base + i, undefined);
5470 }
5471
5472 inner->set_ast_id(function->id());
5473 return inner;
5474}
5475
5476
5477void HEnvironment::PrintTo(StringStream* stream) {
5478  for (int i = 0; i < length(); i++) {
5479    if (i == 0) stream->Add("parameters\n");
5480 if (i == parameter_count()) stream->Add("locals\n");
5481 if (i == parameter_count() + local_count()) stream->Add("expressions");
5482 HValue* val = values_.at(i);
5483 stream->Add("%d: ", i);
5484 if (val != NULL) {
5485 val->PrintNameTo(stream);
5486 } else {
5487 stream->Add("NULL");
5488 }
5489 stream->Add("\n");
5490 }
5491}
5492
5493
5494void HEnvironment::PrintToStd() {
5495 HeapStringAllocator string_allocator;
5496 StringStream trace(&string_allocator);
5497 PrintTo(&trace);
5498 PrintF("%s", *trace.ToCString());
5499}
5500
5501
5502void HTracer::TraceCompilation(FunctionLiteral* function) {
5503 Tag tag(this, "compilation");
5504 Handle<String> name = function->debug_name();
5505 PrintStringProperty("name", *name->ToCString());
5506 PrintStringProperty("method", *name->ToCString());
5507 PrintLongProperty("date", static_cast<int64_t>(OS::TimeCurrentMillis()));
5508}
5509
5510
5511void HTracer::TraceLithium(const char* name, LChunk* chunk) {
5512 Trace(name, chunk->graph(), chunk);
5513}
5514
5515
5516void HTracer::TraceHydrogen(const char* name, HGraph* graph) {
5517 Trace(name, graph, NULL);
5518}
5519
5520
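// Added note (an assumption, not stated in this file): the "cfg" trace
// written here follows the C1Visualizer format, so FLAG_trace_hydrogen output
// can be opened with the same tool used for HotSpot's client compiler.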
5521void HTracer::Trace(const char* name, HGraph* graph, LChunk* chunk) {
5522 Tag tag(this, "cfg");
5523 PrintStringProperty("name", name);
5524 const ZoneList<HBasicBlock*>* blocks = graph->blocks();
5525 for (int i = 0; i < blocks->length(); i++) {
5526 HBasicBlock* current = blocks->at(i);
5527 Tag block_tag(this, "block");
5528 PrintBlockProperty("name", current->block_id());
5529 PrintIntProperty("from_bci", -1);
5530 PrintIntProperty("to_bci", -1);
5531
5532 if (!current->predecessors()->is_empty()) {
5533 PrintIndent();
5534 trace_.Add("predecessors");
5535 for (int j = 0; j < current->predecessors()->length(); ++j) {
5536 trace_.Add(" \"B%d\"", current->predecessors()->at(j)->block_id());
5537 }
5538 trace_.Add("\n");
5539 } else {
5540 PrintEmptyProperty("predecessors");
5541 }
5542
5543 if (current->end() == NULL || current->end()->FirstSuccessor() == NULL) {
5544 PrintEmptyProperty("successors");
5545 } else if (current->end()->SecondSuccessor() == NULL) {
5546 PrintBlockProperty("successors",
5547 current->end()->FirstSuccessor()->block_id());
5548 } else {
5549 PrintBlockProperty("successors",
5550 current->end()->FirstSuccessor()->block_id(),
5551 current->end()->SecondSuccessor()->block_id());
5552 }
5553
5554 PrintEmptyProperty("xhandlers");
5555 PrintEmptyProperty("flags");
5556
5557 if (current->dominator() != NULL) {
5558 PrintBlockProperty("dominator", current->dominator()->block_id());
5559 }
5560
5561 if (chunk != NULL) {
5562 int first_index = current->first_instruction_index();
5563 int last_index = current->last_instruction_index();
5564 PrintIntProperty(
5565 "first_lir_id",
5566 LifetimePosition::FromInstructionIndex(first_index).Value());
5567 PrintIntProperty(
5568 "last_lir_id",
5569 LifetimePosition::FromInstructionIndex(last_index).Value());
5570 }
5571
5572 {
5573 Tag states_tag(this, "states");
5574 Tag locals_tag(this, "locals");
5575 int total = current->phis()->length();
5576 trace_.Add("size %d\n", total);
5577 trace_.Add("method \"None\"");
5578 for (int j = 0; j < total; ++j) {
5579 HPhi* phi = current->phis()->at(j);
5580 trace_.Add("%d ", phi->merged_index());
5581 phi->PrintNameTo(&trace_);
5582 trace_.Add(" ");
5583 phi->PrintTo(&trace_);
5584 trace_.Add("\n");
5585 }
5586 }
5587
5588 {
5589 Tag HIR_tag(this, "HIR");
5590 HInstruction* instruction = current->first();
5591 while (instruction != NULL) {
5592 int bci = 0;
5593 int uses = instruction->uses()->length();
5594 trace_.Add("%d %d ", bci, uses);
5595 instruction->PrintNameTo(&trace_);
5596 trace_.Add(" ");
5597 instruction->PrintTo(&trace_);
5598 trace_.Add(" <|@\n");
5599 instruction = instruction->next();
5600 }
5601 }
5602
5603
5604 if (chunk != NULL) {
5605 Tag LIR_tag(this, "LIR");
5606 int first_index = current->first_instruction_index();
5607 int last_index = current->last_instruction_index();
5608 if (first_index != -1 && last_index != -1) {
5609 const ZoneList<LInstruction*>* instructions = chunk->instructions();
5610 for (int i = first_index; i <= last_index; ++i) {
5611 LInstruction* linstr = instructions->at(i);
5612 if (linstr != NULL) {
5613 trace_.Add("%d ",
5614 LifetimePosition::FromInstructionIndex(i).Value());
5615 linstr->PrintTo(&trace_);
5616 trace_.Add(" <|@\n");
5617 }
5618 }
5619 }
5620 }
5621 }
5622}
5623
5624
5625void HTracer::TraceLiveRanges(const char* name, LAllocator* allocator) {
5626 Tag tag(this, "intervals");
5627 PrintStringProperty("name", name);
5628
5629 const ZoneList<LiveRange*>* fixed_d = allocator->fixed_double_live_ranges();
5630 for (int i = 0; i < fixed_d->length(); ++i) {
5631 TraceLiveRange(fixed_d->at(i), "fixed");
5632 }
5633
5634 const ZoneList<LiveRange*>* fixed = allocator->fixed_live_ranges();
5635 for (int i = 0; i < fixed->length(); ++i) {
5636 TraceLiveRange(fixed->at(i), "fixed");
5637 }
5638
5639 const ZoneList<LiveRange*>* live_ranges = allocator->live_ranges();
5640 for (int i = 0; i < live_ranges->length(); ++i) {
5641 TraceLiveRange(live_ranges->at(i), "object");
5642 }
5643}
5644
5645
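// Added illustration derived from the format strings below: a non-empty range
// prints on one line roughly as
//   12 object "eax" 12 -1 [10, 24[ 18 M ""
// i.e. id, type, assigned register or spill slot, parent id, hint index, the
// half-open [start, end[ use intervals, and the positions where a register is
// beneficial.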
5646void HTracer::TraceLiveRange(LiveRange* range, const char* type) {
5647 if (range != NULL && !range->IsEmpty()) {
5648 trace_.Add("%d %s", range->id(), type);
5649 if (range->HasRegisterAssigned()) {
5650 LOperand* op = range->CreateAssignedOperand();
5651 int assigned_reg = op->index();
5652 if (op->IsDoubleRegister()) {
5653 trace_.Add(" \"%s\"",
5654 DoubleRegister::AllocationIndexToString(assigned_reg));
5655 } else {
5656 ASSERT(op->IsRegister());
5657 trace_.Add(" \"%s\"", Register::AllocationIndexToString(assigned_reg));
5658 }
5659 } else if (range->IsSpilled()) {
5660 LOperand* op = range->TopLevel()->GetSpillOperand();
5661 if (op->IsDoubleStackSlot()) {
5662 trace_.Add(" \"double_stack:%d\"", op->index());
5663 } else {
5664 ASSERT(op->IsStackSlot());
5665 trace_.Add(" \"stack:%d\"", op->index());
5666 }
5667 }
5668 int parent_index = -1;
5669 if (range->IsChild()) {
5670 parent_index = range->parent()->id();
5671 } else {
5672 parent_index = range->id();
5673 }
5674 LOperand* op = range->FirstHint();
5675 int hint_index = -1;
5676 if (op != NULL && op->IsUnallocated()) hint_index = op->VirtualRegister();
5677 trace_.Add(" %d %d", parent_index, hint_index);
5678 UseInterval* cur_interval = range->first_interval();
5679 while (cur_interval != NULL) {
5680 trace_.Add(" [%d, %d[",
5681 cur_interval->start().Value(),
5682 cur_interval->end().Value());
5683 cur_interval = cur_interval->next();
5684 }
5685
5686 UsePosition* current_pos = range->first_pos();
5687 while (current_pos != NULL) {
5688 if (current_pos->RegisterIsBeneficial()) {
5689 trace_.Add(" %d M", current_pos->pos().Value());
5690 }
5691 current_pos = current_pos->next();
5692 }
5693
5694 trace_.Add(" \"\"\n");
5695 }
5696}
5697
5698
5699void HTracer::FlushToFile() {
5700 AppendChars(filename_, *trace_.ToCString(), trace_.length(), false);
5701 trace_.Reset();
5702}
5703
5704
5705void HStatistics::Print() {
5706 PrintF("Timing results:\n");
5707 int64_t sum = 0;
5708 for (int i = 0; i < timing_.length(); ++i) {
5709 sum += timing_[i];
5710 }
5711
5712 for (int i = 0; i < names_.length(); ++i) {
5713 PrintF("%30s", names_[i]);
5714 double ms = static_cast<double>(timing_[i]) / 1000;
5715 double percent = static_cast<double>(timing_[i]) * 100 / sum;
5716 PrintF(" - %0.3f ms / %0.3f %% \n", ms, percent);
5717 }
5718 PrintF("%30s - %0.3f ms \n", "Sum", static_cast<double>(sum) / 1000);
5719 PrintF("---------------------------------------------------------------\n");
5720 PrintF("%30s - %0.3f ms (%0.1f times slower than full code gen)\n",
5721 "Total",
5722 static_cast<double>(total_) / 1000,
5723 static_cast<double>(total_) / full_code_gen_);
5724}
5725
5726
5727void HStatistics::SaveTiming(const char* name, int64_t ticks) {
5728 if (name == HPhase::kFullCodeGen) {
5729 full_code_gen_ += ticks;
5730 } else if (name == HPhase::kTotal) {
5731 total_ += ticks;
5732 } else {
5733 for (int i = 0; i < names_.length(); ++i) {
5734 if (names_[i] == name) {
5735 timing_[i] += ticks;
5736 return;
5737 }
5738 }
5739 names_.Add(name);
5740 timing_.Add(ticks);
5741 }
5742}
5743
5744
5745const char* const HPhase::kFullCodeGen = "Full code generator";
5746const char* const HPhase::kTotal = "Total";
5747
5748
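// Added note: an HPhase brackets one compiler phase.  Begin records the start
// tick when FLAG_time_hydrogen is set, and End (below) reports the elapsed
// time to HStatistics and, with FLAG_trace_hydrogen, dumps the graph, chunk
// and live ranges through HTracer before the DEBUG-only verification.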
5749void HPhase::Begin(const char* name,
5750 HGraph* graph,
5751 LChunk* chunk,
5752 LAllocator* allocator) {
5753 name_ = name;
5754 graph_ = graph;
5755 chunk_ = chunk;
5756 allocator_ = allocator;
5757 if (allocator != NULL && chunk_ == NULL) {
5758 chunk_ = allocator->chunk();
5759 }
5760 if (FLAG_time_hydrogen) start_ = OS::Ticks();
5761}
5762
5763
5764void HPhase::End() const {
5765 if (FLAG_time_hydrogen) {
5766 int64_t end = OS::Ticks();
5767 HStatistics::Instance()->SaveTiming(name_, end - start_);
5768 }
5769
5770 if (FLAG_trace_hydrogen) {
5771 if (graph_ != NULL) HTracer::Instance()->TraceHydrogen(name_, graph_);
5772 if (chunk_ != NULL) HTracer::Instance()->TraceLithium(name_, chunk_);
5773 if (allocator_ != NULL) {
5774 HTracer::Instance()->TraceLiveRanges(name_, allocator_);
5775 }
5776 }
5777
5778#ifdef DEBUG
5779 if (graph_ != NULL) graph_->Verify();
5780 if (chunk_ != NULL) chunk_->Verify();
5781 if (allocator_ != NULL) allocator_->Verify();
5782#endif
5783}
5784
5785} } // namespace v8::internal