// Copyright 2012 the V8 project authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.

#include "src/full-codegen/full-codegen.h"

#include "src/ast/ast.h"
#include "src/ast/ast-numbering.h"
#include "src/ast/prettyprinter.h"
#include "src/ast/scopeinfo.h"
#include "src/ast/scopes.h"
#include "src/code-factory.h"
#include "src/codegen.h"
#include "src/compiler.h"
#include "src/debug/debug.h"
#include "src/debug/liveedit.h"
#include "src/isolate-inl.h"
#include "src/macro-assembler.h"
#include "src/snapshot/snapshot.h"

namespace v8 {
namespace internal {

#define __ ACCESS_MASM(masm())

bool FullCodeGenerator::MakeCode(CompilationInfo* info) {
  Isolate* isolate = info->isolate();

  TimerEventScope<TimerEventCompileFullCode> timer(info->isolate());

  // Ensure that the feedback vector is large enough.
  info->EnsureFeedbackVector();

  Handle<Script> script = info->script();
  if (!script->IsUndefined() && !script->source()->IsUndefined()) {
    int len = String::cast(script->source())->length();
    isolate->counters()->total_full_codegen_source_size()->Increment(len);
  }
  CodeGenerator::MakeCodePrologue(info, "full");
  const int kInitialBufferSize = 4 * KB;
  MacroAssembler masm(info->isolate(), NULL, kInitialBufferSize,
                      CodeObjectRequired::kYes);
  if (info->will_serialize()) masm.enable_serializer();

  LOG_CODE_EVENT(isolate,
                 CodeStartLinePosInfoRecordEvent(masm.positions_recorder()));

  FullCodeGenerator cgen(&masm, info);
  cgen.Generate();
  if (cgen.HasStackOverflow()) {
    DCHECK(!isolate->has_pending_exception());
    return false;
  }
  unsigned table_offset = cgen.EmitBackEdgeTable();

  Handle<Code> code = CodeGenerator::MakeCodeEpilogue(&masm, info);
  cgen.PopulateDeoptimizationData(code);
  cgen.PopulateTypeFeedbackInfo(code);
  cgen.PopulateHandlerTable(code);
  code->set_has_deoptimization_support(info->HasDeoptimizationSupport());
  code->set_has_reloc_info_for_serialization(info->will_serialize());
  code->set_allow_osr_at_loop_nesting_level(0);
  code->set_profiler_ticks(0);
  code->set_back_edge_table_offset(table_offset);
  CodeGenerator::PrintCode(code, info);
  info->SetCode(code);
  void* line_info = masm.positions_recorder()->DetachJITHandlerData();
  LOG_CODE_EVENT(isolate, CodeEndLinePosInfoRecordEvent(*code, line_info));

#ifdef DEBUG
  // Check that no context-specific object has been embedded.
  code->VerifyEmbeddedObjects(Code::kNoContextSpecificPointers);
#endif  // DEBUG
  return true;
}


unsigned FullCodeGenerator::EmitBackEdgeTable() {
  // The back edge table consists of a length (in number of entries)
  // field, and then a sequence of entries. Each entry is a triple of
  // AST id, code-relative pc offset, and loop depth.
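  //
  // As a sketch, a function with two back edges would emit the following
  // sequence of 32-bit values (one dd() call per bracketed field):
  //
  //   [ 2 ]  [ast_id_0][pc_offset_0][depth_0]  [ast_id_1][pc_offset_1][depth_1]
  //
  // BackEdgeTable (see below) reads this layout back when patching the
  // interrupt checks for on-stack replacement.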
  masm()->Align(kPointerSize);
  unsigned offset = masm()->pc_offset();
  unsigned length = back_edges_.length();
  __ dd(length);
  for (unsigned i = 0; i < length; ++i) {
    __ dd(back_edges_[i].id.ToInt());
    __ dd(back_edges_[i].pc);
    __ dd(back_edges_[i].loop_depth);
  }
  return offset;
}


void FullCodeGenerator::PopulateDeoptimizationData(Handle<Code> code) {
  // Fill in the deoptimization information.
  DCHECK(info_->HasDeoptimizationSupport() || bailout_entries_.is_empty());
  if (!info_->HasDeoptimizationSupport()) return;
  int length = bailout_entries_.length();
  Handle<DeoptimizationOutputData> data =
      DeoptimizationOutputData::New(isolate(), length, TENURED);
  for (int i = 0; i < length; i++) {
    data->SetAstId(i, bailout_entries_[i].id);
    data->SetPcAndState(i, Smi::FromInt(bailout_entries_[i].pc_and_state));
  }
  code->set_deoptimization_data(*data);
}


void FullCodeGenerator::PopulateTypeFeedbackInfo(Handle<Code> code) {
  Handle<TypeFeedbackInfo> info = isolate()->factory()->NewTypeFeedbackInfo();
  info->set_ic_total_count(ic_total_count_);
  DCHECK(!isolate()->heap()->InNewSpace(*info));
  code->set_type_feedback_info(*info);
}


void FullCodeGenerator::PopulateHandlerTable(Handle<Code> code) {
  int handler_table_size = static_cast<int>(handler_table_.size());
  Handle<HandlerTable> table =
      Handle<HandlerTable>::cast(isolate()->factory()->NewFixedArray(
          HandlerTable::LengthForRange(handler_table_size), TENURED));
  for (int i = 0; i < handler_table_size; ++i) {
    HandlerTable::CatchPrediction prediction =
        handler_table_[i].try_catch_depth > 0 ? HandlerTable::CAUGHT
                                              : HandlerTable::UNCAUGHT;
    table->SetRangeStart(i, handler_table_[i].range_start);
    table->SetRangeEnd(i, handler_table_[i].range_end);
    table->SetRangeHandler(i, handler_table_[i].handler_offset, prediction);
    table->SetRangeDepth(i, handler_table_[i].stack_depth);
  }
  code->set_handler_table(*table);
}


int FullCodeGenerator::NewHandlerTableEntry() {
  int index = static_cast<int>(handler_table_.size());
  HandlerTableEntry entry = {0, 0, 0, 0, 0};
  handler_table_.push_back(entry);
  return index;
}


bool FullCodeGenerator::MustCreateObjectLiteralWithRuntime(
    ObjectLiteral* expr) const {
  int literal_flags = expr->ComputeFlags();
  // FastCloneShallowObjectStub doesn't copy elements, and object literals
  // don't support copy-on-write (COW) elements for now.
  // TODO(mvstanton): make object literals support COW elements.
  return masm()->serializer_enabled() ||
         literal_flags != (ObjectLiteral::kShallowProperties |
                           ObjectLiteral::kFastElements) ||
         expr->properties_count() >
             FastCloneShallowObjectStub::kMaximumClonedProperties;
}


bool FullCodeGenerator::MustCreateArrayLiteralWithRuntime(
    ArrayLiteral* expr) const {
  // TODO(rossberg): Teach strong mode to FastCloneShallowArrayStub.
  return expr->depth() > 1 || expr->is_strong() ||
         expr->values()->length() > JSArray::kInitialMaxFastElementArray;
}


void FullCodeGenerator::Initialize() {
  InitializeAstVisitor(info_->isolate());
  // The generation of debug code must match between the snapshot code and the
  // code that is generated later. This is assumed by the debugger when it is
  // calculating PC offsets after generating a debug version of code. Therefore
  // we disable the production of debug code in the full compiler if we are
  // either generating a snapshot or we booted from a snapshot.
  generate_debug_code_ = FLAG_debug_code && !masm_->serializer_enabled() &&
                         !info_->isolate()->snapshot_available();
  masm_->set_emit_debug_code(generate_debug_code_);
  masm_->set_predictable_code_size(true);
}


void FullCodeGenerator::PrepareForBailout(Expression* node, State state) {
  PrepareForBailoutForId(node->id(), state);
}


void FullCodeGenerator::CallLoadIC(TypeofMode typeof_mode,
                                   LanguageMode language_mode,
                                   TypeFeedbackId id) {
  Handle<Code> ic =
      CodeFactory::LoadIC(isolate(), typeof_mode, language_mode).code();
  CallIC(ic, id);
}


void FullCodeGenerator::CallStoreIC(TypeFeedbackId id) {
  Handle<Code> ic = CodeFactory::StoreIC(isolate(), language_mode()).code();
  CallIC(ic, id);
}


void FullCodeGenerator::RecordJSReturnSite(Call* call) {
  // We record the offset of the function return so we can rebuild the frame
  // if the function was inlined, i.e., this is the return address in the
  // inlined function's frame.
  //
  // The state is ignored. We defensively set it to TOS_REG, which is the
  // real state of the unoptimized code at the return site.
  PrepareForBailoutForId(call->ReturnId(), TOS_REG);
#ifdef DEBUG
  // In debug builds, mark the return so we can verify that this function
  // was called.
  DCHECK(!call->return_is_recorded_);
  call->return_is_recorded_ = true;
#endif
}


void FullCodeGenerator::PrepareForBailoutForId(BailoutId id, State state) {
  // There's no need to prepare this code for bailouts from already optimized
  // code or code that can't be optimized.
  if (!info_->HasDeoptimizationSupport()) return;
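  // Pack the machine state and the current code offset into a single
  // Smi-sized word; consumers of the deoptimization data recover both
  // values through the same StateField and PcField bit ranges.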
  unsigned pc_and_state =
      StateField::encode(state) | PcField::encode(masm_->pc_offset());
  DCHECK(Smi::IsValid(pc_and_state));
#ifdef DEBUG
  for (int i = 0; i < bailout_entries_.length(); ++i) {
    DCHECK(bailout_entries_[i].id != id);
  }
#endif
  BailoutEntry entry = { id, pc_and_state };
  bailout_entries_.Add(entry, zone());
}


void FullCodeGenerator::RecordBackEdge(BailoutId ast_id) {
  // The pc offset does not need to be encoded and packed together with a
  // state.
  DCHECK(masm_->pc_offset() > 0);
  DCHECK(loop_depth() > 0);
  uint8_t depth = Min(loop_depth(), Code::kMaxLoopNestingMarker);
  BackEdgeEntry entry =
      { ast_id, static_cast<unsigned>(masm_->pc_offset()), depth };
  back_edges_.Add(entry, zone());
}


bool FullCodeGenerator::ShouldInlineSmiCase(Token::Value op) {
  // Inline the smi case inside loops, but not for division and modulo,
  // which are too complicated and take up too much space.
  if (op == Token::DIV || op == Token::MOD) return false;
  if (FLAG_always_inline_smi_code) return true;
  return loop_depth_ > 0;
}


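// Each expression context delivers a freshly computed value to wherever
// its consumer expects it: EffectContext drops the value,
// AccumulatorValueContext leaves it in the result register,
// StackValueContext pushes it, and TestContext branches on it. For
// example, "x" in "if (x) ..." is visited in a TestContext, whereas "x"
// in "y = x" is visited in an AccumulatorValueContext.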
void FullCodeGenerator::EffectContext::Plug(Variable* var) const {
  DCHECK(var->IsStackAllocated() || var->IsContextSlot());
}


void FullCodeGenerator::AccumulatorValueContext::Plug(Variable* var) const {
  DCHECK(var->IsStackAllocated() || var->IsContextSlot());
  codegen()->GetVar(result_register(), var);
}


void FullCodeGenerator::TestContext::Plug(Variable* var) const {
  DCHECK(var->IsStackAllocated() || var->IsContextSlot());
  // For simplicity we always test the accumulator register.
  codegen()->GetVar(result_register(), var);
  codegen()->PrepareForBailoutBeforeSplit(condition(), false, NULL, NULL);
  codegen()->DoTest(this);
}


void FullCodeGenerator::EffectContext::Plug(Register reg) const {
}


void FullCodeGenerator::AccumulatorValueContext::Plug(Register reg) const {
  __ Move(result_register(), reg);
}


void FullCodeGenerator::StackValueContext::Plug(Register reg) const {
  __ Push(reg);
}


void FullCodeGenerator::TestContext::Plug(Register reg) const {
  // For simplicity we always test the accumulator register.
  __ Move(result_register(), reg);
  codegen()->PrepareForBailoutBeforeSplit(condition(), false, NULL, NULL);
  codegen()->DoTest(this);
}


void FullCodeGenerator::EffectContext::Plug(bool flag) const {}


void FullCodeGenerator::EffectContext::PlugTOS() const {
  __ Drop(1);
}


void FullCodeGenerator::AccumulatorValueContext::PlugTOS() const {
  __ Pop(result_register());
}


void FullCodeGenerator::StackValueContext::PlugTOS() const {
}


void FullCodeGenerator::TestContext::PlugTOS() const {
  // For simplicity we always test the accumulator register.
  __ Pop(result_register());
  codegen()->PrepareForBailoutBeforeSplit(condition(), false, NULL, NULL);
  codegen()->DoTest(this);
}


void FullCodeGenerator::EffectContext::PrepareTest(
    Label* materialize_true,
    Label* materialize_false,
    Label** if_true,
    Label** if_false,
    Label** fall_through) const {
  // In an effect context, the true and the false case branch to the
  // same label.
  *if_true = *if_false = *fall_through = materialize_true;
}


void FullCodeGenerator::AccumulatorValueContext::PrepareTest(
    Label* materialize_true,
    Label* materialize_false,
    Label** if_true,
    Label** if_false,
    Label** fall_through) const {
  *if_true = *fall_through = materialize_true;
  *if_false = materialize_false;
}


void FullCodeGenerator::StackValueContext::PrepareTest(
    Label* materialize_true,
    Label* materialize_false,
    Label** if_true,
    Label** if_false,
    Label** fall_through) const {
  *if_true = *fall_through = materialize_true;
  *if_false = materialize_false;
}


void FullCodeGenerator::TestContext::PrepareTest(
    Label* materialize_true,
    Label* materialize_false,
    Label** if_true,
    Label** if_false,
    Label** fall_through) const {
  *if_true = true_label_;
  *if_false = false_label_;
  *fall_through = fall_through_;
}


void FullCodeGenerator::DoTest(const TestContext* context) {
  DoTest(context->condition(),
         context->true_label(),
         context->false_label(),
         context->fall_through());
}


void FullCodeGenerator::VisitDeclarations(
    ZoneList<Declaration*>* declarations) {
  ZoneList<Handle<Object> >* saved_globals = globals_;
  ZoneList<Handle<Object> > inner_globals(10, zone());
  globals_ = &inner_globals;

  AstVisitor::VisitDeclarations(declarations);

  if (!globals_->is_empty()) {
    // Invoke the platform-dependent code generator to do the actual
    // declaration of the global functions and variables.
    Handle<FixedArray> array =
        isolate()->factory()->NewFixedArray(globals_->length(), TENURED);
    for (int i = 0; i < globals_->length(); ++i)
      array->set(i, *globals_->at(i));
    DeclareGlobals(array);
  }

  globals_ = saved_globals;
}


void FullCodeGenerator::VisitImportDeclaration(ImportDeclaration* declaration) {
  VariableProxy* proxy = declaration->proxy();
  Variable* variable = proxy->var();
  switch (variable->location()) {
    case VariableLocation::GLOBAL:
    case VariableLocation::UNALLOCATED:
      // TODO(rossberg)
      break;

    case VariableLocation::CONTEXT: {
      Comment cmnt(masm_, "[ ImportDeclaration");
      EmitDebugCheckDeclarationContext(variable);
      // TODO(rossberg)
      break;
    }

    case VariableLocation::PARAMETER:
    case VariableLocation::LOCAL:
    case VariableLocation::LOOKUP:
      UNREACHABLE();
  }
}


void FullCodeGenerator::VisitExportDeclaration(ExportDeclaration* declaration) {
  // TODO(rossberg)
}


void FullCodeGenerator::VisitVariableProxy(VariableProxy* expr) {
  Comment cmnt(masm_, "[ VariableProxy");
  EmitVariableLoad(expr);
}


void FullCodeGenerator::VisitSloppyBlockFunctionStatement(
    SloppyBlockFunctionStatement* declaration) {
  Visit(declaration->statement());
}


int FullCodeGenerator::DeclareGlobalsFlags() {
  DCHECK(DeclareGlobalsLanguageMode::is_valid(language_mode()));
  return DeclareGlobalsEvalFlag::encode(is_eval()) |
         DeclareGlobalsNativeFlag::encode(is_native()) |
         DeclareGlobalsLanguageMode::encode(language_mode());
}


void FullCodeGenerator::EmitSubString(CallRuntime* expr) {
  // Load the arguments on the stack and call the stub.
  SubStringStub stub(isolate());
  ZoneList<Expression*>* args = expr->arguments();
  DCHECK(args->length() == 3);
  VisitForStackValue(args->at(0));
  VisitForStackValue(args->at(1));
  VisitForStackValue(args->at(2));
  __ CallStub(&stub);
  context()->Plug(result_register());
}


void FullCodeGenerator::EmitRegExpExec(CallRuntime* expr) {
  // Load the arguments on the stack and call the stub.
  RegExpExecStub stub(isolate());
  ZoneList<Expression*>* args = expr->arguments();
  DCHECK(args->length() == 4);
  VisitForStackValue(args->at(0));
  VisitForStackValue(args->at(1));
  VisitForStackValue(args->at(2));
  VisitForStackValue(args->at(3));
  __ CallStub(&stub);
  context()->Plug(result_register());
}


void FullCodeGenerator::EmitMathPow(CallRuntime* expr) {
  // Load the arguments on the stack and call the stub.
  ZoneList<Expression*>* args = expr->arguments();
  DCHECK(args->length() == 2);
  VisitForStackValue(args->at(0));
  VisitForStackValue(args->at(1));

  MathPowStub stub(isolate(), MathPowStub::ON_STACK);
  __ CallStub(&stub);
  context()->Plug(result_register());
}


void FullCodeGenerator::EmitIntrinsicAsStubCall(CallRuntime* expr,
                                                const Callable& callable) {
  ZoneList<Expression*>* args = expr->arguments();
  int param_count = callable.descriptor().GetRegisterParameterCount();
  DCHECK_EQ(args->length(), param_count);

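  // For instance, for a hypothetical stub taking three register
  // parameters, arguments 0 and 1 are first pushed onto the stack,
  // argument 2 is evaluated into the accumulator, and the stacked values
  // are then popped into the remaining parameter registers.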
  if (param_count > 0) {
    int last = param_count - 1;
    // Put all but the last argument on the stack.
    for (int i = 0; i < last; i++) {
      VisitForStackValue(args->at(i));
    }
    // The last argument goes to the accumulator.
    VisitForAccumulatorValue(args->at(last));

    // Move the arguments to the registers, as required by the stub.
    __ Move(callable.descriptor().GetRegisterParameter(last),
            result_register());
    for (int i = last; i-- > 0;) {
      __ Pop(callable.descriptor().GetRegisterParameter(i));
    }
  }
  __ Call(callable.code(), RelocInfo::CODE_TARGET);
  context()->Plug(result_register());
}


void FullCodeGenerator::EmitNumberToString(CallRuntime* expr) {
  EmitIntrinsicAsStubCall(expr, CodeFactory::NumberToString(isolate()));
}


void FullCodeGenerator::EmitToString(CallRuntime* expr) {
  EmitIntrinsicAsStubCall(expr, CodeFactory::ToString(isolate()));
}


void FullCodeGenerator::EmitToLength(CallRuntime* expr) {
  EmitIntrinsicAsStubCall(expr, CodeFactory::ToLength(isolate()));
}


void FullCodeGenerator::EmitToNumber(CallRuntime* expr) {
  EmitIntrinsicAsStubCall(expr, CodeFactory::ToNumber(isolate()));
}


void FullCodeGenerator::EmitToObject(CallRuntime* expr) {
  EmitIntrinsicAsStubCall(expr, CodeFactory::ToObject(isolate()));
}


void FullCodeGenerator::EmitRegExpConstructResult(CallRuntime* expr) {
  EmitIntrinsicAsStubCall(expr, CodeFactory::RegExpConstructResult(isolate()));
}


bool RecordStatementPosition(MacroAssembler* masm, int pos) {
  if (pos == RelocInfo::kNoPosition) return false;
  masm->positions_recorder()->RecordStatementPosition(pos);
  masm->positions_recorder()->RecordPosition(pos);
  return masm->positions_recorder()->WriteRecordedPositions();
}


bool RecordPosition(MacroAssembler* masm, int pos) {
  if (pos == RelocInfo::kNoPosition) return false;
  masm->positions_recorder()->RecordPosition(pos);
  return masm->positions_recorder()->WriteRecordedPositions();
}


void FullCodeGenerator::SetFunctionPosition(FunctionLiteral* fun) {
  RecordPosition(masm_, fun->start_position());
}


void FullCodeGenerator::SetReturnPosition(FunctionLiteral* fun) {
  // For default constructors, start position equals end position, and there
  // is no source code besides the class literal.
  int pos = std::max(fun->start_position(), fun->end_position() - 1);
  RecordStatementPosition(masm_, pos);
  if (info_->is_debug()) {
    // Always emit a debug break slot before a return.
    DebugCodegen::GenerateSlot(masm_, RelocInfo::DEBUG_BREAK_SLOT_AT_RETURN);
  }
}


void FullCodeGenerator::SetStatementPosition(
    Statement* stmt, FullCodeGenerator::InsertBreak insert_break) {
  if (stmt->position() == RelocInfo::kNoPosition) return;
  bool recorded = RecordStatementPosition(masm_, stmt->position());
  if (recorded && insert_break == INSERT_BREAK && info_->is_debug() &&
      !stmt->IsDebuggerStatement()) {
    DebugCodegen::GenerateSlot(masm_, RelocInfo::DEBUG_BREAK_SLOT_AT_POSITION);
  }
}


void FullCodeGenerator::SetExpressionPosition(
    Expression* expr, FullCodeGenerator::InsertBreak insert_break) {
  if (expr->position() == RelocInfo::kNoPosition) return;
  bool recorded = RecordPosition(masm_, expr->position());
  if (recorded && insert_break == INSERT_BREAK && info_->is_debug()) {
    DebugCodegen::GenerateSlot(masm_, RelocInfo::DEBUG_BREAK_SLOT_AT_POSITION);
  }
}


void FullCodeGenerator::SetExpressionAsStatementPosition(Expression* expr) {
  if (expr->position() == RelocInfo::kNoPosition) return;
  bool recorded = RecordStatementPosition(masm_, expr->position());
  if (recorded && info_->is_debug()) {
    DebugCodegen::GenerateSlot(masm_, RelocInfo::DEBUG_BREAK_SLOT_AT_POSITION);
  }
}


void FullCodeGenerator::SetCallPosition(Expression* expr) {
  if (expr->position() == RelocInfo::kNoPosition) return;
  RecordPosition(masm_, expr->position());
  if (info_->is_debug()) {
    // Always emit a debug break slot before a call.
    DebugCodegen::GenerateSlot(masm_, RelocInfo::DEBUG_BREAK_SLOT_AT_CALL);
  }
}


void FullCodeGenerator::VisitSuperPropertyReference(
    SuperPropertyReference* super) {
  __ CallRuntime(Runtime::kThrowUnsupportedSuperError);
}


void FullCodeGenerator::VisitSuperCallReference(SuperCallReference* super) {
  __ CallRuntime(Runtime::kThrowUnsupportedSuperError);
}


void FullCodeGenerator::EmitGeneratorNext(CallRuntime* expr) {
  ZoneList<Expression*>* args = expr->arguments();
  DCHECK(args->length() == 2);
  EmitGeneratorResume(args->at(0), args->at(1), JSGeneratorObject::NEXT);
}


void FullCodeGenerator::EmitGeneratorThrow(CallRuntime* expr) {
  ZoneList<Expression*>* args = expr->arguments();
  DCHECK(args->length() == 2);
  EmitGeneratorResume(args->at(0), args->at(1), JSGeneratorObject::THROW);
}


void FullCodeGenerator::EmitDebugBreakInOptimizedCode(CallRuntime* expr) {
  context()->Plug(handle(Smi::FromInt(0), isolate()));
}


void FullCodeGenerator::VisitBinaryOperation(BinaryOperation* expr) {
  switch (expr->op()) {
    case Token::COMMA:
      return VisitComma(expr);
    case Token::OR:
    case Token::AND:
      return VisitLogicalExpression(expr);
    default:
      return VisitArithmeticExpression(expr);
  }
}


void FullCodeGenerator::VisitInDuplicateContext(Expression* expr) {
  if (context()->IsEffect()) {
    VisitForEffect(expr);
  } else if (context()->IsAccumulatorValue()) {
    VisitForAccumulatorValue(expr);
  } else if (context()->IsStackValue()) {
    VisitForStackValue(expr);
  } else if (context()->IsTest()) {
    const TestContext* test = TestContext::cast(context());
    VisitForControl(expr, test->true_label(), test->false_label(),
                    test->fall_through());
  }
}


void FullCodeGenerator::VisitComma(BinaryOperation* expr) {
  Comment cmnt(masm_, "[ Comma");
  VisitForEffect(expr->left());
  VisitInDuplicateContext(expr->right());
}


void FullCodeGenerator::VisitLogicalExpression(BinaryOperation* expr) {
  bool is_logical_and = expr->op() == Token::AND;
  Comment cmnt(masm_, is_logical_and ? "[ Logical AND" : "[ Logical OR");
  Expression* left = expr->left();
  Expression* right = expr->right();
  BailoutId right_id = expr->RightId();
  Label done;

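  // JavaScript short-circuit semantics: "a && b" evaluates to "a" if "a"
  // is falsy and to "b" otherwise; "a || b" evaluates to "a" if "a" is
  // truthy and to "b" otherwise. The non-test contexts below therefore
  // keep the left-hand value around until the test decides whether to
  // discard it and evaluate the right-hand side instead.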
  if (context()->IsTest()) {
    Label eval_right;
    const TestContext* test = TestContext::cast(context());
    if (is_logical_and) {
      VisitForControl(left, &eval_right, test->false_label(), &eval_right);
    } else {
      VisitForControl(left, test->true_label(), &eval_right, &eval_right);
    }
    PrepareForBailoutForId(right_id, NO_REGISTERS);
    __ bind(&eval_right);

  } else if (context()->IsAccumulatorValue()) {
    VisitForAccumulatorValue(left);
    // We want the value in the accumulator for the test, and on the stack in
    // case we need it.
    __ Push(result_register());
    Label discard, restore;
    if (is_logical_and) {
      DoTest(left, &discard, &restore, &restore);
    } else {
      DoTest(left, &restore, &discard, &restore);
    }
    __ bind(&restore);
    __ Pop(result_register());
    __ jmp(&done);
    __ bind(&discard);
    __ Drop(1);
    PrepareForBailoutForId(right_id, NO_REGISTERS);

  } else if (context()->IsStackValue()) {
    VisitForAccumulatorValue(left);
    // We want the value in the accumulator for the test, and on the stack in
    // case we need it.
    __ Push(result_register());
    Label discard;
    if (is_logical_and) {
      DoTest(left, &discard, &done, &discard);
    } else {
      DoTest(left, &done, &discard, &discard);
    }
    __ bind(&discard);
    __ Drop(1);
    PrepareForBailoutForId(right_id, NO_REGISTERS);

  } else {
    DCHECK(context()->IsEffect());
    Label eval_right;
    if (is_logical_and) {
      VisitForControl(left, &eval_right, &done, &eval_right);
    } else {
      VisitForControl(left, &done, &eval_right, &eval_right);
    }
    PrepareForBailoutForId(right_id, NO_REGISTERS);
    __ bind(&eval_right);
  }

  VisitInDuplicateContext(right);
  __ bind(&done);
}


void FullCodeGenerator::VisitArithmeticExpression(BinaryOperation* expr) {
  Token::Value op = expr->op();
  Comment cmnt(masm_, "[ ArithmeticExpression");
  Expression* left = expr->left();
  Expression* right = expr->right();

  VisitForStackValue(left);
  VisitForAccumulatorValue(right);

  SetExpressionPosition(expr);
  if (ShouldInlineSmiCase(op)) {
    EmitInlineSmiBinaryOp(expr, op, left, right);
  } else {
    EmitBinaryOp(expr, op);
  }
}


void FullCodeGenerator::VisitForTypeofValue(Expression* expr) {
  VariableProxy* proxy = expr->AsVariableProxy();
  DCHECK(!context()->IsEffect());
  DCHECK(!context()->IsTest());

  if (proxy != NULL && (proxy->var()->IsUnallocatedOrGlobalSlot() ||
                        proxy->var()->IsLookupSlot())) {
    EmitVariableLoad(proxy, INSIDE_TYPEOF);
    PrepareForBailout(proxy, TOS_REG);
  } else {
    // This expression cannot throw a reference error at the top level.
    VisitInDuplicateContext(expr);
  }
}


void FullCodeGenerator::VisitBlock(Block* stmt) {
  Comment cmnt(masm_, "[ Block");
  NestedBlock nested_block(this, stmt);
  SetStatementPosition(stmt);

  {
    EnterBlockScopeIfNeeded block_scope_state(
        this, stmt->scope(), stmt->EntryId(), stmt->DeclsId(), stmt->ExitId());
    VisitStatements(stmt->statements());
    __ bind(nested_block.break_label());
  }
}


void FullCodeGenerator::VisitDoExpression(DoExpression* expr) {
  Comment cmnt(masm_, "[ Do Expression");
  NestedStatement nested_block(this);
  SetExpressionPosition(expr);
  VisitBlock(expr->block());
  EmitVariableLoad(expr->result());
}


void FullCodeGenerator::VisitExpressionStatement(ExpressionStatement* stmt) {
  Comment cmnt(masm_, "[ ExpressionStatement");
  SetStatementPosition(stmt);
  VisitForEffect(stmt->expression());
}


void FullCodeGenerator::VisitEmptyStatement(EmptyStatement* stmt) {
  Comment cmnt(masm_, "[ EmptyStatement");
  SetStatementPosition(stmt);
}


void FullCodeGenerator::VisitIfStatement(IfStatement* stmt) {
  Comment cmnt(masm_, "[ IfStatement");
  SetStatementPosition(stmt);
  Label then_part, else_part, done;

  if (stmt->HasElseStatement()) {
    VisitForControl(stmt->condition(), &then_part, &else_part, &then_part);
    PrepareForBailoutForId(stmt->ThenId(), NO_REGISTERS);
    __ bind(&then_part);
    Visit(stmt->then_statement());
    __ jmp(&done);

    PrepareForBailoutForId(stmt->ElseId(), NO_REGISTERS);
    __ bind(&else_part);
    Visit(stmt->else_statement());
  } else {
    VisitForControl(stmt->condition(), &then_part, &done, &then_part);
    PrepareForBailoutForId(stmt->ThenId(), NO_REGISTERS);
    __ bind(&then_part);
    Visit(stmt->then_statement());

    PrepareForBailoutForId(stmt->ElseId(), NO_REGISTERS);
  }
  __ bind(&done);
  PrepareForBailoutForId(stmt->IfId(), NO_REGISTERS);
}


void FullCodeGenerator::VisitContinueStatement(ContinueStatement* stmt) {
  Comment cmnt(masm_, "[ ContinueStatement");
  SetStatementPosition(stmt);
  NestedStatement* current = nesting_stack_;
  int stack_depth = 0;
  int context_length = 0;
  // When continuing, we clobber the unpredictable value in the accumulator
  // with one that's safe for GC. If we hit an exit from the try block of
  // try...finally on our way out, we will unconditionally preserve the
  // accumulator on the stack.
  ClearAccumulator();
  while (!current->IsContinueTarget(stmt->target())) {
    current = current->Exit(&stack_depth, &context_length);
  }
  __ Drop(stack_depth);
  if (context_length > 0) {
    while (context_length > 0) {
      LoadContextField(context_register(), Context::PREVIOUS_INDEX);
      --context_length;
    }
    StoreToFrameField(StandardFrameConstants::kContextOffset,
                      context_register());
  }

  __ jmp(current->AsIteration()->continue_label());
}


void FullCodeGenerator::VisitBreakStatement(BreakStatement* stmt) {
  Comment cmnt(masm_, "[ BreakStatement");
  SetStatementPosition(stmt);
  NestedStatement* current = nesting_stack_;
  int stack_depth = 0;
  int context_length = 0;
  // When breaking, we clobber the unpredictable value in the accumulator
  // with one that's safe for GC. If we hit an exit from the try block of
  // try...finally on our way out, we will unconditionally preserve the
  // accumulator on the stack.
  ClearAccumulator();
  while (!current->IsBreakTarget(stmt->target())) {
    current = current->Exit(&stack_depth, &context_length);
  }
  __ Drop(stack_depth);
  if (context_length > 0) {
    while (context_length > 0) {
      LoadContextField(context_register(), Context::PREVIOUS_INDEX);
      --context_length;
    }
    StoreToFrameField(StandardFrameConstants::kContextOffset,
                      context_register());
  }

  __ jmp(current->AsBreakable()->break_label());
}


void FullCodeGenerator::EmitUnwindBeforeReturn() {
  NestedStatement* current = nesting_stack_;
  int stack_depth = 0;
  int context_length = 0;
  while (current != NULL) {
    current = current->Exit(&stack_depth, &context_length);
  }
  __ Drop(stack_depth);
}


void FullCodeGenerator::EmitPropertyKey(ObjectLiteralProperty* property,
                                        BailoutId bailout_id) {
  VisitForStackValue(property->key());
  __ CallRuntime(Runtime::kToName);
  PrepareForBailoutForId(bailout_id, NO_REGISTERS);
  __ Push(result_register());
}


void FullCodeGenerator::VisitReturnStatement(ReturnStatement* stmt) {
  Comment cmnt(masm_, "[ ReturnStatement");
  SetStatementPosition(stmt);
  Expression* expr = stmt->expression();
  VisitForAccumulatorValue(expr);
  EmitUnwindBeforeReturn();
  EmitReturnSequence();
}


void FullCodeGenerator::VisitWithStatement(WithStatement* stmt) {
  Comment cmnt(masm_, "[ WithStatement");
  SetStatementPosition(stmt);

  VisitForAccumulatorValue(stmt->expression());
  Callable callable = CodeFactory::ToObject(isolate());
  __ Move(callable.descriptor().GetRegisterParameter(0), result_register());
  __ Call(callable.code(), RelocInfo::CODE_TARGET);
  PrepareForBailoutForId(stmt->ToObjectId(), NO_REGISTERS);
  __ Push(result_register());
  PushFunctionArgumentForContextAllocation();
  __ CallRuntime(Runtime::kPushWithContext);
  StoreToFrameField(StandardFrameConstants::kContextOffset, context_register());
  PrepareForBailoutForId(stmt->EntryId(), NO_REGISTERS);

  Scope* saved_scope = scope();
  scope_ = stmt->scope();
  { WithOrCatch body(this);
    Visit(stmt->statement());
  }
  scope_ = saved_scope;

  // Pop context.
  LoadContextField(context_register(), Context::PREVIOUS_INDEX);
  // Update local stack frame context field.
  StoreToFrameField(StandardFrameConstants::kContextOffset, context_register());
}


void FullCodeGenerator::VisitDoWhileStatement(DoWhileStatement* stmt) {
  Comment cmnt(masm_, "[ DoWhileStatement");
  // Do not insert break location as we do that below.
  SetStatementPosition(stmt, SKIP_BREAK);

  Label body, book_keeping;

  Iteration loop_statement(this, stmt);
  increment_loop_depth();

  __ bind(&body);
  Visit(stmt->body());

  // Record the position of the do while condition and make sure it is
  // possible to break on the condition.
  __ bind(loop_statement.continue_label());
  PrepareForBailoutForId(stmt->ContinueId(), NO_REGISTERS);

  // Here is the actual 'while' keyword.
  SetExpressionAsStatementPosition(stmt->cond());
  VisitForControl(stmt->cond(),
                  &book_keeping,
                  loop_statement.break_label(),
                  &book_keeping);

  // Check stack before looping.
  PrepareForBailoutForId(stmt->BackEdgeId(), NO_REGISTERS);
  __ bind(&book_keeping);
  EmitBackEdgeBookkeeping(stmt, &body);
  __ jmp(&body);

  PrepareForBailoutForId(stmt->ExitId(), NO_REGISTERS);
  __ bind(loop_statement.break_label());
  decrement_loop_depth();
}


void FullCodeGenerator::VisitWhileStatement(WhileStatement* stmt) {
  Comment cmnt(masm_, "[ WhileStatement");
  Label loop, body;

  Iteration loop_statement(this, stmt);
  increment_loop_depth();

  __ bind(&loop);

  SetExpressionAsStatementPosition(stmt->cond());
  VisitForControl(stmt->cond(),
                  &body,
                  loop_statement.break_label(),
                  &body);

  PrepareForBailoutForId(stmt->BodyId(), NO_REGISTERS);
  __ bind(&body);
  Visit(stmt->body());

  __ bind(loop_statement.continue_label());

  // Check stack before looping.
  EmitBackEdgeBookkeeping(stmt, &loop);
  __ jmp(&loop);

  PrepareForBailoutForId(stmt->ExitId(), NO_REGISTERS);
  __ bind(loop_statement.break_label());
  decrement_loop_depth();
}


void FullCodeGenerator::VisitForStatement(ForStatement* stmt) {
  Comment cmnt(masm_, "[ ForStatement");
  // Do not insert break location as we do it below.
  SetStatementPosition(stmt, SKIP_BREAK);

  Label test, body;

  Iteration loop_statement(this, stmt);

  if (stmt->init() != NULL) {
    SetStatementPosition(stmt->init());
    Visit(stmt->init());
  }

  increment_loop_depth();
  // Emit the test at the bottom of the loop (even if empty).
  __ jmp(&test);

  PrepareForBailoutForId(stmt->BodyId(), NO_REGISTERS);
  __ bind(&body);
  Visit(stmt->body());

  PrepareForBailoutForId(stmt->ContinueId(), NO_REGISTERS);
  __ bind(loop_statement.continue_label());
  if (stmt->next() != NULL) {
    SetStatementPosition(stmt->next());
    Visit(stmt->next());
  }

  // Check stack before looping.
  EmitBackEdgeBookkeeping(stmt, &body);

  __ bind(&test);
  if (stmt->cond() != NULL) {
    SetExpressionAsStatementPosition(stmt->cond());
    VisitForControl(stmt->cond(),
                    &body,
                    loop_statement.break_label(),
                    loop_statement.break_label());
  } else {
    __ jmp(&body);
  }

  PrepareForBailoutForId(stmt->ExitId(), NO_REGISTERS);
  __ bind(loop_statement.break_label());
  decrement_loop_depth();
}


void FullCodeGenerator::VisitForOfStatement(ForOfStatement* stmt) {
  Comment cmnt(masm_, "[ ForOfStatement");

  Iteration loop_statement(this, stmt);
  increment_loop_depth();

  // var iterator = iterable[Symbol.iterator]();
  VisitForEffect(stmt->assign_iterator());

  // Loop entry.
  __ bind(loop_statement.continue_label());

  // result = iterator.next()
  SetExpressionAsStatementPosition(stmt->next_result());
  VisitForEffect(stmt->next_result());

  // if (result.done) break;
  Label result_not_done;
  VisitForControl(stmt->result_done(), loop_statement.break_label(),
                  &result_not_done, &result_not_done);
  __ bind(&result_not_done);

  // each = result.value
  VisitForEffect(stmt->assign_each());

  // Generate code for the body of the loop.
  Visit(stmt->body());

  // Check stack before looping.
  PrepareForBailoutForId(stmt->BackEdgeId(), NO_REGISTERS);
  EmitBackEdgeBookkeeping(stmt, loop_statement.continue_label());
  __ jmp(loop_statement.continue_label());

  // Exit and decrement the loop depth.
  PrepareForBailoutForId(stmt->ExitId(), NO_REGISTERS);
  __ bind(loop_statement.break_label());
  decrement_loop_depth();
}


void FullCodeGenerator::VisitTryCatchStatement(TryCatchStatement* stmt) {
  Comment cmnt(masm_, "[ TryCatchStatement");
  SetStatementPosition(stmt, SKIP_BREAK);

  // The try block adds a handler to the exception handler chain before
  // entering, and removes it again when exiting normally. If an exception
  // is thrown during execution of the try block, the handler is consumed
  // and control is passed to the catch block with the exception in the
  // result register.

  Label try_entry, handler_entry, exit;
  __ jmp(&try_entry);
  __ bind(&handler_entry);
  PrepareForBailoutForId(stmt->HandlerId(), NO_REGISTERS);
  ClearPendingMessage();

  // Exception handler code, the exception is in the result register.
  // Extend the context before executing the catch block.
  { Comment cmnt(masm_, "[ Extend catch context");
    __ Push(stmt->variable()->name());
    __ Push(result_register());
    PushFunctionArgumentForContextAllocation();
    __ CallRuntime(Runtime::kPushCatchContext);
    StoreToFrameField(StandardFrameConstants::kContextOffset,
                      context_register());
  }

  Scope* saved_scope = scope();
  scope_ = stmt->scope();
  DCHECK(scope_->declarations()->is_empty());
  { WithOrCatch catch_body(this);
    Visit(stmt->catch_block());
  }
  // Restore the context.
  LoadContextField(context_register(), Context::PREVIOUS_INDEX);
  StoreToFrameField(StandardFrameConstants::kContextOffset, context_register());
  scope_ = saved_scope;
  __ jmp(&exit);

  // Try block code. Sets up the exception handler chain.
  __ bind(&try_entry);

  try_catch_depth_++;
  int handler_index = NewHandlerTableEntry();
  EnterTryBlock(handler_index, &handler_entry);
  { TryCatch try_body(this);
    Visit(stmt->try_block());
  }
  ExitTryBlock(handler_index);
  try_catch_depth_--;
  __ bind(&exit);
}


void FullCodeGenerator::VisitTryFinallyStatement(TryFinallyStatement* stmt) {
  Comment cmnt(masm_, "[ TryFinallyStatement");
  SetStatementPosition(stmt, SKIP_BREAK);

  // Try-finally is compiled by setting up a try-handler on the stack while
  // executing the try body, and removing it again afterwards.
  //
  // The try-finally construct can enter the finally block in three ways:
  // 1. By exiting the try block normally. This removes the try-handler and
  //    calls the finally block code before continuing.
  // 2. By exiting the try block with a function-local control flow transfer
  //    (break/continue/return). The site of the control transfer (e.g. the
  //    break) removes the try handler and calls the finally block code
  //    before continuing its outward control transfer.
  // 3. By exiting the try block with a thrown exception. This can happen in
  //    nested function calls. It traverses the try-handler chain and
  //    consumes the try-handler entry before jumping to the handler code.
  //    The handler code then calls the finally block before rethrowing the
  //    exception.
  //
  // The finally block must assume a return address on top of the stack
  // (or in the link register on ARM chips) and a value (return value or
  // exception) in the result register (rax/eax/r0), both of which must
  // be preserved. The return address isn't GC-safe, so it should be
  // cooked before GC.
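  //
  // For example, given
  //
  //   try { if (c) break; f(); } finally { g(); }
  //
  // "g()" runs when the try block falls through (case 1), when the break
  // exits it (case 2), and when "f()" throws (case 3).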
  Label try_entry, handler_entry, finally_entry;

  // Jump to try-handler setup and try-block code.
  __ jmp(&try_entry);
  __ bind(&handler_entry);
  PrepareForBailoutForId(stmt->HandlerId(), NO_REGISTERS);

  // Exception handler code. This code is only executed when an exception
  // is thrown. The exception is in the result register, and must be
  // preserved by the finally block. Call the finally block and then
  // rethrow the exception if it returns.
  __ Call(&finally_entry);
  __ Push(result_register());
  __ CallRuntime(Runtime::kReThrow);

  // Finally block implementation.
  __ bind(&finally_entry);
  EnterFinallyBlock();
  { Finally finally_body(this);
    Visit(stmt->finally_block());
  }
  ExitFinallyBlock();  // Return to the calling code.

  // Set up try handler.
  __ bind(&try_entry);
  int handler_index = NewHandlerTableEntry();
  EnterTryBlock(handler_index, &handler_entry);
  { TryFinally try_body(this, &finally_entry);
    Visit(stmt->try_block());
  }
  ExitTryBlock(handler_index);
  // Execute the finally block on the way out. Clobber the unpredictable
  // value in the result register with one that's safe for GC because the
  // finally block will unconditionally preserve the result register on the
  // stack.
  ClearAccumulator();
  __ Call(&finally_entry);
}


void FullCodeGenerator::VisitDebuggerStatement(DebuggerStatement* stmt) {
  Comment cmnt(masm_, "[ DebuggerStatement");
  SetStatementPosition(stmt);

  __ DebugBreak();
  // Ignore the return value.

  PrepareForBailoutForId(stmt->DebugBreakId(), NO_REGISTERS);
}


void FullCodeGenerator::VisitCaseClause(CaseClause* clause) {
  UNREACHABLE();
}


void FullCodeGenerator::VisitConditional(Conditional* expr) {
  Comment cmnt(masm_, "[ Conditional");
  Label true_case, false_case, done;
  VisitForControl(expr->condition(), &true_case, &false_case, &true_case);

  PrepareForBailoutForId(expr->ThenId(), NO_REGISTERS);
  __ bind(&true_case);
  SetExpressionPosition(expr->then_expression());
  if (context()->IsTest()) {
    const TestContext* for_test = TestContext::cast(context());
    VisitForControl(expr->then_expression(),
                    for_test->true_label(),
                    for_test->false_label(),
                    NULL);
  } else {
    VisitInDuplicateContext(expr->then_expression());
    __ jmp(&done);
  }

  PrepareForBailoutForId(expr->ElseId(), NO_REGISTERS);
  __ bind(&false_case);
  SetExpressionPosition(expr->else_expression());
  VisitInDuplicateContext(expr->else_expression());
  // If control flow falls through Visit, merge it with true case here.
  if (!context()->IsTest()) {
    __ bind(&done);
  }
}


void FullCodeGenerator::VisitLiteral(Literal* expr) {
  Comment cmnt(masm_, "[ Literal");
  context()->Plug(expr->value());
}


void FullCodeGenerator::VisitFunctionLiteral(FunctionLiteral* expr) {
  Comment cmnt(masm_, "[ FunctionLiteral");

  // Build the function boilerplate and instantiate it.
  Handle<SharedFunctionInfo> function_info =
      Compiler::GetSharedFunctionInfo(expr, script(), info_);
  if (function_info.is_null()) {
    SetStackOverflow();
    return;
  }
  EmitNewClosure(function_info, expr->pretenure());
}


void FullCodeGenerator::VisitClassLiteral(ClassLiteral* lit) {
  Comment cmnt(masm_, "[ ClassLiteral");

  {
    EnterBlockScopeIfNeeded block_scope_state(
        this, lit->scope(), lit->EntryId(), lit->DeclsId(), lit->ExitId());

    if (lit->raw_name() != NULL) {
      __ Push(lit->name());
    } else {
      __ Push(isolate()->factory()->undefined_value());
    }

    if (lit->extends() != NULL) {
      VisitForStackValue(lit->extends());
    } else {
      __ Push(isolate()->factory()->the_hole_value());
    }

    VisitForStackValue(lit->constructor());

    __ Push(Smi::FromInt(lit->start_position()));
    __ Push(Smi::FromInt(lit->end_position()));

    __ CallRuntime(Runtime::kDefineClass);
    PrepareForBailoutForId(lit->CreateLiteralId(), TOS_REG);

    EmitClassDefineProperties(lit);

    if (lit->class_variable_proxy() != nullptr) {
      EmitVariableAssignment(lit->class_variable_proxy()->var(), Token::INIT,
                             lit->ProxySlot());
    }
  }

  context()->Plug(result_register());
}


void FullCodeGenerator::VisitNativeFunctionLiteral(
    NativeFunctionLiteral* expr) {
  Comment cmnt(masm_, "[ NativeFunctionLiteral");

  v8::Isolate* v8_isolate = reinterpret_cast<v8::Isolate*>(isolate());

  // Compute the function template for the native function.
  Handle<String> name = expr->name();
  v8::Local<v8::FunctionTemplate> fun_template =
      expr->extension()->GetNativeFunctionTemplate(v8_isolate,
                                                   v8::Utils::ToLocal(name));
  DCHECK(!fun_template.IsEmpty());

  // Instantiate the function and create a shared function info from it.
  Handle<JSFunction> fun = Handle<JSFunction>::cast(Utils::OpenHandle(
      *fun_template->GetFunction(v8_isolate->GetCurrentContext())
           .ToLocalChecked()));
  const int literals = fun->NumberOfLiterals();
  Handle<Code> code = Handle<Code>(fun->shared()->code());
  Handle<Code> construct_stub = Handle<Code>(fun->shared()->construct_stub());
  Handle<SharedFunctionInfo> shared =
      isolate()->factory()->NewSharedFunctionInfo(
          name, literals, FunctionKind::kNormalFunction, code,
          Handle<ScopeInfo>(fun->shared()->scope_info()),
          Handle<TypeFeedbackVector>(fun->shared()->feedback_vector()));
  shared->set_construct_stub(*construct_stub);

  // Copy the function data to the shared function info.
  shared->set_function_data(fun->shared()->function_data());
  int parameters = fun->shared()->internal_formal_parameter_count();
  shared->set_internal_formal_parameter_count(parameters);

  EmitNewClosure(shared, false);
}


void FullCodeGenerator::VisitThrow(Throw* expr) {
  Comment cmnt(masm_, "[ Throw");
  VisitForStackValue(expr->exception());
  SetExpressionPosition(expr);
  __ CallRuntime(Runtime::kThrow);
  // Never returns here.
}


void FullCodeGenerator::EnterTryBlock(int handler_index, Label* handler) {
  HandlerTableEntry* entry = &handler_table_[handler_index];
  entry->range_start = masm()->pc_offset();
  entry->handler_offset = handler->pos();
  entry->try_catch_depth = try_catch_depth_;

  // Determine expression stack depth of try statement.
  int stack_depth = info_->scope()->num_stack_slots();  // Include stack locals.
  for (NestedStatement* current = nesting_stack_; current != NULL; /*nop*/) {
    current = current->AccumulateDepth(&stack_depth);
  }
  entry->stack_depth = stack_depth;

  // Push context onto operand stack.
  STATIC_ASSERT(TryBlockConstant::kElementCount == 1);
  __ Push(context_register());
}


void FullCodeGenerator::ExitTryBlock(int handler_index) {
  HandlerTableEntry* entry = &handler_table_[handler_index];
  entry->range_end = masm()->pc_offset();

  // Drop context from operand stack.
  __ Drop(TryBlockConstant::kElementCount);
}


void FullCodeGenerator::VisitCall(Call* expr) {
#ifdef DEBUG
  // We want to verify that RecordJSReturnSite gets called on all paths
  // through this function. Avoid early returns.
  expr->return_is_recorded_ = false;
#endif

  Comment cmnt(masm_, "[ Call");
  Expression* callee = expr->expression();
  Call::CallType call_type = expr->GetCallType(isolate());

  switch (call_type) {
    case Call::POSSIBLY_EVAL_CALL:
      EmitPossiblyEvalCall(expr);
      break;
    case Call::GLOBAL_CALL:
      EmitCallWithLoadIC(expr);
      break;
    case Call::LOOKUP_SLOT_CALL:
      // Call to a lookup slot (dynamically introduced variable).
      PushCalleeAndWithBaseObject(expr);
      EmitCall(expr);
      break;
    case Call::NAMED_PROPERTY_CALL: {
      Property* property = callee->AsProperty();
      VisitForStackValue(property->obj());
      EmitCallWithLoadIC(expr);
      break;
    }
    case Call::KEYED_PROPERTY_CALL: {
      Property* property = callee->AsProperty();
      VisitForStackValue(property->obj());
      EmitKeyedCallWithLoadIC(expr, property->key());
      break;
    }
    case Call::NAMED_SUPER_PROPERTY_CALL:
      EmitSuperCallWithLoadIC(expr);
      break;
    case Call::KEYED_SUPER_PROPERTY_CALL:
      EmitKeyedSuperCallWithLoadIC(expr);
      break;
    case Call::SUPER_CALL:
      EmitSuperConstructorCall(expr);
      break;
    case Call::OTHER_CALL:
      // Call to an arbitrary expression not handled specially above.
      VisitForStackValue(callee);
      __ PushRoot(Heap::kUndefinedValueRootIndex);
      // Emit function call.
      EmitCall(expr);
      break;
  }

#ifdef DEBUG
  // RecordJSReturnSite should have been called.
  DCHECK(expr->return_is_recorded_);
#endif
}


void FullCodeGenerator::VisitSpread(Spread* expr) { UNREACHABLE(); }


void FullCodeGenerator::VisitEmptyParentheses(EmptyParentheses* expr) {
  UNREACHABLE();
}


void FullCodeGenerator::VisitRewritableAssignmentExpression(
    RewritableAssignmentExpression* expr) {
  Visit(expr->expression());
}


FullCodeGenerator::NestedStatement* FullCodeGenerator::TryFinally::Exit(
    int* stack_depth, int* context_length) {
  // The macros used here must preserve the result register.

  // Because the handler block contains the context of the finally
  // code, we can restore it directly from there for the finally code
  // rather than iteratively unwinding contexts via their previous
  // links.
  if (*context_length > 0) {
    __ Drop(*stack_depth);  // Down to the handler block.
    // Restore the context to its dedicated register and the stack.
    STATIC_ASSERT(TryFinally::kElementCount == 1);
    __ Pop(codegen_->context_register());
    codegen_->StoreToFrameField(StandardFrameConstants::kContextOffset,
                                codegen_->context_register());
  } else {
    // Down to the handler block and also drop context.
    __ Drop(*stack_depth + kElementCount);
  }
  __ Call(finally_entry_);

  *stack_depth = 0;
  *context_length = 0;
  return previous_;
}


bool FullCodeGenerator::TryLiteralCompare(CompareOperation* expr) {
  Expression* sub_expr;
  Handle<String> check;
  if (expr->IsLiteralCompareTypeof(&sub_expr, &check)) {
    EmitLiteralCompareTypeof(expr, sub_expr, check);
    return true;
  }

  if (expr->IsLiteralCompareUndefined(&sub_expr, isolate())) {
    EmitLiteralCompareNil(expr, sub_expr, kUndefinedValue);
    return true;
  }

  if (expr->IsLiteralCompareNull(&sub_expr)) {
    EmitLiteralCompareNil(expr, sub_expr, kNullValue);
    return true;
  }

  return false;
}


void BackEdgeTable::Patch(Isolate* isolate, Code* unoptimized) {
  DisallowHeapAllocation no_gc;
  Code* patch = isolate->builtins()->builtin(Builtins::kOnStackReplacement);

  // Increment the loop nesting level by one and iterate over the back edge
  // table to find the matching loops, patching their interrupt calls into
  // unconditional calls to the replacement code.
  int loop_nesting_level = unoptimized->allow_osr_at_loop_nesting_level() + 1;
  if (loop_nesting_level > Code::kMaxLoopNestingMarker) return;

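  // Example: if OSR was previously allowed at nesting level 1, this call
  // patches the interrupt check at every back edge whose recorded loop
  // depth is exactly 2 and then records 2 as the new allowed level.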
  BackEdgeTable back_edges(unoptimized, &no_gc);
  for (uint32_t i = 0; i < back_edges.length(); i++) {
    if (static_cast<int>(back_edges.loop_depth(i)) == loop_nesting_level) {
      DCHECK_EQ(INTERRUPT, GetBackEdgeState(isolate,
                                            unoptimized,
                                            back_edges.pc(i)));
      PatchAt(unoptimized, back_edges.pc(i), ON_STACK_REPLACEMENT, patch);
    }
  }

  unoptimized->set_allow_osr_at_loop_nesting_level(loop_nesting_level);
  DCHECK(Verify(isolate, unoptimized));
}


void BackEdgeTable::Revert(Isolate* isolate, Code* unoptimized) {
  DisallowHeapAllocation no_gc;
  Code* patch = isolate->builtins()->builtin(Builtins::kInterruptCheck);

  // Iterate over the back edge table and revert the patched interrupt calls.
  int loop_nesting_level = unoptimized->allow_osr_at_loop_nesting_level();

  BackEdgeTable back_edges(unoptimized, &no_gc);
  for (uint32_t i = 0; i < back_edges.length(); i++) {
    if (static_cast<int>(back_edges.loop_depth(i)) <= loop_nesting_level) {
      DCHECK_NE(INTERRUPT, GetBackEdgeState(isolate,
                                            unoptimized,
                                            back_edges.pc(i)));
      PatchAt(unoptimized, back_edges.pc(i), INTERRUPT, patch);
    }
  }

  unoptimized->set_allow_osr_at_loop_nesting_level(0);
  // Assert that none of the back edges are patched anymore.
  DCHECK(Verify(isolate, unoptimized));
}


void BackEdgeTable::AddStackCheck(Handle<Code> code, uint32_t pc_offset) {
  DisallowHeapAllocation no_gc;
  Isolate* isolate = code->GetIsolate();
  Address pc = code->instruction_start() + pc_offset;
  Code* patch = isolate->builtins()->builtin(Builtins::kOsrAfterStackCheck);
  PatchAt(*code, pc, OSR_AFTER_STACK_CHECK, patch);
}


void BackEdgeTable::RemoveStackCheck(Handle<Code> code, uint32_t pc_offset) {
  DisallowHeapAllocation no_gc;
  Isolate* isolate = code->GetIsolate();
  Address pc = code->instruction_start() + pc_offset;

  if (OSR_AFTER_STACK_CHECK == GetBackEdgeState(isolate, *code, pc)) {
    Code* patch = isolate->builtins()->builtin(Builtins::kOnStackReplacement);
    PatchAt(*code, pc, ON_STACK_REPLACEMENT, patch);
  }
}


#ifdef DEBUG
bool BackEdgeTable::Verify(Isolate* isolate, Code* unoptimized) {
  DisallowHeapAllocation no_gc;
  int loop_nesting_level = unoptimized->allow_osr_at_loop_nesting_level();
  BackEdgeTable back_edges(unoptimized, &no_gc);
  for (uint32_t i = 0; i < back_edges.length(); i++) {
    uint32_t loop_depth = back_edges.loop_depth(i);
    CHECK_LE(static_cast<int>(loop_depth), Code::kMaxLoopNestingMarker);
    // Assert that all back edges for shallower loops (and only those)
    // have already been patched.
    CHECK_EQ((static_cast<int>(loop_depth) <= loop_nesting_level),
             GetBackEdgeState(isolate,
                              unoptimized,
                              back_edges.pc(i)) != INTERRUPT);
  }
  return true;
}
#endif  // DEBUG


FullCodeGenerator::EnterBlockScopeIfNeeded::EnterBlockScopeIfNeeded(
    FullCodeGenerator* codegen, Scope* scope, BailoutId entry_id,
    BailoutId declarations_id, BailoutId exit_id)
    : codegen_(codegen), exit_id_(exit_id) {
  saved_scope_ = codegen_->scope();

  if (scope == NULL) {
    codegen_->PrepareForBailoutForId(entry_id, NO_REGISTERS);
    needs_block_context_ = false;
  } else {
    needs_block_context_ = scope->NeedsContext();
    codegen_->scope_ = scope;
    {
      if (needs_block_context_) {
        Comment cmnt(masm(), "[ Extend block context");
        __ Push(scope->GetScopeInfo(codegen->isolate()));
        codegen_->PushFunctionArgumentForContextAllocation();
        __ CallRuntime(Runtime::kPushBlockContext);

        // Replace the context stored in the frame.
        codegen_->StoreToFrameField(StandardFrameConstants::kContextOffset,
                                    codegen_->context_register());
      }
      CHECK_EQ(0, scope->num_stack_slots());
      codegen_->PrepareForBailoutForId(entry_id, NO_REGISTERS);
    }
    {
      Comment cmnt(masm(), "[ Declarations");
      codegen_->VisitDeclarations(scope->declarations());
      codegen_->PrepareForBailoutForId(declarations_id, NO_REGISTERS);
    }
  }
}


FullCodeGenerator::EnterBlockScopeIfNeeded::~EnterBlockScopeIfNeeded() {
  if (needs_block_context_) {
    codegen_->LoadContextField(codegen_->context_register(),
                               Context::PREVIOUS_INDEX);
    // Update local stack frame context field.
    codegen_->StoreToFrameField(StandardFrameConstants::kContextOffset,
                                codegen_->context_register());
  }
  codegen_->PrepareForBailoutForId(exit_id_, NO_REGISTERS);
  codegen_->scope_ = saved_scope_;
}


bool FullCodeGenerator::NeedsHoleCheckForLoad(VariableProxy* proxy) {
  Variable* var = proxy->var();

  if (!var->binding_needs_init()) {
    return false;
  }

  // var->scope() may be NULL when the proxy is located in eval code and
  // refers to a potential outside binding. Currently those bindings are
  // always looked up dynamically, i.e. in that case
  // var->location() == LOOKUP always holds.
  DCHECK(var->scope() != NULL);
  DCHECK(var->location() == VariableLocation::PARAMETER ||
         var->location() == VariableLocation::LOCAL ||
         var->location() == VariableLocation::CONTEXT);

  // Check if the binding really needs an initialization check. The check
  // can be skipped in the following situation: we have a LET or CONST
  // binding in harmony mode, both the Variable and the VariableProxy have
  // the same declaration scope (i.e. they are both in global code, in the
  // same function or in the same eval code), the VariableProxy is in
  // the source physically located after the initializer of the variable,
  // and the initializer cannot be skipped due to a nonlinear scope.
  //
  // We cannot skip any initialization checks for CONST in non-harmony
  // mode because const variables may be declared but never initialized:
  //   if (false) { const x; }; var y = x;
  //
  // The condition on the declaration scopes is a conservative check for
  // nested functions that access a binding and are called before the
  // binding is initialized:
  //   function() { f(); let x = 1; function f() { x = 2; } }
  //
  // The check cannot be skipped on non-linear scopes, namely switch
  // scopes, to ensure tests are done in cases like the following:
  //   switch (1) { case 0: let x = 2; case 1: f(x); }
  // The scope of the variable needs to be checked, in case the use is
  // in a sub-block which may be linear.
  if (var->scope()->DeclarationScope() != scope()->DeclarationScope()) {
    return true;
  }

  if (var->is_this()) {
    DCHECK(literal() != nullptr &&
           (literal()->kind() & kSubclassConstructor) != 0);
    // TODO(littledan): implement 'this' hole check elimination.
    return true;
  }

  // Check that we always have a valid source position.
  DCHECK(var->initializer_position() != RelocInfo::kNoPosition);
  DCHECK(proxy->position() != RelocInfo::kNoPosition);

  return var->mode() == CONST_LEGACY || var->scope()->is_nonlinear() ||
         var->initializer_position() >= proxy->position();
}


#undef __


}  // namespace internal
}  // namespace v8