// Copyright 2012 the V8 project authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.

#if V8_TARGET_ARCH_IA32

#include "src/ast/scopes.h"
#include "src/code-factory.h"
#include "src/code-stubs.h"
#include "src/codegen.h"
#include "src/debug/debug.h"
#include "src/full-codegen/full-codegen.h"
#include "src/ia32/frames-ia32.h"
#include "src/ic/ic.h"
#include "src/parsing/parser.h"

namespace v8 {
namespace internal {

#define __ ACCESS_MASM(masm())
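
// Note on the inline smi-check protocol used by JumpPatchSite below: on ia32
// a `test` instruction always clears the carry flag, so the `jc`/`jnc`
// emitted at the patch site is statically never/always taken before any
// patching happens. Once an IC observes smi operands, the runtime rewrites
// the condition byte in place (jc -> jz, jnc -> jnz), turning the same
// `test reg, kSmiTagMask` into a real smi dispatch: with kSmiTag == 0 the
// zero flag is set exactly when the low tag bit is clear, i.e. for smis.
// A rough sketch of the patched fast path (illustrative only, not the exact
// bytes emitted):
//
//   test ecx, 1    ; ZF := ((ecx & 1) == 0), CF := 0
//   jnz slow_case  ; patched from jnc: non-smis now go to the slow path
//   ...            ; inlined smi code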

class JumpPatchSite BASE_EMBEDDED {
 public:
  explicit JumpPatchSite(MacroAssembler* masm) : masm_(masm) {
#ifdef DEBUG
    info_emitted_ = false;
#endif
  }

  ~JumpPatchSite() {
    DCHECK(patch_site_.is_bound() == info_emitted_);
  }

  void EmitJumpIfNotSmi(Register reg,
                        Label* target,
                        Label::Distance distance = Label::kFar) {
    __ test(reg, Immediate(kSmiTagMask));
    EmitJump(not_carry, target, distance);  // Always taken before patched.
  }

  void EmitJumpIfSmi(Register reg,
                     Label* target,
                     Label::Distance distance = Label::kFar) {
    __ test(reg, Immediate(kSmiTagMask));
    EmitJump(carry, target, distance);  // Never taken before patched.
  }

  void EmitPatchInfo() {
    if (patch_site_.is_bound()) {
      int delta_to_patch_site = masm_->SizeOfCodeGeneratedSince(&patch_site_);
      DCHECK(is_uint8(delta_to_patch_site));
      __ test(eax, Immediate(delta_to_patch_site));
#ifdef DEBUG
      info_emitted_ = true;
#endif
    } else {
      __ nop();  // Signals no inlined code.
    }
  }

 private:
  // jc will be patched with jz, jnc will become jnz.
  void EmitJump(Condition cc, Label* target, Label::Distance distance) {
    DCHECK(!patch_site_.is_bound() && !info_emitted_);
    DCHECK(cc == carry || cc == not_carry);
    __ bind(&patch_site_);
    __ j(cc, target, distance);
  }

  MacroAssembler* masm() { return masm_; }
  MacroAssembler* masm_;
  Label patch_site_;
#ifdef DEBUG
  bool info_emitted_;
#endif
};


// Generate code for a JS function. On entry to the function the receiver
// and arguments have been pushed on the stack left to right, with the
// return address on top of them. The actual argument count matches the
// formal parameter count expected by the function.
//
// The live registers are:
//   o edi: the JS function object being called (i.e. ourselves)
//   o edx: the new target value
//   o esi: our context
//   o ebp: our caller's frame pointer
//   o esp: stack pointer (pointing to return address)
//
// The function builds a JS frame. Please see JavaScriptFrameConstants in
// frames-ia32.h for its layout.
void FullCodeGenerator::Generate() {
  CompilationInfo* info = info_;
  profiling_counter_ = isolate()->factory()->NewCell(
      Handle<Smi>(Smi::FromInt(FLAG_interrupt_budget), isolate()));
  SetFunctionPosition(literal());
  Comment cmnt(masm_, "[ function compiled by full code generator");

  ProfileEntryHookStub::MaybeCallEntryHook(masm_);

  if (FLAG_debug_code && info->ExpectsJSReceiverAsReceiver()) {
    int receiver_offset = (info->scope()->num_parameters() + 1) * kPointerSize;
    __ mov(ecx, Operand(esp, receiver_offset));
    __ AssertNotSmi(ecx);
    __ CmpObjectType(ecx, FIRST_JS_RECEIVER_TYPE, ecx);
    __ Assert(above_equal, kSloppyFunctionExpectsJSReceiverReceiver);
  }

  // Open a frame scope to indicate that there is a frame on the stack. The
  // MANUAL indicates that the scope shouldn't actually generate code to set up
  // the frame (that is done below).
  FrameScope frame_scope(masm_, StackFrame::MANUAL);

  info->set_prologue_offset(masm_->pc_offset());
  __ Prologue(info->GeneratePreagedPrologue());

  { Comment cmnt(masm_, "[ Allocate locals");
    int locals_count = info->scope()->num_stack_slots();
    // Generators allocate locals, if any, in context slots.
    DCHECK(!IsGeneratorFunction(literal()->kind()) || locals_count == 0);
    OperandStackDepthIncrement(locals_count);
    if (locals_count == 1) {
      __ push(Immediate(isolate()->factory()->undefined_value()));
    } else if (locals_count > 1) {
      if (locals_count >= 128) {
        Label ok;
        __ mov(ecx, esp);
        __ sub(ecx, Immediate(locals_count * kPointerSize));
        ExternalReference stack_limit =
            ExternalReference::address_of_real_stack_limit(isolate());
        __ cmp(ecx, Operand::StaticVariable(stack_limit));
        __ j(above_equal, &ok, Label::kNear);
        __ CallRuntime(Runtime::kThrowStackOverflow);
        __ bind(&ok);
      }
      __ mov(eax, Immediate(isolate()->factory()->undefined_value()));
      const int kMaxPushes = 32;
      if (locals_count >= kMaxPushes) {
        int loop_iterations = locals_count / kMaxPushes;
        __ mov(ecx, loop_iterations);
        Label loop_header;
        __ bind(&loop_header);
        // Do pushes.
        for (int i = 0; i < kMaxPushes; i++) {
          __ push(eax);
        }
        __ dec(ecx);
        __ j(not_zero, &loop_header, Label::kNear);
      }
      int remaining = locals_count % kMaxPushes;
      // Emit the remaining pushes.
      for (int i = 0; i < remaining; i++) {
        __ push(eax);
      }
    }
  }
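
  // For illustration: with locals_count == 100 the code above emits three
  // iterations of the 32-push loop (96 slots) followed by four straight-line
  // pushes (100 % 32 == 4), so each local costs roughly one one-byte push
  // instruction instead of a full loop iteration.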

  bool function_in_register = true;

  // Possibly allocate a local context.
  if (info->scope()->num_heap_slots() > 0) {
    Comment cmnt(masm_, "[ Allocate context");
    bool need_write_barrier = true;
    int slots = info->scope()->num_heap_slots() - Context::MIN_CONTEXT_SLOTS;
    // Argument to NewContext is the function, which is still in edi.
    if (info->scope()->is_script_scope()) {
      __ push(edi);
      __ Push(info->scope()->GetScopeInfo(info->isolate()));
      __ CallRuntime(Runtime::kNewScriptContext);
      PrepareForBailoutForId(BailoutId::ScriptContext(), TOS_REG);
      // The new target value is not used, clobbering is safe.
      DCHECK_NULL(info->scope()->new_target_var());
    } else {
      if (info->scope()->new_target_var() != nullptr) {
        __ push(edx);  // Preserve new target.
      }
      if (slots <= FastNewContextStub::kMaximumSlots) {
        FastNewContextStub stub(isolate(), slots);
        __ CallStub(&stub);
        // Result of FastNewContextStub is always in new space.
        need_write_barrier = false;
      } else {
        __ push(edi);
        __ CallRuntime(Runtime::kNewFunctionContext);
      }
      if (info->scope()->new_target_var() != nullptr) {
        __ pop(edx);  // Restore new target.
      }
    }
    function_in_register = false;
    // Context is returned in eax. It replaces the context passed to us.
    // It's saved in the stack and kept live in esi.
    __ mov(esi, eax);
    __ mov(Operand(ebp, StandardFrameConstants::kContextOffset), eax);

    // Copy parameters into context if necessary.
    int num_parameters = info->scope()->num_parameters();
    int first_parameter = info->scope()->has_this_declaration() ? -1 : 0;
    for (int i = first_parameter; i < num_parameters; i++) {
      Variable* var = (i == -1) ? scope()->receiver() : scope()->parameter(i);
      if (var->IsContextSlot()) {
        int parameter_offset = StandardFrameConstants::kCallerSPOffset +
                               (num_parameters - 1 - i) * kPointerSize;
        // Load parameter from stack.
        __ mov(eax, Operand(ebp, parameter_offset));
        // Store it in the context.
        int context_offset = Context::SlotOffset(var->index());
        __ mov(Operand(esi, context_offset), eax);
        // Update the write barrier. This clobbers eax and ebx.
        if (need_write_barrier) {
          __ RecordWriteContextSlot(esi,
                                    context_offset,
                                    eax,
                                    ebx,
                                    kDontSaveFPRegs);
        } else if (FLAG_debug_code) {
          Label done;
          __ JumpIfInNewSpace(esi, eax, &done, Label::kNear);
          __ Abort(kExpectedNewSpaceObject);
          __ bind(&done);
        }
      }
    }
  }

  // The registers holding this function and the new target are both trashed
  // in case we bail out here. But since that can happen only when the new
  // target is not used and we allocate a context, the value of
  // |function_in_register| is correct.
  PrepareForBailoutForId(BailoutId::FunctionContext(), NO_REGISTERS);

  // Possibly set up a local binding to the this-function, which is used in
  // derived constructors with super calls.
  Variable* this_function_var = scope()->this_function_var();
  if (this_function_var != nullptr) {
    Comment cmnt(masm_, "[ This function");
    if (!function_in_register) {
      __ mov(edi, Operand(ebp, JavaScriptFrameConstants::kFunctionOffset));
      // The write barrier clobbers the register again; keep it marked as such.
    }
    SetVar(this_function_var, edi, ebx, ecx);
  }

  // Possibly set up a local binding to the new target value.
  Variable* new_target_var = scope()->new_target_var();
  if (new_target_var != nullptr) {
    Comment cmnt(masm_, "[ new.target");
    SetVar(new_target_var, edx, ebx, ecx);
  }

  // Possibly allocate a rest parameter array.
  int rest_index;
  Variable* rest_param = scope()->rest_parameter(&rest_index);
  if (rest_param) {
    Comment cmnt(masm_, "[ Allocate rest parameter array");
    if (!function_in_register) {
      __ mov(edi, Operand(ebp, JavaScriptFrameConstants::kFunctionOffset));
    }
    FastNewRestParameterStub stub(isolate());
    __ CallStub(&stub);
    function_in_register = false;
    SetVar(rest_param, eax, ebx, edx);
  }

  Variable* arguments = scope()->arguments();
  if (arguments != NULL) {
    // The arguments object must be allocated after the context object, in
    // case the "arguments" or ".arguments" variables are in the context.
    Comment cmnt(masm_, "[ Allocate arguments object");
    if (!function_in_register) {
      __ mov(edi, Operand(ebp, JavaScriptFrameConstants::kFunctionOffset));
    }
    if (is_strict(language_mode()) || !has_simple_parameters()) {
      FastNewStrictArgumentsStub stub(isolate());
      __ CallStub(&stub);
    } else if (literal()->has_duplicate_parameters()) {
      __ Push(edi);
      __ CallRuntime(Runtime::kNewSloppyArguments_Generic);
    } else {
      FastNewSloppyArgumentsStub stub(isolate());
      __ CallStub(&stub);
    }

    SetVar(arguments, eax, ebx, edx);
  }

  if (FLAG_trace) {
    __ CallRuntime(Runtime::kTraceEnter);
  }

  // Visit the declarations and body unless there is an illegal
  // redeclaration.
  if (scope()->HasIllegalRedeclaration()) {
    Comment cmnt(masm_, "[ Declarations");
    VisitForEffect(scope()->GetIllegalRedeclaration());

  } else {
    PrepareForBailoutForId(BailoutId::FunctionEntry(), NO_REGISTERS);
    { Comment cmnt(masm_, "[ Declarations");
      VisitDeclarations(scope()->declarations());
    }

    // Assert that the declarations do not use ICs. Otherwise the debugger
    // won't be able to redirect a PC at an IC to the correct IC in newly
    // recompiled code.
    DCHECK_EQ(0, ic_total_count_);

    { Comment cmnt(masm_, "[ Stack check");
      PrepareForBailoutForId(BailoutId::Declarations(), NO_REGISTERS);
      Label ok;
      ExternalReference stack_limit =
          ExternalReference::address_of_stack_limit(isolate());
      __ cmp(esp, Operand::StaticVariable(stack_limit));
      __ j(above_equal, &ok, Label::kNear);
      __ call(isolate()->builtins()->StackCheck(), RelocInfo::CODE_TARGET);
      __ bind(&ok);
    }

    { Comment cmnt(masm_, "[ Body");
      DCHECK(loop_depth() == 0);
      VisitStatements(literal()->body());
      DCHECK(loop_depth() == 0);
    }
  }

  // Always emit a 'return undefined' in case control fell off the end of
  // the body.
  { Comment cmnt(masm_, "[ return <undefined>;");
    __ mov(eax, isolate()->factory()->undefined_value());
    EmitReturnSequence();
  }
}


void FullCodeGenerator::ClearAccumulator() {
  __ Move(eax, Immediate(Smi::FromInt(0)));
}


void FullCodeGenerator::EmitProfilingCounterDecrement(int delta) {
  __ mov(ebx, Immediate(profiling_counter_));
  __ sub(FieldOperand(ebx, Cell::kValueOffset),
         Immediate(Smi::FromInt(delta)));
}


void FullCodeGenerator::EmitProfilingCounterReset() {
  int reset_value = FLAG_interrupt_budget;
  __ mov(ebx, Immediate(profiling_counter_));
  __ mov(FieldOperand(ebx, Cell::kValueOffset),
         Immediate(Smi::FromInt(reset_value)));
}
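
// The profiling counter lives in a heap-allocated Cell and always holds a
// smi. Since a smi on ia32 is the integer value shifted left by one with a
// zero tag bit, sums and differences of tagged smi words are themselves valid
// tagged smis, so the decrement above can operate directly on the raw cell
// contents: FromInt(100) - FromInt(1) == 200 - 2 == 198 == FromInt(99).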


void FullCodeGenerator::EmitBackEdgeBookkeeping(IterationStatement* stmt,
                                                Label* back_edge_target) {
  Comment cmnt(masm_, "[ Back edge bookkeeping");
  Label ok;

  DCHECK(back_edge_target->is_bound());
  int distance = masm_->SizeOfCodeGeneratedSince(back_edge_target);
  int weight = Min(kMaxBackEdgeWeight,
                   Max(1, distance / kCodeSizeMultiplier));
  EmitProfilingCounterDecrement(weight);
  __ j(positive, &ok, Label::kNear);
  __ call(isolate()->builtins()->InterruptCheck(), RelocInfo::CODE_TARGET);

  // Record a mapping of this PC offset to the OSR id. This is used to find
  // the AST id from the unoptimized code in order to use it as a key into
  // the deoptimization input data found in the optimized code.
  RecordBackEdge(stmt->OsrEntryId());

  EmitProfilingCounterReset();

  __ bind(&ok);
  PrepareForBailoutForId(stmt->EntryId(), NO_REGISTERS);
  // Record a mapping of the OSR id to this PC. This is used if the OSR
  // entry becomes the target of a bailout. We don't expect it to be, but
  // we want it to work if it is.
  PrepareForBailoutForId(stmt->OsrEntryId(), NO_REGISTERS);
}
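
// The weight above scales the interrupt budget by loop body size: one back
// edge over `distance` bytes of code consumes roughly
// distance / kCodeSizeMultiplier ticks of the counter, clamped to
// [1, kMaxBackEdgeWeight]. Small hot loops therefore take many iterations to
// exhaust FLAG_interrupt_budget and reach the InterruptCheck builtin (where
// optimization and on-stack replacement can kick in), while loops with large
// bodies reach it sooner.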

void FullCodeGenerator::EmitProfilingCounterHandlingForReturnSequence(
    bool is_tail_call) {
  // Pretend that the exit is a backwards jump to the entry.
  int weight = 1;
  if (info_->ShouldSelfOptimize()) {
    weight = FLAG_interrupt_budget / FLAG_self_opt_count;
  } else {
    int distance = masm_->pc_offset();
    weight = Min(kMaxBackEdgeWeight, Max(1, distance / kCodeSizeMultiplier));
  }
  EmitProfilingCounterDecrement(weight);
  Label ok;
  __ j(positive, &ok, Label::kNear);
  // Don't need to save result register if we are going to do a tail call.
  if (!is_tail_call) {
    __ push(eax);
  }
  __ call(isolate()->builtins()->InterruptCheck(), RelocInfo::CODE_TARGET);
  if (!is_tail_call) {
    __ pop(eax);
  }
  EmitProfilingCounterReset();
  __ bind(&ok);
}

void FullCodeGenerator::EmitReturnSequence() {
  Comment cmnt(masm_, "[ Return sequence");
  if (return_label_.is_bound()) {
    __ jmp(&return_label_);
  } else {
    // Common return label.
    __ bind(&return_label_);
    if (FLAG_trace) {
      __ push(eax);
      __ CallRuntime(Runtime::kTraceExit);
    }
    EmitProfilingCounterHandlingForReturnSequence(false);

    SetReturnPosition(literal());
    __ leave();

    int arg_count = info_->scope()->num_parameters() + 1;
    int arguments_bytes = arg_count * kPointerSize;
    __ Ret(arguments_bytes, ecx);
  }
}
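
// Note that ia32 JS frames use a callee-pops convention: Ret(n, ecx) returns
// to the caller and drops the receiver plus all parameters from the caller's
// stack (ecx only serves as a scratch register). For example, a function
// declared with two parameters pops (2 + 1) * kPointerSize == 12 bytes, the
// "+ 1" accounting for the receiver slot.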


void FullCodeGenerator::StackValueContext::Plug(Variable* var) const {
  DCHECK(var->IsStackAllocated() || var->IsContextSlot());
  MemOperand operand = codegen()->VarOperand(var, result_register());
  // Memory operands can be pushed directly.
  codegen()->PushOperand(operand);
}


void FullCodeGenerator::EffectContext::Plug(Heap::RootListIndex index) const {
  UNREACHABLE();  // Not used on IA32.
}


void FullCodeGenerator::AccumulatorValueContext::Plug(
    Heap::RootListIndex index) const {
  UNREACHABLE();  // Not used on IA32.
}


void FullCodeGenerator::StackValueContext::Plug(
    Heap::RootListIndex index) const {
  UNREACHABLE();  // Not used on IA32.
}


void FullCodeGenerator::TestContext::Plug(Heap::RootListIndex index) const {
  UNREACHABLE();  // Not used on IA32.
}


void FullCodeGenerator::EffectContext::Plug(Handle<Object> lit) const {
}


void FullCodeGenerator::AccumulatorValueContext::Plug(
    Handle<Object> lit) const {
  if (lit->IsSmi()) {
    __ SafeMove(result_register(), Immediate(lit));
  } else {
    __ Move(result_register(), Immediate(lit));
  }
}
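
// SafeMove/SafePush are used for smi immediates as a JIT-spraying mitigation:
// instead of embedding an attacker-controllable 32-bit constant verbatim in
// the instruction stream, the macro assembler may emit it combined with a
// per-isolate jit cookie (e.g. a mov of the XOR'd value followed by an xor to
// undo it). Non-smi handles take the plain Move/push path because they are
// relocated heap pointers rather than raw immediates.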


void FullCodeGenerator::StackValueContext::Plug(Handle<Object> lit) const {
  codegen()->OperandStackDepthIncrement(1);
  if (lit->IsSmi()) {
    __ SafePush(Immediate(lit));
  } else {
    __ push(Immediate(lit));
  }
}


void FullCodeGenerator::TestContext::Plug(Handle<Object> lit) const {
  codegen()->PrepareForBailoutBeforeSplit(condition(),
                                          true,
                                          true_label_,
                                          false_label_);
  DCHECK(lit->IsNull() || lit->IsUndefined() || !lit->IsUndetectableObject());
  if (lit->IsUndefined() || lit->IsNull() || lit->IsFalse()) {
    if (false_label_ != fall_through_) __ jmp(false_label_);
  } else if (lit->IsTrue() || lit->IsJSObject()) {
    if (true_label_ != fall_through_) __ jmp(true_label_);
  } else if (lit->IsString()) {
    if (String::cast(*lit)->length() == 0) {
      if (false_label_ != fall_through_) __ jmp(false_label_);
    } else {
      if (true_label_ != fall_through_) __ jmp(true_label_);
    }
  } else if (lit->IsSmi()) {
    if (Smi::cast(*lit)->value() == 0) {
      if (false_label_ != fall_through_) __ jmp(false_label_);
    } else {
      if (true_label_ != fall_through_) __ jmp(true_label_);
    }
  } else {
    // For simplicity we always test the accumulator register.
    __ mov(result_register(), lit);
    codegen()->DoTest(this);
  }
}


void FullCodeGenerator::StackValueContext::DropAndPlug(int count,
                                                       Register reg) const {
  DCHECK(count > 0);
  if (count > 1) codegen()->DropOperands(count - 1);
  __ mov(Operand(esp, 0), reg);
}


void FullCodeGenerator::EffectContext::Plug(Label* materialize_true,
                                            Label* materialize_false) const {
  DCHECK(materialize_true == materialize_false);
  __ bind(materialize_true);
}


void FullCodeGenerator::AccumulatorValueContext::Plug(
    Label* materialize_true,
    Label* materialize_false) const {
  Label done;
  __ bind(materialize_true);
  __ mov(result_register(), isolate()->factory()->true_value());
  __ jmp(&done, Label::kNear);
  __ bind(materialize_false);
  __ mov(result_register(), isolate()->factory()->false_value());
  __ bind(&done);
}


void FullCodeGenerator::StackValueContext::Plug(
    Label* materialize_true,
    Label* materialize_false) const {
  codegen()->OperandStackDepthIncrement(1);
  Label done;
  __ bind(materialize_true);
  __ push(Immediate(isolate()->factory()->true_value()));
  __ jmp(&done, Label::kNear);
  __ bind(materialize_false);
  __ push(Immediate(isolate()->factory()->false_value()));
  __ bind(&done);
}


void FullCodeGenerator::TestContext::Plug(Label* materialize_true,
                                          Label* materialize_false) const {
  DCHECK(materialize_true == true_label_);
  DCHECK(materialize_false == false_label_);
}


void FullCodeGenerator::AccumulatorValueContext::Plug(bool flag) const {
  Handle<Object> value = flag
      ? isolate()->factory()->true_value()
      : isolate()->factory()->false_value();
  __ mov(result_register(), value);
}


void FullCodeGenerator::StackValueContext::Plug(bool flag) const {
  codegen()->OperandStackDepthIncrement(1);
  Handle<Object> value = flag
      ? isolate()->factory()->true_value()
      : isolate()->factory()->false_value();
  __ push(Immediate(value));
}


void FullCodeGenerator::TestContext::Plug(bool flag) const {
  codegen()->PrepareForBailoutBeforeSplit(condition(),
                                          true,
                                          true_label_,
                                          false_label_);
  if (flag) {
    if (true_label_ != fall_through_) __ jmp(true_label_);
  } else {
    if (false_label_ != fall_through_) __ jmp(false_label_);
  }
}


void FullCodeGenerator::DoTest(Expression* condition,
                               Label* if_true,
                               Label* if_false,
                               Label* fall_through) {
  Handle<Code> ic = ToBooleanStub::GetUninitialized(isolate());
  CallIC(ic, condition->test_id());
  __ CompareRoot(result_register(), Heap::kTrueValueRootIndex);
  Split(equal, if_true, if_false, fall_through);
}


void FullCodeGenerator::Split(Condition cc,
                              Label* if_true,
                              Label* if_false,
                              Label* fall_through) {
  if (if_false == fall_through) {
    __ j(cc, if_true);
  } else if (if_true == fall_through) {
    __ j(NegateCondition(cc), if_false);
  } else {
    __ j(cc, if_true);
    __ jmp(if_false);
  }
}
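
// Split emits the minimal branch sequence for a two-way test: when one of the
// targets immediately follows the emitted code, a single conditional jump
// suffices (with the condition negated if the true target is the
// fall-through); only when neither target falls through do we need both a
// conditional and an unconditional jump.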


MemOperand FullCodeGenerator::StackOperand(Variable* var) {
  DCHECK(var->IsStackAllocated());
  // Offset is negative because higher indexes are at lower addresses.
  int offset = -var->index() * kPointerSize;
  // Adjust by a (parameter or local) base offset.
  if (var->IsParameter()) {
    offset += (info_->scope()->num_parameters() + 1) * kPointerSize;
  } else {
    offset += JavaScriptFrameConstants::kLocal0Offset;
  }
  return Operand(ebp, offset);
}


MemOperand FullCodeGenerator::VarOperand(Variable* var, Register scratch) {
  DCHECK(var->IsContextSlot() || var->IsStackAllocated());
  if (var->IsContextSlot()) {
    int context_chain_length = scope()->ContextChainLength(var->scope());
    __ LoadContext(scratch, context_chain_length);
    return ContextOperand(scratch, var->index());
  } else {
    return StackOperand(var);
  }
}


void FullCodeGenerator::GetVar(Register dest, Variable* var) {
  DCHECK(var->IsContextSlot() || var->IsStackAllocated());
  MemOperand location = VarOperand(var, dest);
  __ mov(dest, location);
}


void FullCodeGenerator::SetVar(Variable* var,
                               Register src,
                               Register scratch0,
                               Register scratch1) {
  DCHECK(var->IsContextSlot() || var->IsStackAllocated());
  DCHECK(!scratch0.is(src));
  DCHECK(!scratch0.is(scratch1));
  DCHECK(!scratch1.is(src));
  MemOperand location = VarOperand(var, scratch0);
  __ mov(location, src);

  // Emit the write barrier code if the location is in the heap.
  if (var->IsContextSlot()) {
    int offset = Context::SlotOffset(var->index());
    DCHECK(!scratch0.is(esi) && !src.is(esi) && !scratch1.is(esi));
    __ RecordWriteContextSlot(scratch0, offset, src, scratch1, kDontSaveFPRegs);
  }
}


void FullCodeGenerator::PrepareForBailoutBeforeSplit(Expression* expr,
                                                     bool should_normalize,
                                                     Label* if_true,
                                                     Label* if_false) {
  // Only prepare for bailouts before splits if we're in a test
  // context. Otherwise, we let the Visit function deal with the
  // preparation to avoid preparing with the same AST id twice.
  if (!context()->IsTest()) return;

  Label skip;
  if (should_normalize) __ jmp(&skip, Label::kNear);
  PrepareForBailout(expr, TOS_REG);
  if (should_normalize) {
    __ cmp(eax, isolate()->factory()->true_value());
    Split(equal, if_true, if_false, NULL);
    __ bind(&skip);
  }
}


void FullCodeGenerator::EmitDebugCheckDeclarationContext(Variable* variable) {
  // The variable in the declaration always resides in the current context.
  DCHECK_EQ(0, scope()->ContextChainLength(variable->scope()));
  if (FLAG_debug_code) {
    // Check that we're not inside a with or catch context.
    __ mov(ebx, FieldOperand(esi, HeapObject::kMapOffset));
    __ cmp(ebx, isolate()->factory()->with_context_map());
    __ Check(not_equal, kDeclarationInWithContext);
    __ cmp(ebx, isolate()->factory()->catch_context_map());
    __ Check(not_equal, kDeclarationInCatchContext);
  }
}


void FullCodeGenerator::VisitVariableDeclaration(
    VariableDeclaration* declaration) {
  // If it was not possible to allocate the variable at compile time, we
  // need to "declare" it at runtime to make sure it actually exists in the
  // local context.
  VariableProxy* proxy = declaration->proxy();
  VariableMode mode = declaration->mode();
  Variable* variable = proxy->var();
  bool hole_init = mode == LET || mode == CONST || mode == CONST_LEGACY;
  switch (variable->location()) {
    case VariableLocation::GLOBAL:
    case VariableLocation::UNALLOCATED:
      globals_->Add(variable->name(), zone());
      globals_->Add(variable->binding_needs_init()
                        ? isolate()->factory()->the_hole_value()
                        : isolate()->factory()->undefined_value(), zone());
      break;

    case VariableLocation::PARAMETER:
    case VariableLocation::LOCAL:
      if (hole_init) {
        Comment cmnt(masm_, "[ VariableDeclaration");
        __ mov(StackOperand(variable),
               Immediate(isolate()->factory()->the_hole_value()));
      }
      break;

    case VariableLocation::CONTEXT:
      if (hole_init) {
        Comment cmnt(masm_, "[ VariableDeclaration");
        EmitDebugCheckDeclarationContext(variable);
        __ mov(ContextOperand(esi, variable->index()),
               Immediate(isolate()->factory()->the_hole_value()));
        // No write barrier since the hole value is in old space.
        PrepareForBailoutForId(proxy->id(), NO_REGISTERS);
      }
      break;

    case VariableLocation::LOOKUP: {
      Comment cmnt(masm_, "[ VariableDeclaration");
      __ push(Immediate(variable->name()));
      // VariableDeclaration nodes are always introduced in one of four modes.
      DCHECK(IsDeclaredVariableMode(mode));
      // Push initial value, if any.
      // Note: For variables we must not push an initial value (such as
      // 'undefined') because we may have a (legal) redeclaration and we
      // must not destroy the current value.
      if (hole_init) {
        __ push(Immediate(isolate()->factory()->the_hole_value()));
      } else {
        __ push(Immediate(Smi::FromInt(0)));  // Indicates no initial value.
      }
      __ push(
          Immediate(Smi::FromInt(variable->DeclarationPropertyAttributes())));
      __ CallRuntime(Runtime::kDeclareLookupSlot);
      break;
    }
  }
}
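
// The hole-initialization above is what implements the temporal dead zone for
// lexical bindings: `let` and `const` slots start out holding the_hole, and
// loads check for it (see EmitVariableLoad). For example, in
//   { f(); let x = 1; function f() { return x; } }
// calling f() before the `let` executes reads the hole and throws a
// ReferenceError instead of producing undefined.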


void FullCodeGenerator::VisitFunctionDeclaration(
    FunctionDeclaration* declaration) {
  VariableProxy* proxy = declaration->proxy();
  Variable* variable = proxy->var();
  switch (variable->location()) {
    case VariableLocation::GLOBAL:
    case VariableLocation::UNALLOCATED: {
      globals_->Add(variable->name(), zone());
      Handle<SharedFunctionInfo> function =
          Compiler::GetSharedFunctionInfo(declaration->fun(), script(), info_);
      // Check for stack-overflow exception.
      if (function.is_null()) return SetStackOverflow();
      globals_->Add(function, zone());
      break;
    }

    case VariableLocation::PARAMETER:
    case VariableLocation::LOCAL: {
      Comment cmnt(masm_, "[ FunctionDeclaration");
      VisitForAccumulatorValue(declaration->fun());
      __ mov(StackOperand(variable), result_register());
      break;
    }

    case VariableLocation::CONTEXT: {
      Comment cmnt(masm_, "[ FunctionDeclaration");
      EmitDebugCheckDeclarationContext(variable);
      VisitForAccumulatorValue(declaration->fun());
      __ mov(ContextOperand(esi, variable->index()), result_register());
      // We know that we have written a function, which is not a smi.
      __ RecordWriteContextSlot(esi,
                                Context::SlotOffset(variable->index()),
                                result_register(),
                                ecx,
                                kDontSaveFPRegs,
                                EMIT_REMEMBERED_SET,
                                OMIT_SMI_CHECK);
      PrepareForBailoutForId(proxy->id(), NO_REGISTERS);
      break;
    }

    case VariableLocation::LOOKUP: {
      Comment cmnt(masm_, "[ FunctionDeclaration");
      PushOperand(variable->name());
      VisitForStackValue(declaration->fun());
      PushOperand(Smi::FromInt(variable->DeclarationPropertyAttributes()));
      CallRuntimeWithOperands(Runtime::kDeclareLookupSlot);
      break;
    }
  }
}


void FullCodeGenerator::DeclareGlobals(Handle<FixedArray> pairs) {
  // Call the runtime to declare the globals.
  __ Push(pairs);
  __ Push(Smi::FromInt(DeclareGlobalsFlags()));
  __ CallRuntime(Runtime::kDeclareGlobals);
  // Return value is ignored.
}


void FullCodeGenerator::DeclareModules(Handle<FixedArray> descriptions) {
  // Call the runtime to declare the modules.
  __ Push(descriptions);
  __ CallRuntime(Runtime::kDeclareModules);
  // Return value is ignored.
}


void FullCodeGenerator::VisitSwitchStatement(SwitchStatement* stmt) {
  Comment cmnt(masm_, "[ SwitchStatement");
  Breakable nested_statement(this, stmt);
  SetStatementPosition(stmt);

  // Keep the switch value on the stack until a case matches.
  VisitForStackValue(stmt->tag());
  PrepareForBailoutForId(stmt->EntryId(), NO_REGISTERS);

  ZoneList<CaseClause*>* clauses = stmt->cases();
  CaseClause* default_clause = NULL;  // Can occur anywhere in the list.

  Label next_test;  // Recycled for each test.
  // Compile all the tests with branches to their bodies.
  for (int i = 0; i < clauses->length(); i++) {
    CaseClause* clause = clauses->at(i);
    clause->body_target()->Unuse();

    // The default is not a test, but remember it as final fall through.
    if (clause->is_default()) {
      default_clause = clause;
      continue;
    }

    Comment cmnt(masm_, "[ Case comparison");
    __ bind(&next_test);
    next_test.Unuse();

    // Compile the label expression.
    VisitForAccumulatorValue(clause->label());

    // Perform the comparison as if via '==='.
    __ mov(edx, Operand(esp, 0));  // Switch value.
    bool inline_smi_code = ShouldInlineSmiCase(Token::EQ_STRICT);
    JumpPatchSite patch_site(masm_);
    if (inline_smi_code) {
      Label slow_case;
      __ mov(ecx, edx);
      __ or_(ecx, eax);
      patch_site.EmitJumpIfNotSmi(ecx, &slow_case, Label::kNear);

      __ cmp(edx, eax);
      __ j(not_equal, &next_test);
      __ Drop(1);  // Switch value is no longer needed.
      __ jmp(clause->body_target());
      __ bind(&slow_case);
    }
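
    // The `or` above folds both operands into a single smi check: the smi tag
    // bit is 0, so (edx | eax) has a clear low bit only if edx and eax are
    // both smis. One patchable jump thus guards the whole inlined comparison.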

    SetExpressionPosition(clause);
    Handle<Code> ic =
        CodeFactory::CompareIC(isolate(), Token::EQ_STRICT).code();
    CallIC(ic, clause->CompareId());
    patch_site.EmitPatchInfo();

    Label skip;
    __ jmp(&skip, Label::kNear);
    PrepareForBailout(clause, TOS_REG);
    __ cmp(eax, isolate()->factory()->true_value());
    __ j(not_equal, &next_test);
    __ Drop(1);
    __ jmp(clause->body_target());
    __ bind(&skip);

    __ test(eax, eax);
    __ j(not_equal, &next_test);
    __ Drop(1);  // Switch value is no longer needed.
    __ jmp(clause->body_target());
  }

  // Discard the test value and jump to the default if present, otherwise to
  // the end of the statement.
  __ bind(&next_test);
  DropOperands(1);  // Switch value is no longer needed.
  if (default_clause == NULL) {
    __ jmp(nested_statement.break_label());
  } else {
    __ jmp(default_clause->body_target());
  }

  // Compile all the case bodies.
  for (int i = 0; i < clauses->length(); i++) {
    Comment cmnt(masm_, "[ Case body");
    CaseClause* clause = clauses->at(i);
    __ bind(clause->body_target());
    PrepareForBailoutForId(clause->EntryId(), NO_REGISTERS);
    VisitStatements(clause->statements());
  }

  __ bind(nested_statement.break_label());
  PrepareForBailoutForId(stmt->ExitId(), NO_REGISTERS);
}


void FullCodeGenerator::VisitForInStatement(ForInStatement* stmt) {
  Comment cmnt(masm_, "[ ForInStatement");
  SetStatementPosition(stmt, SKIP_BREAK);

  FeedbackVectorSlot slot = stmt->ForInFeedbackSlot();

  Label loop, exit;
  ForIn loop_statement(this, stmt);
  increment_loop_depth();

  // Get the object to enumerate over.
  SetExpressionAsStatementPosition(stmt->enumerable());
  VisitForAccumulatorValue(stmt->enumerable());
  OperandStackDepthIncrement(ForIn::kElementCount);

  // If the object is null or undefined, skip over the loop, otherwise convert
  // it to a JS receiver. See ECMA-262 version 5, section 12.6.4.
  Label convert, done_convert;
  __ JumpIfSmi(eax, &convert, Label::kNear);
  __ CmpObjectType(eax, FIRST_JS_RECEIVER_TYPE, ecx);
  __ j(above_equal, &done_convert, Label::kNear);
  __ cmp(eax, isolate()->factory()->undefined_value());
  __ j(equal, &exit);
  __ cmp(eax, isolate()->factory()->null_value());
  __ j(equal, &exit);
  __ bind(&convert);
  ToObjectStub stub(isolate());
  __ CallStub(&stub);
  __ bind(&done_convert);
  PrepareForBailoutForId(stmt->ToObjectId(), TOS_REG);
  __ push(eax);

  // Check cache validity in generated code. This is a fast case for
  // the JSObject::IsSimpleEnum cache validity checks. If we cannot
  // guarantee cache validity, call the runtime system to check cache
  // validity or get the property names in a fixed array.
  // Note: Proxies never have an enum cache, so will always take the
  // slow path.
  Label call_runtime, use_cache, fixed_array;
  __ CheckEnumCache(&call_runtime);

  __ mov(eax, FieldOperand(eax, HeapObject::kMapOffset));
  __ jmp(&use_cache, Label::kNear);

  // Get the set of properties to enumerate.
  __ bind(&call_runtime);
  __ push(eax);
  __ CallRuntime(Runtime::kForInEnumerate);
  PrepareForBailoutForId(stmt->EnumId(), TOS_REG);
  __ cmp(FieldOperand(eax, HeapObject::kMapOffset),
         isolate()->factory()->meta_map());
  __ j(not_equal, &fixed_array);

  // We got a map in register eax. Get the enumeration cache from it.
  Label no_descriptors;
  __ bind(&use_cache);

  __ EnumLength(edx, eax);
  __ cmp(edx, Immediate(Smi::FromInt(0)));
  __ j(equal, &no_descriptors);

  __ LoadInstanceDescriptors(eax, ecx);
  __ mov(ecx, FieldOperand(ecx, DescriptorArray::kEnumCacheOffset));
  __ mov(ecx, FieldOperand(ecx, DescriptorArray::kEnumCacheBridgeCacheOffset));

  // Set up the four remaining stack slots.
  __ push(eax);  // Map.
  __ push(ecx);  // Enumeration cache.
  __ push(edx);  // Number of valid entries for the map in the enum cache.
  __ push(Immediate(Smi::FromInt(0)));  // Initial index.
  __ jmp(&loop);

  __ bind(&no_descriptors);
  __ add(esp, Immediate(kPointerSize));
  __ jmp(&exit);

  // We got a fixed array in register eax. Iterate through that.
  __ bind(&fixed_array);

  // No need for a write barrier, we are storing a Smi in the feedback vector.
  int const vector_index = SmiFromSlot(slot)->value();
  __ EmitLoadTypeFeedbackVector(ebx);
  __ mov(FieldOperand(ebx, FixedArray::OffsetOfElementAt(vector_index)),
         Immediate(TypeFeedbackVector::MegamorphicSentinel(isolate())));
  __ push(Immediate(Smi::FromInt(1)));  // Smi(1) indicates slow check.
  __ push(eax);  // Array.
  __ mov(eax, FieldOperand(eax, FixedArray::kLengthOffset));
  __ push(eax);  // Fixed array length (as smi).
  PrepareForBailoutForId(stmt->PrepareId(), NO_REGISTERS);
  __ push(Immediate(Smi::FromInt(0)));  // Initial index.
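
  // At this point the loop state occupies five stack slots (matching
  // ForIn::kElementCount), which the loop below reads back at fixed offsets:
  //   esp[0 * kPointerSize]  current index (smi)
  //   esp[1 * kPointerSize]  length of the cache / fixed array (smi)
  //   esp[2 * kPointerSize]  enum cache array or fixed array of keys
  //   esp[3 * kPointerSize]  expected map, or Smi(1) in the slow case
  //   esp[4 * kPointerSize]  the enumerable object itself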
1022
1023 // Generate code for doing the condition check.
1024 __ bind(&loop);
1025 SetExpressionAsStatementPosition(stmt->each());
1026
1027 __ mov(eax, Operand(esp, 0 * kPointerSize)); // Get the current index.
1028 __ cmp(eax, Operand(esp, 1 * kPointerSize)); // Compare to the array length.
1029 __ j(above_equal, loop_statement.break_label());
1030
1031 // Get the current entry of the array into register ebx.
1032 __ mov(ebx, Operand(esp, 2 * kPointerSize));
1033 __ mov(ebx, FieldOperand(ebx, eax, times_2, FixedArray::kHeaderSize));
1034
1035 // Get the expected map from the stack or a smi in the
1036 // permanent slow case into register edx.
1037 __ mov(edx, Operand(esp, 3 * kPointerSize));
1038
1039 // Check if the expected map still matches that of the enumerable.
1040 // If not, we may have to filter the key.
1041 Label update_each;
1042 __ mov(ecx, Operand(esp, 4 * kPointerSize));
1043 __ cmp(edx, FieldOperand(ecx, HeapObject::kMapOffset));
1044 __ j(equal, &update_each, Label::kNear);
1045
Ben Murdoch097c5b22016-05-18 11:27:45 +01001046 // We might get here from TurboFan or Crankshaft when something in the
1047 // for-in loop body deopts and only now notice in fullcodegen, that we
1048 // can now longer use the enum cache, i.e. left fast mode. So better record
1049 // this information here, in case we later OSR back into this loop or
1050 // reoptimize the whole function w/o rerunning the loop with the slow
1051 // mode object in fullcodegen (which would result in a deopt loop).
1052 __ EmitLoadTypeFeedbackVector(edx);
1053 __ mov(FieldOperand(edx, FixedArray::OffsetOfElementAt(vector_index)),
1054 Immediate(TypeFeedbackVector::MegamorphicSentinel(isolate())));
1055
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00001056 // Convert the entry to a string or null if it isn't a property
1057 // anymore. If the property has been removed while iterating, we
1058 // just skip it.
1059 __ push(ecx); // Enumerable.
1060 __ push(ebx); // Current entry.
1061 __ CallRuntime(Runtime::kForInFilter);
1062 PrepareForBailoutForId(stmt->FilterId(), TOS_REG);
1063 __ cmp(eax, isolate()->factory()->undefined_value());
1064 __ j(equal, loop_statement.continue_label());
1065 __ mov(ebx, eax);
1066
1067 // Update the 'each' property or variable from the possibly filtered
1068 // entry in register ebx.
1069 __ bind(&update_each);
1070 __ mov(result_register(), ebx);
1071 // Perform the assignment as if via '='.
1072 { EffectContext context(this);
1073 EmitAssignment(stmt->each(), stmt->EachFeedbackSlot());
1074 PrepareForBailoutForId(stmt->AssignmentId(), NO_REGISTERS);
1075 }
1076
1077 // Both Crankshaft and Turbofan expect BodyId to be right before stmt->body().
1078 PrepareForBailoutForId(stmt->BodyId(), NO_REGISTERS);
1079 // Generate code for the body of the loop.
1080 Visit(stmt->body());
1081
1082 // Generate code for going to the next element by incrementing the
1083 // index (smi) stored on top of the stack.
1084 __ bind(loop_statement.continue_label());
1085 __ add(Operand(esp, 0 * kPointerSize), Immediate(Smi::FromInt(1)));
1086
1087 EmitBackEdgeBookkeeping(stmt, &loop);
1088 __ jmp(&loop);
1089
1090 // Remove the pointers stored on the stack.
1091 __ bind(loop_statement.break_label());
1092 __ add(esp, Immediate(5 * kPointerSize));
Ben Murdoch097c5b22016-05-18 11:27:45 +01001093 OperandStackDepthDecrement(ForIn::kElementCount);
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00001094
1095 // Exit and decrement the loop depth.
1096 PrepareForBailoutForId(stmt->ExitId(), NO_REGISTERS);
1097 __ bind(&exit);
1098 decrement_loop_depth();
1099}
1100
1101
1102void FullCodeGenerator::EmitNewClosure(Handle<SharedFunctionInfo> info,
1103 bool pretenure) {
1104 // Use the fast case closure allocation code that allocates in new
1105 // space for nested functions that don't need literals cloning. If
1106 // we're running with the --always-opt or the --prepare-always-opt
1107 // flag, we need to use the runtime function so that the new function
1108 // we are creating here gets a chance to have its code optimized and
1109 // doesn't just get a copy of the existing unoptimized code.
1110 if (!FLAG_always_opt &&
1111 !FLAG_prepare_always_opt &&
1112 !pretenure &&
1113 scope()->is_function_scope() &&
1114 info->num_literals() == 0) {
1115 FastNewClosureStub stub(isolate(), info->language_mode(), info->kind());
1116 __ mov(ebx, Immediate(info));
1117 __ CallStub(&stub);
1118 } else {
1119 __ push(Immediate(info));
1120 __ CallRuntime(pretenure ? Runtime::kNewClosure_Tenured
1121 : Runtime::kNewClosure);
1122 }
1123 context()->Plug(eax);
1124}
1125
1126
1127void FullCodeGenerator::EmitSetHomeObject(Expression* initializer, int offset,
1128 FeedbackVectorSlot slot) {
1129 DCHECK(NeedsHomeObject(initializer));
1130 __ mov(StoreDescriptor::ReceiverRegister(), Operand(esp, 0));
1131 __ mov(StoreDescriptor::NameRegister(),
1132 Immediate(isolate()->factory()->home_object_symbol()));
1133 __ mov(StoreDescriptor::ValueRegister(), Operand(esp, offset * kPointerSize));
1134 EmitLoadStoreICSlot(slot);
1135 CallStoreIC();
1136}
1137
1138
1139void FullCodeGenerator::EmitSetHomeObjectAccumulator(Expression* initializer,
1140 int offset,
1141 FeedbackVectorSlot slot) {
1142 DCHECK(NeedsHomeObject(initializer));
1143 __ mov(StoreDescriptor::ReceiverRegister(), eax);
1144 __ mov(StoreDescriptor::NameRegister(),
1145 Immediate(isolate()->factory()->home_object_symbol()));
1146 __ mov(StoreDescriptor::ValueRegister(), Operand(esp, offset * kPointerSize));
1147 EmitLoadStoreICSlot(slot);
1148 CallStoreIC();
1149}
1150
1151
1152void FullCodeGenerator::EmitLoadGlobalCheckExtensions(VariableProxy* proxy,
1153 TypeofMode typeof_mode,
1154 Label* slow) {
1155 Register context = esi;
1156 Register temp = edx;
1157
1158 Scope* s = scope();
1159 while (s != NULL) {
1160 if (s->num_heap_slots() > 0) {
1161 if (s->calls_sloppy_eval()) {
1162 // Check that extension is "the hole".
1163 __ JumpIfNotRoot(ContextOperand(context, Context::EXTENSION_INDEX),
1164 Heap::kTheHoleValueRootIndex, slow);
1165 }
1166 // Load next context in chain.
1167 __ mov(temp, ContextOperand(context, Context::PREVIOUS_INDEX));
1168 // Walk the rest of the chain without clobbering esi.
1169 context = temp;
1170 }
1171 // If no outer scope calls eval, we do not need to check more
1172 // context extensions. If we have reached an eval scope, we check
1173 // all extensions from this point.
1174 if (!s->outer_scope_calls_sloppy_eval() || s->is_eval_scope()) break;
1175 s = s->outer_scope();
1176 }
1177
1178 if (s != NULL && s->is_eval_scope()) {
1179 // Loop up the context chain. There is no frame effect so it is
1180 // safe to use raw labels here.
1181 Label next, fast;
1182 if (!context.is(temp)) {
1183 __ mov(temp, context);
1184 }
1185 __ bind(&next);
1186 // Terminate at native context.
1187 __ cmp(FieldOperand(temp, HeapObject::kMapOffset),
1188 Immediate(isolate()->factory()->native_context_map()));
1189 __ j(equal, &fast, Label::kNear);
1190 // Check that extension is "the hole".
1191 __ JumpIfNotRoot(ContextOperand(temp, Context::EXTENSION_INDEX),
1192 Heap::kTheHoleValueRootIndex, slow);
1193 // Load next context in chain.
1194 __ mov(temp, ContextOperand(temp, Context::PREVIOUS_INDEX));
1195 __ jmp(&next);
1196 __ bind(&fast);
1197 }
1198
1199 // All extension objects were empty and it is safe to use a normal global
1200 // load machinery.
1201 EmitGlobalVariableLoad(proxy, typeof_mode);
1202}
1203
1204
1205MemOperand FullCodeGenerator::ContextSlotOperandCheckExtensions(Variable* var,
1206 Label* slow) {
1207 DCHECK(var->IsContextSlot());
1208 Register context = esi;
1209 Register temp = ebx;
1210
1211 for (Scope* s = scope(); s != var->scope(); s = s->outer_scope()) {
1212 if (s->num_heap_slots() > 0) {
1213 if (s->calls_sloppy_eval()) {
1214 // Check that extension is "the hole".
1215 __ JumpIfNotRoot(ContextOperand(context, Context::EXTENSION_INDEX),
1216 Heap::kTheHoleValueRootIndex, slow);
1217 }
1218 __ mov(temp, ContextOperand(context, Context::PREVIOUS_INDEX));
1219 // Walk the rest of the chain without clobbering esi.
1220 context = temp;
1221 }
1222 }
1223 // Check that last extension is "the hole".
1224 __ JumpIfNotRoot(ContextOperand(context, Context::EXTENSION_INDEX),
1225 Heap::kTheHoleValueRootIndex, slow);
1226
1227 // This function is used only for loads, not stores, so it's safe to
1228 // return an esi-based operand (the write barrier cannot be allowed to
1229 // destroy the esi register).
1230 return ContextOperand(context, var->index());
1231}
1232
1233
1234void FullCodeGenerator::EmitDynamicLookupFastCase(VariableProxy* proxy,
1235 TypeofMode typeof_mode,
1236 Label* slow, Label* done) {
1237 // Generate fast-case code for variables that might be shadowed by
1238 // eval-introduced variables. Eval is used a lot without
1239 // introducing variables. In those cases, we do not want to
1240 // perform a runtime call for all variables in the scope
1241 // containing the eval.
1242 Variable* var = proxy->var();
1243 if (var->mode() == DYNAMIC_GLOBAL) {
1244 EmitLoadGlobalCheckExtensions(proxy, typeof_mode, slow);
1245 __ jmp(done);
1246 } else if (var->mode() == DYNAMIC_LOCAL) {
1247 Variable* local = var->local_if_not_shadowed();
1248 __ mov(eax, ContextSlotOperandCheckExtensions(local, slow));
1249 if (local->mode() == LET || local->mode() == CONST ||
1250 local->mode() == CONST_LEGACY) {
1251 __ cmp(eax, isolate()->factory()->the_hole_value());
1252 __ j(not_equal, done);
1253 if (local->mode() == CONST_LEGACY) {
1254 __ mov(eax, isolate()->factory()->undefined_value());
1255 } else { // LET || CONST
1256 __ push(Immediate(var->name()));
1257 __ CallRuntime(Runtime::kThrowReferenceError);
1258 }
1259 }
1260 __ jmp(done);
1261 }
1262}
1263
1264
1265void FullCodeGenerator::EmitGlobalVariableLoad(VariableProxy* proxy,
1266 TypeofMode typeof_mode) {
1267 Variable* var = proxy->var();
1268 DCHECK(var->IsUnallocatedOrGlobalSlot() ||
1269 (var->IsLookupSlot() && var->mode() == DYNAMIC_GLOBAL));
1270 __ mov(LoadDescriptor::ReceiverRegister(), NativeContextOperand());
1271 __ mov(LoadDescriptor::ReceiverRegister(),
1272 ContextOperand(LoadDescriptor::ReceiverRegister(),
1273 Context::EXTENSION_INDEX));
1274 __ mov(LoadDescriptor::NameRegister(), var->name());
1275 __ mov(LoadDescriptor::SlotRegister(),
1276 Immediate(SmiFromSlot(proxy->VariableFeedbackSlot())));
1277 CallLoadIC(typeof_mode);
1278}
1279
1280
1281void FullCodeGenerator::EmitVariableLoad(VariableProxy* proxy,
1282 TypeofMode typeof_mode) {
1283 SetExpressionPosition(proxy);
1284 PrepareForBailoutForId(proxy->BeforeId(), NO_REGISTERS);
1285 Variable* var = proxy->var();
1286
1287 // Three cases: global variables, lookup variables, and all other types of
1288 // variables.
1289 switch (var->location()) {
1290 case VariableLocation::GLOBAL:
1291 case VariableLocation::UNALLOCATED: {
1292 Comment cmnt(masm_, "[ Global variable");
1293 EmitGlobalVariableLoad(proxy, typeof_mode);
1294 context()->Plug(eax);
1295 break;
1296 }
1297
1298 case VariableLocation::PARAMETER:
1299 case VariableLocation::LOCAL:
1300 case VariableLocation::CONTEXT: {
1301 DCHECK_EQ(NOT_INSIDE_TYPEOF, typeof_mode);
1302 Comment cmnt(masm_, var->IsContextSlot() ? "[ Context variable"
1303 : "[ Stack variable");
1304
1305 if (NeedsHoleCheckForLoad(proxy)) {
1306 // Let and const need a read barrier.
1307 Label done;
1308 GetVar(eax, var);
1309 __ cmp(eax, isolate()->factory()->the_hole_value());
1310 __ j(not_equal, &done, Label::kNear);
1311 if (var->mode() == LET || var->mode() == CONST) {
1312 // Throw a reference error when using an uninitialized let/const
1313 // binding in harmony mode.
1314 __ push(Immediate(var->name()));
1315 __ CallRuntime(Runtime::kThrowReferenceError);
1316 } else {
1317 // Uninitialized legacy const bindings are unholed.
1318 DCHECK(var->mode() == CONST_LEGACY);
1319 __ mov(eax, isolate()->factory()->undefined_value());
1320 }
1321 __ bind(&done);
1322 context()->Plug(eax);
1323 break;
1324 }
1325 context()->Plug(var);
1326 break;
1327 }
1328
1329 case VariableLocation::LOOKUP: {
1330 Comment cmnt(masm_, "[ Lookup variable");
1331 Label done, slow;
1332 // Generate code for loading from variables potentially shadowed
1333 // by eval-introduced variables.
1334 EmitDynamicLookupFastCase(proxy, typeof_mode, &slow, &done);
1335 __ bind(&slow);
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00001336 __ push(Immediate(var->name()));
1337 Runtime::FunctionId function_id =
1338 typeof_mode == NOT_INSIDE_TYPEOF
1339 ? Runtime::kLoadLookupSlot
Ben Murdoch097c5b22016-05-18 11:27:45 +01001340 : Runtime::kLoadLookupSlotInsideTypeof;
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00001341 __ CallRuntime(function_id);
1342 __ bind(&done);
1343 context()->Plug(eax);
1344 break;
1345 }
1346 }
1347}
1348
1349
1350void FullCodeGenerator::VisitRegExpLiteral(RegExpLiteral* expr) {
1351 Comment cmnt(masm_, "[ RegExpLiteral");
1352 __ mov(edi, Operand(ebp, JavaScriptFrameConstants::kFunctionOffset));
1353 __ Move(eax, Immediate(Smi::FromInt(expr->literal_index())));
1354 __ Move(ecx, Immediate(expr->pattern()));
1355 __ Move(edx, Immediate(Smi::FromInt(expr->flags())));
1356 FastCloneRegExpStub stub(isolate());
1357 __ CallStub(&stub);
1358 context()->Plug(eax);
1359}
1360
1361
1362void FullCodeGenerator::EmitAccessor(ObjectLiteralProperty* property) {
1363 Expression* expression = (property == NULL) ? NULL : property->value();
1364 if (expression == NULL) {
Ben Murdoch097c5b22016-05-18 11:27:45 +01001365 PushOperand(isolate()->factory()->null_value());
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00001366 } else {
1367 VisitForStackValue(expression);
1368 if (NeedsHomeObject(expression)) {
1369 DCHECK(property->kind() == ObjectLiteral::Property::GETTER ||
1370 property->kind() == ObjectLiteral::Property::SETTER);
1371 int offset = property->kind() == ObjectLiteral::Property::GETTER ? 2 : 3;
1372 EmitSetHomeObject(expression, offset, property->GetSlot());
1373 }
1374 }
1375}
1376
1377
1378void FullCodeGenerator::VisitObjectLiteral(ObjectLiteral* expr) {
1379 Comment cmnt(masm_, "[ ObjectLiteral");
1380
1381 Handle<FixedArray> constant_properties = expr->constant_properties();
1382 int flags = expr->ComputeFlags();
1383 // If any of the keys would store to the elements array, then we shouldn't
1384 // allow it.
1385 if (MustCreateObjectLiteralWithRuntime(expr)) {
1386 __ push(Operand(ebp, JavaScriptFrameConstants::kFunctionOffset));
1387 __ push(Immediate(Smi::FromInt(expr->literal_index())));
1388 __ push(Immediate(constant_properties));
1389 __ push(Immediate(Smi::FromInt(flags)));
1390 __ CallRuntime(Runtime::kCreateObjectLiteral);
1391 } else {
1392 __ mov(eax, Operand(ebp, JavaScriptFrameConstants::kFunctionOffset));
1393 __ mov(ebx, Immediate(Smi::FromInt(expr->literal_index())));
1394 __ mov(ecx, Immediate(constant_properties));
1395 __ mov(edx, Immediate(Smi::FromInt(flags)));
1396 FastCloneShallowObjectStub stub(isolate(), expr->properties_count());
1397 __ CallStub(&stub);
1398 }
1399 PrepareForBailoutForId(expr->CreateLiteralId(), TOS_REG);
1400
1401 // If result_saved is true the result is on top of the stack. If
1402 // result_saved is false the result is in eax.
1403 bool result_saved = false;
1404
1405 AccessorTable accessor_table(zone());
1406 int property_index = 0;
1407 for (; property_index < expr->properties()->length(); property_index++) {
1408 ObjectLiteral::Property* property = expr->properties()->at(property_index);
1409 if (property->is_computed_name()) break;
1410 if (property->IsCompileTimeValue()) continue;
1411
1412 Literal* key = property->key()->AsLiteral();
1413 Expression* value = property->value();
1414 if (!result_saved) {
Ben Murdoch097c5b22016-05-18 11:27:45 +01001415 PushOperand(eax); // Save result on the stack
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00001416 result_saved = true;
1417 }
1418 switch (property->kind()) {
1419 case ObjectLiteral::Property::CONSTANT:
1420 UNREACHABLE();
1421 case ObjectLiteral::Property::MATERIALIZED_LITERAL:
1422 DCHECK(!CompileTimeValue::IsCompileTimeValue(value));
1423 // Fall through.
1424 case ObjectLiteral::Property::COMPUTED:
1425 // It is safe to use [[Put]] here because the boilerplate already
1426 // contains computed properties with an uninitialized value.
1427 if (key->value()->IsInternalizedString()) {
1428 if (property->emit_store()) {
1429 VisitForAccumulatorValue(value);
1430 DCHECK(StoreDescriptor::ValueRegister().is(eax));
1431 __ mov(StoreDescriptor::NameRegister(), Immediate(key->value()));
1432 __ mov(StoreDescriptor::ReceiverRegister(), Operand(esp, 0));
1433 EmitLoadStoreICSlot(property->GetSlot(0));
1434 CallStoreIC();
1435 PrepareForBailoutForId(key->id(), NO_REGISTERS);
1436 if (NeedsHomeObject(value)) {
1437 EmitSetHomeObjectAccumulator(value, 0, property->GetSlot(1));
1438 }
1439 } else {
1440 VisitForEffect(value);
1441 }
1442 break;
1443 }
Ben Murdoch097c5b22016-05-18 11:27:45 +01001444 PushOperand(Operand(esp, 0)); // Duplicate receiver.
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00001445 VisitForStackValue(key);
1446 VisitForStackValue(value);
1447 if (property->emit_store()) {
1448 if (NeedsHomeObject(value)) {
1449 EmitSetHomeObject(value, 2, property->GetSlot());
1450 }
Ben Murdoch097c5b22016-05-18 11:27:45 +01001451 PushOperand(Smi::FromInt(SLOPPY)); // Language mode
1452 CallRuntimeWithOperands(Runtime::kSetProperty);
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00001453 } else {
Ben Murdoch097c5b22016-05-18 11:27:45 +01001454 DropOperands(3);
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00001455 }
1456 break;
1457 case ObjectLiteral::Property::PROTOTYPE:
Ben Murdoch097c5b22016-05-18 11:27:45 +01001458 PushOperand(Operand(esp, 0)); // Duplicate receiver.
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00001459 VisitForStackValue(value);
1460 DCHECK(property->emit_store());
Ben Murdoch097c5b22016-05-18 11:27:45 +01001461 CallRuntimeWithOperands(Runtime::kInternalSetPrototype);
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00001462 PrepareForBailoutForId(expr->GetIdForPropertySet(property_index),
1463 NO_REGISTERS);
1464 break;
1465 case ObjectLiteral::Property::GETTER:
1466 if (property->emit_store()) {
1467 accessor_table.lookup(key)->second->getter = property;
1468 }
1469 break;
1470 case ObjectLiteral::Property::SETTER:
1471 if (property->emit_store()) {
1472 accessor_table.lookup(key)->second->setter = property;
1473 }
1474 break;
1475 }
1476 }
1477
1478 // Emit code to define accessors, using only a single call to the runtime for
1479 // each pair of corresponding getters and setters.
1480 for (AccessorTable::Iterator it = accessor_table.begin();
1481 it != accessor_table.end();
1482 ++it) {
Ben Murdoch097c5b22016-05-18 11:27:45 +01001483 PushOperand(Operand(esp, 0)); // Duplicate receiver.
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00001484 VisitForStackValue(it->first);
1485
1486 EmitAccessor(it->second->getter);
1487 EmitAccessor(it->second->setter);
1488
Ben Murdoch097c5b22016-05-18 11:27:45 +01001489 PushOperand(Smi::FromInt(NONE));
1490 CallRuntimeWithOperands(Runtime::kDefineAccessorPropertyUnchecked);
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00001491 }
1492
1493 // Object literals have two parts. The "static" part on the left contains no
1494 // computed property names, and so we can compute its map ahead of time; see
1495 // runtime.cc::CreateObjectLiteralBoilerplate. The second "dynamic" part
1496 // starts with the first computed property name and continues with all
1497 // properties to its right. All the code above initializes the static
1498 // component of the object literal and arranges for the map of the result to
1499 // reflect the static order in which the keys appear. The dynamic properties
1500 // are compiled into a series of "SetOwnProperty" runtime calls, which
1501 // preserves insertion order.
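// Illustrative example (an assumption): in a literal such as
//   var o = {a: 1, b: 2, [k]: 3, c: 4};
// 'a' and 'b' belong to the static part and were handled above, while
// [k]: 3 and the trailing c: 4 form the dynamic part, defined one by one
// in the loop below.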
1502 for (; property_index < expr->properties()->length(); property_index++) {
1503 ObjectLiteral::Property* property = expr->properties()->at(property_index);
1504
1505 Expression* value = property->value();
1506 if (!result_saved) {
Ben Murdoch097c5b22016-05-18 11:27:45 +01001507 PushOperand(eax); // Save result on the stack
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00001508 result_saved = true;
1509 }
1510
Ben Murdoch097c5b22016-05-18 11:27:45 +01001511 PushOperand(Operand(esp, 0)); // Duplicate receiver.
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00001512
1513 if (property->kind() == ObjectLiteral::Property::PROTOTYPE) {
1514 DCHECK(!property->is_computed_name());
1515 VisitForStackValue(value);
1516 DCHECK(property->emit_store());
Ben Murdoch097c5b22016-05-18 11:27:45 +01001517 CallRuntimeWithOperands(Runtime::kInternalSetPrototype);
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00001518 PrepareForBailoutForId(expr->GetIdForPropertySet(property_index),
1519 NO_REGISTERS);
1520 } else {
1521 EmitPropertyKey(property, expr->GetIdForPropertyName(property_index));
1522 VisitForStackValue(value);
1523 if (NeedsHomeObject(value)) {
1524 EmitSetHomeObject(value, 2, property->GetSlot());
1525 }
1526
1527 switch (property->kind()) {
1528 case ObjectLiteral::Property::CONSTANT:
1529 case ObjectLiteral::Property::MATERIALIZED_LITERAL:
1530 case ObjectLiteral::Property::COMPUTED:
1531 if (property->emit_store()) {
Ben Murdoch097c5b22016-05-18 11:27:45 +01001532 PushOperand(Smi::FromInt(NONE));
1533 PushOperand(Smi::FromInt(property->NeedsSetFunctionName()));
1534 CallRuntimeWithOperands(Runtime::kDefineDataPropertyInLiteral);
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00001535 } else {
Ben Murdoch097c5b22016-05-18 11:27:45 +01001536 DropOperands(3);
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00001537 }
1538 break;
1539
1540 case ObjectLiteral::Property::PROTOTYPE:
1541 UNREACHABLE();
1542 break;
1543
1544 case ObjectLiteral::Property::GETTER:
Ben Murdoch097c5b22016-05-18 11:27:45 +01001545 PushOperand(Smi::FromInt(NONE));
1546 CallRuntimeWithOperands(Runtime::kDefineGetterPropertyUnchecked);
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00001547 break;
1548
1549 case ObjectLiteral::Property::SETTER:
Ben Murdoch097c5b22016-05-18 11:27:45 +01001550 PushOperand(Smi::FromInt(NONE));
1551 CallRuntimeWithOperands(Runtime::kDefineSetterPropertyUnchecked);
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00001552 break;
1553 }
1554 }
1555 }
1556
1557 if (expr->has_function()) {
1558 DCHECK(result_saved);
1559 __ push(Operand(esp, 0));
1560 __ CallRuntime(Runtime::kToFastProperties);
1561 }
1562
1563 if (result_saved) {
1564 context()->PlugTOS();
1565 } else {
1566 context()->Plug(eax);
1567 }
1568}
1569
1570
1571void FullCodeGenerator::VisitArrayLiteral(ArrayLiteral* expr) {
1572 Comment cmnt(masm_, "[ ArrayLiteral");
1573
1574 Handle<FixedArray> constant_elements = expr->constant_elements();
1575 bool has_constant_fast_elements =
1576 IsFastObjectElementsKind(expr->constant_elements_kind());
1577
1578 AllocationSiteMode allocation_site_mode = TRACK_ALLOCATION_SITE;
1579 if (has_constant_fast_elements && !FLAG_allocation_site_pretenuring) {
1580 // If the only consumer of the allocation site is elements-kind
1581 // transitioning, we can skip tracking when there is nothing to transition to.
1582 allocation_site_mode = DONT_TRACK_ALLOCATION_SITE;
1583 }
1584
1585 if (MustCreateArrayLiteralWithRuntime(expr)) {
1586 __ push(Operand(ebp, JavaScriptFrameConstants::kFunctionOffset));
1587 __ push(Immediate(Smi::FromInt(expr->literal_index())));
1588 __ push(Immediate(constant_elements));
1589 __ push(Immediate(Smi::FromInt(expr->ComputeFlags())));
1590 __ CallRuntime(Runtime::kCreateArrayLiteral);
1591 } else {
1592 __ mov(eax, Operand(ebp, JavaScriptFrameConstants::kFunctionOffset));
1593 __ mov(ebx, Immediate(Smi::FromInt(expr->literal_index())));
1594 __ mov(ecx, Immediate(constant_elements));
1595 FastCloneShallowArrayStub stub(isolate(), allocation_site_mode);
1596 __ CallStub(&stub);
1597 }
1598 PrepareForBailoutForId(expr->CreateLiteralId(), TOS_REG);
1599
1600 bool result_saved = false; // Is the result saved to the stack?
1601 ZoneList<Expression*>* subexprs = expr->values();
1602 int length = subexprs->length();
1603
1604 // Emit code to evaluate all the non-constant subexpressions and to store
1605 // them into the newly cloned array.
1606 int array_index = 0;
1607 for (; array_index < length; array_index++) {
1608 Expression* subexpr = subexprs->at(array_index);
Ben Murdoch097c5b22016-05-18 11:27:45 +01001609 DCHECK(!subexpr->IsSpread());
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00001610
1611 // If the subexpression is a literal or a simple materialized literal, it
1612 // is already set in the cloned array.
1613 if (CompileTimeValue::IsCompileTimeValue(subexpr)) continue;
1614
1615 if (!result_saved) {
Ben Murdoch097c5b22016-05-18 11:27:45 +01001616 PushOperand(eax); // array literal.
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00001617 result_saved = true;
1618 }
1619 VisitForAccumulatorValue(subexpr);
1620
1621 __ mov(StoreDescriptor::NameRegister(),
1622 Immediate(Smi::FromInt(array_index)));
1623 __ mov(StoreDescriptor::ReceiverRegister(), Operand(esp, 0));
1624 EmitLoadStoreICSlot(expr->LiteralFeedbackSlot());
1625 Handle<Code> ic =
1626 CodeFactory::KeyedStoreIC(isolate(), language_mode()).code();
1627 CallIC(ic);
1628 PrepareForBailoutForId(expr->GetIdForElement(array_index), NO_REGISTERS);
1629 }
1630
1631 // If the array literal contains spread expressions, it has two parts. The
1632 // first part is the "static" array, which has a literal index and is
1633 // handled above. The second part starts at the first spread expression
1634 // (inclusive), and its elements get appended to the array. Note that the
1635 // number of elements an iterable produces is unknown ahead of time.
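// Illustrative example (an assumption): in a literal like [x, y, ...iter]
// the elements before the spread are stored through the keyed store IC
// above, while the remaining elements are appended one at a time via
// Runtime::kAppendElement in the loop below.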
1636 if (array_index < length && result_saved) {
Ben Murdoch097c5b22016-05-18 11:27:45 +01001637 PopOperand(eax);
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00001638 result_saved = false;
1639 }
1640 for (; array_index < length; array_index++) {
1641 Expression* subexpr = subexprs->at(array_index);
1642
Ben Murdoch097c5b22016-05-18 11:27:45 +01001643 PushOperand(eax);
1644 DCHECK(!subexpr->IsSpread());
1645 VisitForStackValue(subexpr);
1646 CallRuntimeWithOperands(Runtime::kAppendElement);
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00001647
1648 PrepareForBailoutForId(expr->GetIdForElement(array_index), NO_REGISTERS);
1649 }
1650
1651 if (result_saved) {
1652 context()->PlugTOS();
1653 } else {
1654 context()->Plug(eax);
1655 }
1656}
1657
1658
1659void FullCodeGenerator::VisitAssignment(Assignment* expr) {
1660 DCHECK(expr->target()->IsValidReferenceExpressionOrThis());
1661
1662 Comment cmnt(masm_, "[ Assignment");
1663 SetExpressionPosition(expr, INSERT_BREAK);
1664
1665 Property* property = expr->target()->AsProperty();
1666 LhsKind assign_type = Property::GetAssignType(property);
1667
1668 // Evaluate LHS expression.
1669 switch (assign_type) {
1670 case VARIABLE:
1671 // Nothing to do here.
1672 break;
1673 case NAMED_SUPER_PROPERTY:
1674 VisitForStackValue(
1675 property->obj()->AsSuperPropertyReference()->this_var());
1676 VisitForAccumulatorValue(
1677 property->obj()->AsSuperPropertyReference()->home_object());
Ben Murdoch097c5b22016-05-18 11:27:45 +01001678 PushOperand(result_register());
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00001679 if (expr->is_compound()) {
Ben Murdoch097c5b22016-05-18 11:27:45 +01001680 PushOperand(MemOperand(esp, kPointerSize));
1681 PushOperand(result_register());
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00001682 }
1683 break;
1684 case NAMED_PROPERTY:
1685 if (expr->is_compound()) {
1686 // We need the receiver both on the stack and in the register.
1687 VisitForStackValue(property->obj());
1688 __ mov(LoadDescriptor::ReceiverRegister(), Operand(esp, 0));
1689 } else {
1690 VisitForStackValue(property->obj());
1691 }
1692 break;
1693 case KEYED_SUPER_PROPERTY:
1694 VisitForStackValue(
1695 property->obj()->AsSuperPropertyReference()->this_var());
1696 VisitForStackValue(
1697 property->obj()->AsSuperPropertyReference()->home_object());
1698 VisitForAccumulatorValue(property->key());
Ben Murdoch097c5b22016-05-18 11:27:45 +01001699 PushOperand(result_register());
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00001700 if (expr->is_compound()) {
Ben Murdoch097c5b22016-05-18 11:27:45 +01001701 PushOperand(MemOperand(esp, 2 * kPointerSize));
1702 PushOperand(MemOperand(esp, 2 * kPointerSize));
1703 PushOperand(result_register());
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00001704 }
1705 break;
1706 case KEYED_PROPERTY: {
1707 if (expr->is_compound()) {
1708 VisitForStackValue(property->obj());
1709 VisitForStackValue(property->key());
1710 __ mov(LoadDescriptor::ReceiverRegister(), Operand(esp, kPointerSize));
1711 __ mov(LoadDescriptor::NameRegister(), Operand(esp, 0));
1712 } else {
1713 VisitForStackValue(property->obj());
1714 VisitForStackValue(property->key());
1715 }
1716 break;
1717 }
1718 }
1719
1720 // For compound assignments we need another deoptimization point after the
1721 // variable/property load.
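// Illustrative example (an assumption): for o.x += 1 the load of o.x in
// the switch below can deoptimize independently of the subsequent add,
// so the load records its own bailout id separate from the operation's.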
1722 if (expr->is_compound()) {
1723 AccumulatorValueContext result_context(this);
1724 { AccumulatorValueContext left_operand_context(this);
1725 switch (assign_type) {
1726 case VARIABLE:
1727 EmitVariableLoad(expr->target()->AsVariableProxy());
1728 PrepareForBailout(expr->target(), TOS_REG);
1729 break;
1730 case NAMED_SUPER_PROPERTY:
1731 EmitNamedSuperPropertyLoad(property);
1732 PrepareForBailoutForId(property->LoadId(), TOS_REG);
1733 break;
1734 case NAMED_PROPERTY:
1735 EmitNamedPropertyLoad(property);
1736 PrepareForBailoutForId(property->LoadId(), TOS_REG);
1737 break;
1738 case KEYED_SUPER_PROPERTY:
1739 EmitKeyedSuperPropertyLoad(property);
1740 PrepareForBailoutForId(property->LoadId(), TOS_REG);
1741 break;
1742 case KEYED_PROPERTY:
1743 EmitKeyedPropertyLoad(property);
1744 PrepareForBailoutForId(property->LoadId(), TOS_REG);
1745 break;
1746 }
1747 }
1748
1749 Token::Value op = expr->binary_op();
Ben Murdoch097c5b22016-05-18 11:27:45 +01001750 PushOperand(eax); // Left operand goes on the stack.
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00001751 VisitForAccumulatorValue(expr->value());
1752
1753 if (ShouldInlineSmiCase(op)) {
1754 EmitInlineSmiBinaryOp(expr->binary_operation(),
1755 op,
1756 expr->target(),
1757 expr->value());
1758 } else {
1759 EmitBinaryOp(expr->binary_operation(), op);
1760 }
1761
1762 // Deoptimization point in case the binary operation may have side effects.
1763 PrepareForBailout(expr->binary_operation(), TOS_REG);
1764 } else {
1765 VisitForAccumulatorValue(expr->value());
1766 }
1767
1768 SetExpressionPosition(expr);
1769
1770 // Store the value.
1771 switch (assign_type) {
1772 case VARIABLE:
1773 EmitVariableAssignment(expr->target()->AsVariableProxy()->var(),
1774 expr->op(), expr->AssignmentSlot());
1775 PrepareForBailoutForId(expr->AssignmentId(), TOS_REG);
1776 context()->Plug(eax);
1777 break;
1778 case NAMED_PROPERTY:
1779 EmitNamedPropertyAssignment(expr);
1780 break;
1781 case NAMED_SUPER_PROPERTY:
1782 EmitNamedSuperPropertyStore(property);
1783 context()->Plug(result_register());
1784 break;
1785 case KEYED_SUPER_PROPERTY:
1786 EmitKeyedSuperPropertyStore(property);
1787 context()->Plug(result_register());
1788 break;
1789 case KEYED_PROPERTY:
1790 EmitKeyedPropertyAssignment(expr);
1791 break;
1792 }
1793}
1794
1795
1796void FullCodeGenerator::VisitYield(Yield* expr) {
1797 Comment cmnt(masm_, "[ Yield");
1798 SetExpressionPosition(expr);
1799
1800 // Evaluate yielded value first; the initial iterator definition depends on
1801 // this. It stays on the stack while we update the iterator.
1802 VisitForStackValue(expr->expression());
1803
1804 switch (expr->yield_kind()) {
1805 case Yield::kSuspend:
1806 // Pop value from top-of-stack slot; box result into result register.
1807 EmitCreateIteratorResult(false);
1808 __ push(result_register());
1809 // Fall through.
1810 case Yield::kInitial: {
1811 Label suspend, continuation, post_runtime, resume;
1812
1813 __ jmp(&suspend);
1814 __ bind(&continuation);
Ben Murdoch097c5b22016-05-18 11:27:45 +01001815 // When we arrive here, the stack top is the resume mode and
1816 // result_register() holds the input value (the argument given to the
1817 // respective resume operation).
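      // Illustrative protocol (an assumption): g.next(v) re-enters here with
      // v in the result register and Smi(JSGeneratorObject::NEXT) on top of
      // the stack, while g.return(v) arrives with Smi(RETURN) instead and
      // takes the boxed-result path below.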
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00001818 __ RecordGeneratorContinuation();
Ben Murdoch097c5b22016-05-18 11:27:45 +01001819 __ pop(ebx);
1820 __ cmp(ebx, Immediate(Smi::FromInt(JSGeneratorObject::RETURN)));
1821 __ j(not_equal, &resume);
1822 __ push(result_register());
1823 EmitCreateIteratorResult(true);
1824 EmitUnwindAndReturn();
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00001825
1826 __ bind(&suspend);
1827 VisitForAccumulatorValue(expr->generator_object());
1828 DCHECK(continuation.pos() > 0 && Smi::IsValid(continuation.pos()));
1829 __ mov(FieldOperand(eax, JSGeneratorObject::kContinuationOffset),
1830 Immediate(Smi::FromInt(continuation.pos())));
1831 __ mov(FieldOperand(eax, JSGeneratorObject::kContextOffset), esi);
1832 __ mov(ecx, esi);
1833 __ RecordWriteField(eax, JSGeneratorObject::kContextOffset, ecx, edx,
1834 kDontSaveFPRegs);
1835 __ lea(ebx, Operand(ebp, StandardFrameConstants::kExpressionsOffset));
1836 __ cmp(esp, ebx);
1837 __ j(equal, &post_runtime);
1838 __ push(eax); // generator object
1839 __ CallRuntime(Runtime::kSuspendJSGeneratorObject, 1);
1840 __ mov(context_register(),
1841 Operand(ebp, StandardFrameConstants::kContextOffset));
1842 __ bind(&post_runtime);
Ben Murdoch097c5b22016-05-18 11:27:45 +01001843 PopOperand(result_register());
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00001844 EmitReturnSequence();
1845
1846 __ bind(&resume);
1847 context()->Plug(result_register());
1848 break;
1849 }
1850
1851 case Yield::kFinal: {
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00001852 // Pop value from top-of-stack slot, box result into result register.
Ben Murdoch097c5b22016-05-18 11:27:45 +01001853 OperandStackDepthDecrement(1);
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00001854 EmitCreateIteratorResult(true);
Ben Murdoch097c5b22016-05-18 11:27:45 +01001855 EmitUnwindAndReturn();
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00001856 break;
1857 }
1858
Ben Murdoch097c5b22016-05-18 11:27:45 +01001859 case Yield::kDelegating:
1860 UNREACHABLE();
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00001861 }
1862}
1863
1864
1865void FullCodeGenerator::EmitGeneratorResume(Expression* generator,
1866 Expression* value,
1867 JSGeneratorObject::ResumeMode resume_mode) {
1868 // The value stays in eax and is ultimately read by the resumed generator,
1869 // as if CallRuntime(Runtime::kSuspendJSGeneratorObject) had returned it, or
1870 // it is read in order to throw the value if the resumed generator has
1871 // already been closed. ebx holds the generator object until resumption.
1872 VisitForStackValue(generator);
1873 VisitForAccumulatorValue(value);
Ben Murdoch097c5b22016-05-18 11:27:45 +01001874 PopOperand(ebx);
1875
1876 // Store input value into generator object.
1877 __ mov(FieldOperand(ebx, JSGeneratorObject::kInputOffset), result_register());
1878 __ mov(ecx, result_register());
1879 __ RecordWriteField(ebx, JSGeneratorObject::kInputOffset, ecx, edx,
1880 kDontSaveFPRegs);
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00001881
1882 // Load suspended function and context.
1883 __ mov(esi, FieldOperand(ebx, JSGeneratorObject::kContextOffset));
1884 __ mov(edi, FieldOperand(ebx, JSGeneratorObject::kFunctionOffset));
1885
1886 // Push receiver.
1887 __ push(FieldOperand(ebx, JSGeneratorObject::kReceiverOffset));
1888
1889 // Push holes for arguments to generator function.
1890 __ mov(edx, FieldOperand(edi, JSFunction::kSharedFunctionInfoOffset));
1891 __ mov(edx,
1892 FieldOperand(edx, SharedFunctionInfo::kFormalParameterCountOffset));
1893 __ mov(ecx, isolate()->factory()->the_hole_value());
1894 Label push_argument_holes, push_frame;
1895 __ bind(&push_argument_holes);
1896 __ sub(edx, Immediate(Smi::FromInt(1)));
1897 __ j(carry, &push_frame);
1898 __ push(ecx);
1899 __ jmp(&push_argument_holes);
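// Worked example (an assumption): for a generator with two formal
// parameters, edx starts at Smi(2); the loop above pushes the_hole twice
// and exits through the carry produced by Smi(0) - Smi(1).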
1900
1901 // Enter a new JavaScript frame, and initialize its slots as they were when
1902 // the generator was suspended.
1903 Label resume_frame, done;
1904 __ bind(&push_frame);
1905 __ call(&resume_frame);
1906 __ jmp(&done);
1907 __ bind(&resume_frame);
1908 __ push(ebp); // Caller's frame pointer.
1909 __ mov(ebp, esp);
1910 __ push(esi); // Callee's context.
1911 __ push(edi); // Callee's JS Function.
1912
1913 // Load the operand stack size.
1914 __ mov(edx, FieldOperand(ebx, JSGeneratorObject::kOperandStackOffset));
1915 __ mov(edx, FieldOperand(edx, FixedArray::kLengthOffset));
1916 __ SmiUntag(edx);
1917
1918 // If we are sending a value and there is no operand stack, we can jump back
1919 // in directly.
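// Illustrative note (an assumption): the fast path below computes the
// resume address as the function's code entry plus the recorded
// continuation offset, jumping straight back to the suspending yield.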
1920 if (resume_mode == JSGeneratorObject::NEXT) {
1921 Label slow_resume;
1922 __ cmp(edx, Immediate(0));
1923 __ j(not_zero, &slow_resume);
1924 __ mov(edx, FieldOperand(edi, JSFunction::kCodeEntryOffset));
1925 __ mov(ecx, FieldOperand(ebx, JSGeneratorObject::kContinuationOffset));
1926 __ SmiUntag(ecx);
1927 __ add(edx, ecx);
1928 __ mov(FieldOperand(ebx, JSGeneratorObject::kContinuationOffset),
1929 Immediate(Smi::FromInt(JSGeneratorObject::kGeneratorExecuting)));
Ben Murdoch097c5b22016-05-18 11:27:45 +01001930 __ Push(Smi::FromInt(resume_mode)); // Consumed in continuation.
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00001931 __ jmp(edx);
1932 __ bind(&slow_resume);
1933 }
1934
1935 // Otherwise, we push holes for the operand stack and call the runtime to fix
1936 // up the stack and the handlers.
1937 Label push_operand_holes, call_resume;
1938 __ bind(&push_operand_holes);
1939 __ sub(edx, Immediate(1));
1940 __ j(carry, &call_resume);
1941 __ push(ecx);
1942 __ jmp(&push_operand_holes);
1943 __ bind(&call_resume);
Ben Murdoch097c5b22016-05-18 11:27:45 +01001944 __ Push(Smi::FromInt(resume_mode)); // Consumed in continuation.
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00001945 __ push(ebx);
1946 __ push(result_register());
1947 __ Push(Smi::FromInt(resume_mode));
1948 __ CallRuntime(Runtime::kResumeJSGeneratorObject);
1949 // Not reached: the runtime call returns elsewhere.
1950 __ Abort(kGeneratorFailedToResume);
1951
1952 __ bind(&done);
1953 context()->Plug(result_register());
1954}
1955
Ben Murdoch097c5b22016-05-18 11:27:45 +01001956void FullCodeGenerator::PushOperand(MemOperand operand) {
1957 OperandStackDepthIncrement(1);
1958 __ Push(operand);
1959}
1960
1961void FullCodeGenerator::EmitOperandStackDepthCheck() {
1962 if (FLAG_debug_code) {
1963 int expected_diff = StandardFrameConstants::kFixedFrameSizeFromFp +
1964 operand_stack_depth_ * kPointerSize;
1965 __ mov(eax, ebp);
1966 __ sub(eax, esp);
1967 __ cmp(eax, Immediate(expected_diff));
1968 __ Assert(equal, kUnexpectedStackDepth);
1969 }
1970}
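// Worked example (an assumption): with operand_stack_depth_ == 3, the
// check above asserts ebp - esp == kFixedFrameSizeFromFp + 3 * kPointerSize,
// i.e. that exactly three operands sit on top of the fixed frame.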
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00001971
1972void FullCodeGenerator::EmitCreateIteratorResult(bool done) {
1973 Label allocate, done_allocate;
1974
1975 __ Allocate(JSIteratorResult::kSize, eax, ecx, edx, &allocate, TAG_OBJECT);
1976 __ jmp(&done_allocate, Label::kNear);
1977
1978 __ bind(&allocate);
1979 __ Push(Smi::FromInt(JSIteratorResult::kSize));
1980 __ CallRuntime(Runtime::kAllocateInNewSpace);
1981
1982 __ bind(&done_allocate);
1983 __ mov(ebx, NativeContextOperand());
1984 __ mov(ebx, ContextOperand(ebx, Context::ITERATOR_RESULT_MAP_INDEX));
1985 __ mov(FieldOperand(eax, HeapObject::kMapOffset), ebx);
1986 __ mov(FieldOperand(eax, JSObject::kPropertiesOffset),
1987 isolate()->factory()->empty_fixed_array());
1988 __ mov(FieldOperand(eax, JSObject::kElementsOffset),
1989 isolate()->factory()->empty_fixed_array());
1990 __ pop(FieldOperand(eax, JSIteratorResult::kValueOffset));
1991 __ mov(FieldOperand(eax, JSIteratorResult::kDoneOffset),
1992 isolate()->factory()->ToBoolean(done));
1993 STATIC_ASSERT(JSIteratorResult::kSize == 5 * kPointerSize);
1994}
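// Illustrative result (an assumption): the object built above is the plain
// iteration-protocol record, e.g. {value: 42, done: false} for a suspended
// 'yield 42'.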
1995
1996
1997void FullCodeGenerator::EmitNamedPropertyLoad(Property* prop) {
1998 SetExpressionPosition(prop);
1999 Literal* key = prop->key()->AsLiteral();
2000 DCHECK(!key->value()->IsSmi());
2001 DCHECK(!prop->IsSuperAccess());
2002
2003 __ mov(LoadDescriptor::NameRegister(), Immediate(key->value()));
2004 __ mov(LoadDescriptor::SlotRegister(),
2005 Immediate(SmiFromSlot(prop->PropertyFeedbackSlot())));
Ben Murdoch097c5b22016-05-18 11:27:45 +01002006 CallLoadIC(NOT_INSIDE_TYPEOF);
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00002007}
2008
2009
2010void FullCodeGenerator::EmitInlineSmiBinaryOp(BinaryOperation* expr,
2011 Token::Value op,
2012 Expression* left,
2013 Expression* right) {
2014 // Do a combined smi check of the operands. The left operand is on the
2015 // stack; the right operand is in eax.
2016 Label smi_case, done, stub_call;
Ben Murdoch097c5b22016-05-18 11:27:45 +01002017 PopOperand(edx);
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00002018 __ mov(ecx, eax);
2019 __ or_(eax, edx);
2020 JumpPatchSite patch_site(masm_);
2021 patch_site.EmitJumpIfSmi(eax, &smi_case, Label::kNear);
2022
2023 __ bind(&stub_call);
2024 __ mov(eax, ecx);
Ben Murdoch097c5b22016-05-18 11:27:45 +01002025 Handle<Code> code = CodeFactory::BinaryOpIC(isolate(), op).code();
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00002026 CallIC(code, expr->BinaryOperationFeedbackId());
2027 patch_site.EmitPatchInfo();
2028 __ jmp(&done, Label::kNear);
2029
2030 // Smi case.
2031 __ bind(&smi_case);
2032 __ mov(eax, edx); // Copy left operand in case of a stub call.
2033
2034 switch (op) {
2035 case Token::SAR:
2036 __ SmiUntag(ecx);
2037 __ sar_cl(eax); // No checks of result necessary
2038 __ and_(eax, Immediate(~kSmiTagMask));
2039 break;
2040 case Token::SHL: {
2041 Label result_ok;
2042 __ SmiUntag(eax);
2043 __ SmiUntag(ecx);
2044 __ shl_cl(eax);
2045 // Check that the *signed* result fits in a smi.
2046 __ cmp(eax, 0xc0000000);
2047 __ j(positive, &result_ok);
2048 __ SmiTag(ecx);
2049 __ jmp(&stub_call);
2050 __ bind(&result_ok);
2051 __ SmiTag(eax);
2052 break;
2053 }
2054 case Token::SHR: {
2055 Label result_ok;
2056 __ SmiUntag(eax);
2057 __ SmiUntag(ecx);
2058 __ shr_cl(eax);
2059 __ test(eax, Immediate(0xc0000000));
2060 __ j(zero, &result_ok);
2061 __ SmiTag(ecx);
2062 __ jmp(&stub_call);
2063 __ bind(&result_ok);
2064 __ SmiTag(eax);
2065 break;
2066 }
2067 case Token::ADD:
2068 __ add(eax, ecx);
2069 __ j(overflow, &stub_call);
2070 break;
2071 case Token::SUB:
2072 __ sub(eax, ecx);
2073 __ j(overflow, &stub_call);
2074 break;
2075 case Token::MUL: {
2076 __ SmiUntag(eax);
2077 __ imul(eax, ecx);
2078 __ j(overflow, &stub_call);
2079 __ test(eax, eax);
2080 __ j(not_zero, &done, Label::kNear);
2081 __ mov(ebx, edx);
2082 __ or_(ebx, ecx);
2083 __ j(negative, &stub_call);
2084 break;
2085 }
2086 case Token::BIT_OR:
2087 __ or_(eax, ecx);
2088 break;
2089 case Token::BIT_AND:
2090 __ and_(eax, ecx);
2091 break;
2092 case Token::BIT_XOR:
2093 __ xor_(eax, ecx);
2094 break;
2095 default:
2096 UNREACHABLE();
2097 }
2098
2099 __ bind(&done);
2100 context()->Plug(eax);
2101}
2102
2103
2104void FullCodeGenerator::EmitClassDefineProperties(ClassLiteral* lit) {
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00002105 for (int i = 0; i < lit->properties()->length(); i++) {
2106 ObjectLiteral::Property* property = lit->properties()->at(i);
2107 Expression* value = property->value();
2108
2109 if (property->is_static()) {
Ben Murdoch097c5b22016-05-18 11:27:45 +01002110 PushOperand(Operand(esp, kPointerSize)); // constructor
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00002111 } else {
Ben Murdoch097c5b22016-05-18 11:27:45 +01002112 PushOperand(Operand(esp, 0)); // prototype
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00002113 }
2114 EmitPropertyKey(property, lit->GetIdForProperty(i));
2115
2116 // The static "prototype" property is read-only. The non-computed property
2117 // name case is handled in the parser. Since this is the only case where we
2118 // need to check for an own read-only property, we special-case it here so
2119 // the check is not repeated for every property.
2120 if (property->is_static() && property->is_computed_name()) {
2121 __ CallRuntime(Runtime::kThrowIfStaticPrototype);
2122 __ push(eax);
2123 }
2124
2125 VisitForStackValue(value);
2126 if (NeedsHomeObject(value)) {
2127 EmitSetHomeObject(value, 2, property->GetSlot());
2128 }
2129
2130 switch (property->kind()) {
2131 case ObjectLiteral::Property::CONSTANT:
2132 case ObjectLiteral::Property::MATERIALIZED_LITERAL:
2133 case ObjectLiteral::Property::PROTOTYPE:
2134 UNREACHABLE();
2135 case ObjectLiteral::Property::COMPUTED:
Ben Murdoch097c5b22016-05-18 11:27:45 +01002136 PushOperand(Smi::FromInt(DONT_ENUM));
2137 PushOperand(Smi::FromInt(property->NeedsSetFunctionName()));
2138 CallRuntimeWithOperands(Runtime::kDefineDataPropertyInLiteral);
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00002139 break;
2140
2141 case ObjectLiteral::Property::GETTER:
Ben Murdoch097c5b22016-05-18 11:27:45 +01002142 PushOperand(Smi::FromInt(DONT_ENUM));
2143 CallRuntimeWithOperands(Runtime::kDefineGetterPropertyUnchecked);
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00002144 break;
2145
2146 case ObjectLiteral::Property::SETTER:
Ben Murdoch097c5b22016-05-18 11:27:45 +01002147 PushOperand(Smi::FromInt(DONT_ENUM));
2148 CallRuntimeWithOperands(Runtime::kDefineSetterPropertyUnchecked);
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00002149 break;
2150 }
2151 }
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00002152}
2153
2154
2155void FullCodeGenerator::EmitBinaryOp(BinaryOperation* expr, Token::Value op) {
Ben Murdoch097c5b22016-05-18 11:27:45 +01002156 PopOperand(edx);
2157 Handle<Code> code = CodeFactory::BinaryOpIC(isolate(), op).code();
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00002158 JumpPatchSite patch_site(masm_); // unbound, signals no inlined smi code.
2159 CallIC(code, expr->BinaryOperationFeedbackId());
2160 patch_site.EmitPatchInfo();
2161 context()->Plug(eax);
2162}
2163
2164
2165void FullCodeGenerator::EmitAssignment(Expression* expr,
2166 FeedbackVectorSlot slot) {
2167 DCHECK(expr->IsValidReferenceExpressionOrThis());
2168
2169 Property* prop = expr->AsProperty();
2170 LhsKind assign_type = Property::GetAssignType(prop);
2171
2172 switch (assign_type) {
2173 case VARIABLE: {
2174 Variable* var = expr->AsVariableProxy()->var();
2175 EffectContext context(this);
2176 EmitVariableAssignment(var, Token::ASSIGN, slot);
2177 break;
2178 }
2179 case NAMED_PROPERTY: {
Ben Murdoch097c5b22016-05-18 11:27:45 +01002180 PushOperand(eax); // Preserve value.
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00002181 VisitForAccumulatorValue(prop->obj());
2182 __ Move(StoreDescriptor::ReceiverRegister(), eax);
Ben Murdoch097c5b22016-05-18 11:27:45 +01002183 PopOperand(StoreDescriptor::ValueRegister()); // Restore value.
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00002184 __ mov(StoreDescriptor::NameRegister(),
2185 prop->key()->AsLiteral()->value());
2186 EmitLoadStoreICSlot(slot);
2187 CallStoreIC();
2188 break;
2189 }
2190 case NAMED_SUPER_PROPERTY: {
Ben Murdoch097c5b22016-05-18 11:27:45 +01002191 PushOperand(eax);
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00002192 VisitForStackValue(prop->obj()->AsSuperPropertyReference()->this_var());
2193 VisitForAccumulatorValue(
2194 prop->obj()->AsSuperPropertyReference()->home_object());
2195 // stack: value, this; eax: home_object
2196 Register scratch = ecx;
2197 Register scratch2 = edx;
2198 __ mov(scratch, result_register()); // home_object
2199 __ mov(eax, MemOperand(esp, kPointerSize)); // value
2200 __ mov(scratch2, MemOperand(esp, 0)); // this
2201 __ mov(MemOperand(esp, kPointerSize), scratch2); // this
2202 __ mov(MemOperand(esp, 0), scratch); // home_object
2203 // stack: this, home_object. eax: value
2204 EmitNamedSuperPropertyStore(prop);
2205 break;
2206 }
2207 case KEYED_SUPER_PROPERTY: {
Ben Murdoch097c5b22016-05-18 11:27:45 +01002208 PushOperand(eax);
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00002209 VisitForStackValue(prop->obj()->AsSuperPropertyReference()->this_var());
2210 VisitForStackValue(
2211 prop->obj()->AsSuperPropertyReference()->home_object());
2212 VisitForAccumulatorValue(prop->key());
2213 Register scratch = ecx;
2214 Register scratch2 = edx;
2215 __ mov(scratch2, MemOperand(esp, 2 * kPointerSize)); // value
2216 // stack: value, this, home_object; eax: key, edx: value
2217 __ mov(scratch, MemOperand(esp, kPointerSize)); // this
2218 __ mov(MemOperand(esp, 2 * kPointerSize), scratch);
2219 __ mov(scratch, MemOperand(esp, 0)); // home_object
2220 __ mov(MemOperand(esp, kPointerSize), scratch);
2221 __ mov(MemOperand(esp, 0), eax);
2222 __ mov(eax, scratch2);
2223 // stack: this, home_object, key; eax: value.
2224 EmitKeyedSuperPropertyStore(prop);
2225 break;
2226 }
2227 case KEYED_PROPERTY: {
Ben Murdoch097c5b22016-05-18 11:27:45 +01002228 PushOperand(eax); // Preserve value.
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00002229 VisitForStackValue(prop->obj());
2230 VisitForAccumulatorValue(prop->key());
2231 __ Move(StoreDescriptor::NameRegister(), eax);
Ben Murdoch097c5b22016-05-18 11:27:45 +01002232 PopOperand(StoreDescriptor::ReceiverRegister()); // Receiver.
2233 PopOperand(StoreDescriptor::ValueRegister()); // Restore value.
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00002234 EmitLoadStoreICSlot(slot);
2235 Handle<Code> ic =
2236 CodeFactory::KeyedStoreIC(isolate(), language_mode()).code();
2237 CallIC(ic);
2238 break;
2239 }
2240 }
2241 context()->Plug(eax);
2242}
2243
2244
2245void FullCodeGenerator::EmitStoreToStackLocalOrContextSlot(
2246 Variable* var, MemOperand location) {
2247 __ mov(location, eax);
2248 if (var->IsContextSlot()) {
2249 __ mov(edx, eax);
2250 int offset = Context::SlotOffset(var->index());
2251 __ RecordWriteContextSlot(ecx, offset, edx, ebx, kDontSaveFPRegs);
2252 }
2253}
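// Usage note (an assumption): callers leave the value in eax; context slots
// additionally record the write above so the incremental marker sees the
// new reference, while stack slots need no barrier.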
2254
2255
2256void FullCodeGenerator::EmitVariableAssignment(Variable* var, Token::Value op,
2257 FeedbackVectorSlot slot) {
2258 if (var->IsUnallocated()) {
2259 // Global var, const, or let.
2260 __ mov(StoreDescriptor::NameRegister(), var->name());
2261 __ mov(StoreDescriptor::ReceiverRegister(), NativeContextOperand());
2262 __ mov(StoreDescriptor::ReceiverRegister(),
2263 ContextOperand(StoreDescriptor::ReceiverRegister(),
2264 Context::EXTENSION_INDEX));
2265 EmitLoadStoreICSlot(slot);
2266 CallStoreIC();
2267
2268 } else if (var->mode() == LET && op != Token::INIT) {
2269 // Non-initializing assignment to a let variable needs a TDZ hole check.
2270 DCHECK(!var->IsLookupSlot());
2271 DCHECK(var->IsStackAllocated() || var->IsContextSlot());
2272 Label assign;
2273 MemOperand location = VarOperand(var, ecx);
2274 __ mov(edx, location);
2275 __ cmp(edx, isolate()->factory()->the_hole_value());
2276 __ j(not_equal, &assign, Label::kNear);
2277 __ push(Immediate(var->name()));
2278 __ CallRuntime(Runtime::kThrowReferenceError);
2279 __ bind(&assign);
2280 EmitStoreToStackLocalOrContextSlot(var, location);
2281
2282 } else if (var->mode() == CONST && op != Token::INIT) {
2283 // Assignment to a const variable always throws, after a TDZ hole check.
2284 DCHECK(!var->IsLookupSlot());
2285 DCHECK(var->IsStackAllocated() || var->IsContextSlot());
2286 Label const_error;
2287 MemOperand location = VarOperand(var, ecx);
2288 __ mov(edx, location);
2289 __ cmp(edx, isolate()->factory()->the_hole_value());
2290 __ j(not_equal, &const_error, Label::kNear);
2291 __ push(Immediate(var->name()));
2292 __ CallRuntime(Runtime::kThrowReferenceError);
2293 __ bind(&const_error);
2294 __ CallRuntime(Runtime::kThrowConstAssignError);
2295
2296 } else if (var->is_this() && var->mode() == CONST && op == Token::INIT) {
2297 // Initializing assignment to const {this} needs an uninitialized-this check.
2298 DCHECK(var->IsStackAllocated() || var->IsContextSlot());
2299 Label uninitialized_this;
2300 MemOperand location = VarOperand(var, ecx);
2301 __ mov(edx, location);
2302 __ cmp(edx, isolate()->factory()->the_hole_value());
2303 __ j(equal, &uninitialized_this);
2304 __ push(Immediate(var->name()));
2305 __ CallRuntime(Runtime::kThrowReferenceError);
2306 __ bind(&uninitialized_this);
2307 EmitStoreToStackLocalOrContextSlot(var, location);
2308
2309 } else if (!var->is_const_mode() ||
2310 (var->mode() == CONST && op == Token::INIT)) {
2311 if (var->IsLookupSlot()) {
2312 // Assignment to var.
Ben Murdoch097c5b22016-05-18 11:27:45 +01002313 __ Push(Immediate(var->name()));
2314 __ Push(eax);
2315 __ CallRuntime(is_strict(language_mode())
2316 ? Runtime::kStoreLookupSlot_Strict
2317 : Runtime::kStoreLookupSlot_Sloppy);
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00002318 } else {
2319 // Assignment to var or initializing assignment to let/const in harmony
2320 // mode.
2321 DCHECK(var->IsStackAllocated() || var->IsContextSlot());
2322 MemOperand location = VarOperand(var, ecx);
Ben Murdoch097c5b22016-05-18 11:27:45 +01002323 if (FLAG_debug_code && var->mode() == LET && op == Token::INIT) {
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00002324 // Check for an uninitialized let binding.
2325 __ mov(edx, location);
2326 __ cmp(edx, isolate()->factory()->the_hole_value());
2327 __ Check(equal, kLetBindingReInitialization);
2328 }
2329 EmitStoreToStackLocalOrContextSlot(var, location);
2330 }
2331
2332 } else if (var->mode() == CONST_LEGACY && op == Token::INIT) {
2333 // Const initializers need a write barrier.
2334 DCHECK(!var->IsParameter()); // No const parameters.
2335 if (var->IsLookupSlot()) {
2336 __ push(eax);
2337 __ push(esi);
2338 __ push(Immediate(var->name()));
2339 __ CallRuntime(Runtime::kInitializeLegacyConstLookupSlot);
2340 } else {
2341 DCHECK(var->IsStackLocal() || var->IsContextSlot());
2342 Label skip;
2343 MemOperand location = VarOperand(var, ecx);
2344 __ mov(edx, location);
2345 __ cmp(edx, isolate()->factory()->the_hole_value());
2346 __ j(not_equal, &skip, Label::kNear);
2347 EmitStoreToStackLocalOrContextSlot(var, location);
2348 __ bind(&skip);
2349 }
2350
2351 } else {
2352 DCHECK(var->mode() == CONST_LEGACY && op != Token::INIT);
2353 if (is_strict(language_mode())) {
2354 __ CallRuntime(Runtime::kThrowConstAssignError);
2355 }
2356 // Silently ignore store in sloppy mode.
2357 }
2358}
2359
2360
2361void FullCodeGenerator::EmitNamedPropertyAssignment(Assignment* expr) {
2362 // Assignment to a property, using a named store IC.
2363 // eax : value
2364 // esp[0] : receiver
2365 Property* prop = expr->target()->AsProperty();
2366 DCHECK(prop != NULL);
2367 DCHECK(prop->key()->IsLiteral());
2368
2369 __ mov(StoreDescriptor::NameRegister(), prop->key()->AsLiteral()->value());
Ben Murdoch097c5b22016-05-18 11:27:45 +01002370 PopOperand(StoreDescriptor::ReceiverRegister());
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00002371 EmitLoadStoreICSlot(expr->AssignmentSlot());
2372 CallStoreIC();
2373 PrepareForBailoutForId(expr->AssignmentId(), TOS_REG);
2374 context()->Plug(eax);
2375}
2376
2377
2378void FullCodeGenerator::EmitNamedSuperPropertyStore(Property* prop) {
2379 // Assignment to named property of super.
2380 // eax : value
2381 // stack : receiver ('this'), home_object
2382 DCHECK(prop != NULL);
2383 Literal* key = prop->key()->AsLiteral();
2384 DCHECK(key != NULL);
2385
Ben Murdoch097c5b22016-05-18 11:27:45 +01002386 PushOperand(key->value());
2387 PushOperand(eax);
2388 CallRuntimeWithOperands(is_strict(language_mode())
2389 ? Runtime::kStoreToSuper_Strict
2390 : Runtime::kStoreToSuper_Sloppy);
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00002391}
2392
2393
2394void FullCodeGenerator::EmitKeyedSuperPropertyStore(Property* prop) {
2395 // Assignment to a keyed property of super.
2396 // eax : value
2397 // stack : receiver ('this'), home_object, key
2398
Ben Murdoch097c5b22016-05-18 11:27:45 +01002399 PushOperand(eax);
2400 CallRuntimeWithOperands(is_strict(language_mode())
2401 ? Runtime::kStoreKeyedToSuper_Strict
2402 : Runtime::kStoreKeyedToSuper_Sloppy);
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00002403}
2404
2405
2406void FullCodeGenerator::EmitKeyedPropertyAssignment(Assignment* expr) {
2407 // Assignment to a property, using a keyed store IC.
2408 // eax : value
2409 // esp[0] : key
2410 // esp[kPointerSize] : receiver
2411
Ben Murdoch097c5b22016-05-18 11:27:45 +01002412 PopOperand(StoreDescriptor::NameRegister()); // Key.
2413 PopOperand(StoreDescriptor::ReceiverRegister());
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00002414 DCHECK(StoreDescriptor::ValueRegister().is(eax));
2415 Handle<Code> ic =
2416 CodeFactory::KeyedStoreIC(isolate(), language_mode()).code();
2417 EmitLoadStoreICSlot(expr->AssignmentSlot());
2418 CallIC(ic);
2419 PrepareForBailoutForId(expr->AssignmentId(), TOS_REG);
2420 context()->Plug(eax);
2421}
2422
2423
2424void FullCodeGenerator::VisitProperty(Property* expr) {
2425 Comment cmnt(masm_, "[ Property");
2426 SetExpressionPosition(expr);
2427
2428 Expression* key = expr->key();
2429
2430 if (key->IsPropertyName()) {
2431 if (!expr->IsSuperAccess()) {
2432 VisitForAccumulatorValue(expr->obj());
2433 __ Move(LoadDescriptor::ReceiverRegister(), result_register());
2434 EmitNamedPropertyLoad(expr);
2435 } else {
2436 VisitForStackValue(expr->obj()->AsSuperPropertyReference()->this_var());
2437 VisitForStackValue(
2438 expr->obj()->AsSuperPropertyReference()->home_object());
2439 EmitNamedSuperPropertyLoad(expr);
2440 }
2441 } else {
2442 if (!expr->IsSuperAccess()) {
2443 VisitForStackValue(expr->obj());
2444 VisitForAccumulatorValue(expr->key());
Ben Murdoch097c5b22016-05-18 11:27:45 +01002445 PopOperand(LoadDescriptor::ReceiverRegister()); // Object.
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00002446 __ Move(LoadDescriptor::NameRegister(), result_register()); // Key.
2447 EmitKeyedPropertyLoad(expr);
2448 } else {
2449 VisitForStackValue(expr->obj()->AsSuperPropertyReference()->this_var());
2450 VisitForStackValue(
2451 expr->obj()->AsSuperPropertyReference()->home_object());
2452 VisitForStackValue(expr->key());
2453 EmitKeyedSuperPropertyLoad(expr);
2454 }
2455 }
2456 PrepareForBailoutForId(expr->LoadId(), TOS_REG);
2457 context()->Plug(eax);
2458}
2459
2460
2461void FullCodeGenerator::CallIC(Handle<Code> code,
2462 TypeFeedbackId ast_id) {
2463 ic_total_count_++;
2464 __ call(code, RelocInfo::CODE_TARGET, ast_id);
2465}
2466
2467
2468// Code common for calls using the IC.
2469void FullCodeGenerator::EmitCallWithLoadIC(Call* expr) {
2470 Expression* callee = expr->expression();
2471
2472 // Get the target function.
2473 ConvertReceiverMode convert_mode;
2474 if (callee->IsVariableProxy()) {
2475 { StackValueContext context(this);
2476 EmitVariableLoad(callee->AsVariableProxy());
2477 PrepareForBailout(callee, NO_REGISTERS);
2478 }
2479 // Push undefined as receiver. This is patched in the method prologue if it
2480 // is a sloppy mode method.
Ben Murdoch097c5b22016-05-18 11:27:45 +01002481 PushOperand(isolate()->factory()->undefined_value());
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00002482 convert_mode = ConvertReceiverMode::kNullOrUndefined;
2483 } else {
2484 // Load the function from the receiver.
2485 DCHECK(callee->IsProperty());
2486 DCHECK(!callee->AsProperty()->IsSuperAccess());
2487 __ mov(LoadDescriptor::ReceiverRegister(), Operand(esp, 0));
2488 EmitNamedPropertyLoad(callee->AsProperty());
2489 PrepareForBailoutForId(callee->AsProperty()->LoadId(), TOS_REG);
2490 // Push the target function under the receiver.
Ben Murdoch097c5b22016-05-18 11:27:45 +01002491 PushOperand(Operand(esp, 0));
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00002492 __ mov(Operand(esp, kPointerSize), eax);
2493 convert_mode = ConvertReceiverMode::kNotNullOrUndefined;
2494 }
2495
2496 EmitCall(expr, convert_mode);
2497}
2498
2499
2500void FullCodeGenerator::EmitSuperCallWithLoadIC(Call* expr) {
2501 SetExpressionPosition(expr);
2502 Expression* callee = expr->expression();
2503 DCHECK(callee->IsProperty());
2504 Property* prop = callee->AsProperty();
2505 DCHECK(prop->IsSuperAccess());
2506
2507 Literal* key = prop->key()->AsLiteral();
2508 DCHECK(!key->value()->IsSmi());
2509 // Load the function from the receiver.
2510 SuperPropertyReference* super_ref = prop->obj()->AsSuperPropertyReference();
2511 VisitForStackValue(super_ref->home_object());
2512 VisitForAccumulatorValue(super_ref->this_var());
Ben Murdoch097c5b22016-05-18 11:27:45 +01002513 PushOperand(eax);
2514 PushOperand(eax);
2515 PushOperand(Operand(esp, kPointerSize * 2));
2516 PushOperand(key->value());
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00002517 // Stack here:
2518 // - home_object
2519 // - this (receiver)
2520 // - this (receiver) <-- LoadFromSuper will pop here and below.
2521 // - home_object
2522 // - key
Ben Murdoch097c5b22016-05-18 11:27:45 +01002523 CallRuntimeWithOperands(Runtime::kLoadFromSuper);
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00002524
2525 // Replace home_object with target function.
2526 __ mov(Operand(esp, kPointerSize), eax);
2527
2528 // Stack here:
2529 // - target function
2530 // - this (receiver)
2531 EmitCall(expr);
2532}
2533
2534
2535// Code common for calls using the IC.
2536void FullCodeGenerator::EmitKeyedCallWithLoadIC(Call* expr,
2537 Expression* key) {
2538 // Load the key.
2539 VisitForAccumulatorValue(key);
2540
2541 Expression* callee = expr->expression();
2542
2543 // Load the function from the receiver.
2544 DCHECK(callee->IsProperty());
2545 __ mov(LoadDescriptor::ReceiverRegister(), Operand(esp, 0));
2546 __ mov(LoadDescriptor::NameRegister(), eax);
2547 EmitKeyedPropertyLoad(callee->AsProperty());
2548 PrepareForBailoutForId(callee->AsProperty()->LoadId(), TOS_REG);
2549
2550 // Push the target function under the receiver.
Ben Murdoch097c5b22016-05-18 11:27:45 +01002551 PushOperand(Operand(esp, 0));
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00002552 __ mov(Operand(esp, kPointerSize), eax);
2553
2554 EmitCall(expr, ConvertReceiverMode::kNotNullOrUndefined);
2555}
2556
2557
2558void FullCodeGenerator::EmitKeyedSuperCallWithLoadIC(Call* expr) {
2559 Expression* callee = expr->expression();
2560 DCHECK(callee->IsProperty());
2561 Property* prop = callee->AsProperty();
2562 DCHECK(prop->IsSuperAccess());
2563
2564 SetExpressionPosition(prop);
2565 // Load the function from the receiver.
2566 SuperPropertyReference* super_ref = prop->obj()->AsSuperPropertyReference();
2567 VisitForStackValue(super_ref->home_object());
2568 VisitForAccumulatorValue(super_ref->this_var());
Ben Murdoch097c5b22016-05-18 11:27:45 +01002569 PushOperand(eax);
2570 PushOperand(eax);
2571 PushOperand(Operand(esp, kPointerSize * 2));
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00002572 VisitForStackValue(prop->key());
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00002573 // Stack here:
2574 // - home_object
2575 // - this (receiver)
2576 // - this (receiver) <-- LoadKeyedFromSuper will pop here and below.
2577 // - home_object
2578 // - key
Ben Murdoch097c5b22016-05-18 11:27:45 +01002579 CallRuntimeWithOperands(Runtime::kLoadKeyedFromSuper);
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00002580
2581 // Replace home_object with target function.
2582 __ mov(Operand(esp, kPointerSize), eax);
2583
2584 // Stack here:
2585 // - target function
2586 // - this (receiver)
2587 EmitCall(expr);
2588}
2589
2590
2591void FullCodeGenerator::EmitCall(Call* expr, ConvertReceiverMode mode) {
2592 // Load the arguments.
2593 ZoneList<Expression*>* args = expr->arguments();
2594 int arg_count = args->length();
2595 for (int i = 0; i < arg_count; i++) {
2596 VisitForStackValue(args->at(i));
2597 }
2598
2599 PrepareForBailoutForId(expr->CallId(), NO_REGISTERS);
2600 SetCallPosition(expr);
Ben Murdoch097c5b22016-05-18 11:27:45 +01002601 if (expr->tail_call_mode() == TailCallMode::kAllow) {
2602 if (FLAG_trace) {
2603 __ CallRuntime(Runtime::kTraceTailCall);
2604 }
2605 // Update profiling counters before the tail call since we will
2606 // not return to this function.
2607 EmitProfilingCounterHandlingForReturnSequence(true);
2608 }
2609 Handle<Code> ic =
2610 CodeFactory::CallIC(isolate(), arg_count, mode, expr->tail_call_mode())
2611 .code();
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00002612 __ Move(edx, Immediate(SmiFromSlot(expr->CallFeedbackICSlot())));
2613 __ mov(edi, Operand(esp, (arg_count + 1) * kPointerSize));
2614 // Don't assign a type feedback id to the IC, since type feedback is provided
2615 // by the vector above.
2616 CallIC(ic);
Ben Murdoch097c5b22016-05-18 11:27:45 +01002617 OperandStackDepthDecrement(arg_count + 1);
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00002618
2619 RecordJSReturnSite(expr);
2620
2621 // Restore context register.
2622 __ mov(esi, Operand(ebp, StandardFrameConstants::kContextOffset));
2623
2624 context()->DropAndPlug(1, eax);
2625}
2626
2627
2628void FullCodeGenerator::EmitResolvePossiblyDirectEval(int arg_count) {
2629 // Push copy of the first argument or undefined if it doesn't exist.
2630 if (arg_count > 0) {
2631 __ push(Operand(esp, arg_count * kPointerSize));
2632 } else {
2633 __ push(Immediate(isolate()->factory()->undefined_value()));
2634 }
2635
2636 // Push the enclosing function.
2637 __ push(Operand(ebp, JavaScriptFrameConstants::kFunctionOffset));
2638
2639 // Push the language mode.
2640 __ push(Immediate(Smi::FromInt(language_mode())));
2641
2642 // Push the start position of the scope the call resides in.
2643 __ push(Immediate(Smi::FromInt(scope()->start_position())));
2644
2645 // Do the runtime call.
2646 __ CallRuntime(Runtime::kResolvePossiblyDirectEval);
2647}
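// Illustrative call shape (an assumption): for eval(src) the runtime call
// above receives (src-or-undefined, the enclosing JSFunction, the language
// mode as a smi, and the scope's start position as a smi) and returns the
// resolved callee.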
2648
2649
2650// See http://www.ecma-international.org/ecma-262/6.0/#sec-function-calls.
2651void FullCodeGenerator::PushCalleeAndWithBaseObject(Call* expr) {
2652 VariableProxy* callee = expr->expression()->AsVariableProxy();
2653 if (callee->var()->IsLookupSlot()) {
2654 Label slow, done;
2655 SetExpressionPosition(callee);
2656 // Generate code for loading from variables potentially shadowed by
2657 // eval-introduced variables.
2658 EmitDynamicLookupFastCase(callee, NOT_INSIDE_TYPEOF, &slow, &done);
2659
2660 __ bind(&slow);
2661 // Call the runtime to find the function to call (returned in eax) and
2662 // the object holding it (returned in edx).
Ben Murdoch097c5b22016-05-18 11:27:45 +01002663 __ Push(callee->name());
2664 __ CallRuntime(Runtime::kLoadLookupSlotForCall);
2665 PushOperand(eax); // Function.
2666 PushOperand(edx); // Receiver.
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00002667 PrepareForBailoutForId(expr->LookupId(), NO_REGISTERS);
2668
2669 // If fast case code has been generated, emit code to push the function
2670 // and receiver and have the slow path jump around this code.
2671 if (done.is_linked()) {
2672 Label call;
2673 __ jmp(&call, Label::kNear);
2674 __ bind(&done);
2675 // Push function.
2676 __ push(eax);
2677 // The receiver is implicitly the global receiver. Indicate this by
2678 // passing undefined to the call function stub.
2679 __ push(Immediate(isolate()->factory()->undefined_value()));
2680 __ bind(&call);
2681 }
2682 } else {
2683 VisitForStackValue(callee);
2684 // refEnv.WithBaseObject()
Ben Murdoch097c5b22016-05-18 11:27:45 +01002685 PushOperand(isolate()->factory()->undefined_value());
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00002686 }
2687}
2688
2689
2690void FullCodeGenerator::EmitPossiblyEvalCall(Call* expr) {
2691 // In a call to eval, we first call Runtime::kResolvePossiblyDirectEval
2692 // to resolve the function we need to call. Then we call the resolved
2693 // function using the given arguments.
2694 ZoneList<Expression*>* args = expr->arguments();
2695 int arg_count = args->length();
2696
2697 PushCalleeAndWithBaseObject(expr);
2698
2699 // Push the arguments.
2700 for (int i = 0; i < arg_count; i++) {
2701 VisitForStackValue(args->at(i));
2702 }
2703
2704 // Push a copy of the function (found below the arguments) and
2705 // resolve eval.
2706 __ push(Operand(esp, (arg_count + 1) * kPointerSize));
2707 EmitResolvePossiblyDirectEval(arg_count);
2708
2709 // Touch up the stack with the resolved function.
2710 __ mov(Operand(esp, (arg_count + 1) * kPointerSize), eax);
2711
2712 PrepareForBailoutForId(expr->EvalId(), NO_REGISTERS);
2713
2714 SetCallPosition(expr);
2715 __ mov(edi, Operand(esp, (arg_count + 1) * kPointerSize));
2716 __ Set(eax, arg_count);
Ben Murdoch097c5b22016-05-18 11:27:45 +01002717 __ Call(isolate()->builtins()->Call(ConvertReceiverMode::kAny,
2718 expr->tail_call_mode()),
2719 RelocInfo::CODE_TARGET);
2720 OperandStackDepthDecrement(arg_count + 1);
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00002721 RecordJSReturnSite(expr);
2722 // Restore context register.
2723 __ mov(esi, Operand(ebp, StandardFrameConstants::kContextOffset));
2724 context()->DropAndPlug(1, eax);
2725}
2726
2727
2728void FullCodeGenerator::VisitCallNew(CallNew* expr) {
2729 Comment cmnt(masm_, "[ CallNew");
2730 // According to ECMA-262, section 11.2.2, page 44, the function
2731 // expression in new calls must be evaluated before the
2732 // arguments.
2733
2734 // Push the constructor on the stack. If it's not a function, it's used as
2735 // the receiver for CALL_NON_FUNCTION; otherwise the value on the stack is
2736 // ignored.
2737 DCHECK(!expr->expression()->IsSuperPropertyReference());
2738 VisitForStackValue(expr->expression());
2739
2740 // Push the arguments ("left-to-right") on the stack.
2741 ZoneList<Expression*>* args = expr->arguments();
2742 int arg_count = args->length();
2743 for (int i = 0; i < arg_count; i++) {
2744 VisitForStackValue(args->at(i));
2745 }
2746
2747 // Call the construct call builtin that handles allocation and
2748 // constructor invocation.
2749 SetConstructCallPosition(expr);
2750
2751 // Load function and argument count into edi and eax.
2752 __ Move(eax, Immediate(arg_count));
2753 __ mov(edi, Operand(esp, arg_count * kPointerSize));
2754
2755 // Record call targets in unoptimized code.
2756 __ EmitLoadTypeFeedbackVector(ebx);
2757 __ mov(edx, Immediate(SmiFromSlot(expr->CallNewFeedbackSlot())));
2758
2759 CallConstructStub stub(isolate());
2760 __ call(stub.GetCode(), RelocInfo::CODE_TARGET);
Ben Murdoch097c5b22016-05-18 11:27:45 +01002761 OperandStackDepthDecrement(arg_count + 1);
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00002762 PrepareForBailoutForId(expr->ReturnId(), TOS_REG);
2763 // Restore context register.
2764 __ mov(esi, Operand(ebp, StandardFrameConstants::kContextOffset));
2765 context()->Plug(eax);
2766}
2767
2768
2769void FullCodeGenerator::EmitSuperConstructorCall(Call* expr) {
2770 SuperCallReference* super_call_ref =
2771 expr->expression()->AsSuperCallReference();
2772 DCHECK_NOT_NULL(super_call_ref);
2773
2774 // Push the super constructor target on the stack (may be null,
2775 // but the Construct builtin can deal with that properly).
2776 VisitForAccumulatorValue(super_call_ref->this_function_var());
2777 __ AssertFunction(result_register());
2778 __ mov(result_register(),
2779 FieldOperand(result_register(), HeapObject::kMapOffset));
Ben Murdoch097c5b22016-05-18 11:27:45 +01002780 PushOperand(FieldOperand(result_register(), Map::kPrototypeOffset));
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00002781
2782 // Push the arguments ("left-to-right") on the stack.
2783 ZoneList<Expression*>* args = expr->arguments();
2784 int arg_count = args->length();
2785 for (int i = 0; i < arg_count; i++) {
2786 VisitForStackValue(args->at(i));
2787 }
2788
2789 // Call the construct call builtin that handles allocation and
2790 // constructor invocation.
2791 SetConstructCallPosition(expr);
2792
2793 // Load new target into edx.
2794 VisitForAccumulatorValue(super_call_ref->new_target_var());
2795 __ mov(edx, result_register());
2796
2797 // Load function and argument count into edi and eax.
2798 __ Move(eax, Immediate(arg_count));
2799 __ mov(edi, Operand(esp, arg_count * kPointerSize));
2800
2801 __ Call(isolate()->builtins()->Construct(), RelocInfo::CODE_TARGET);
Ben Murdoch097c5b22016-05-18 11:27:45 +01002802 OperandStackDepthDecrement(arg_count + 1);
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00002803
2804 RecordJSReturnSite(expr);
2805
2806 // Restore context register.
2807 __ mov(esi, Operand(ebp, StandardFrameConstants::kContextOffset));
2808 context()->Plug(eax);
2809}
2810
2811
2812void FullCodeGenerator::EmitIsSmi(CallRuntime* expr) {
2813 ZoneList<Expression*>* args = expr->arguments();
2814 DCHECK(args->length() == 1);
2815
2816 VisitForAccumulatorValue(args->at(0));
2817
2818 Label materialize_true, materialize_false;
2819 Label* if_true = NULL;
2820 Label* if_false = NULL;
2821 Label* fall_through = NULL;
2822 context()->PrepareTest(&materialize_true, &materialize_false,
2823 &if_true, &if_false, &fall_through);
2824
2825 PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
2826 __ test(eax, Immediate(kSmiTagMask));
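  // (Assumed tagging scheme: smis carry a zero tag bit on ia32, so
  // eax & kSmiTagMask == 0 identifies a smi and the zero flag selects
  // if_true.)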
2827 Split(zero, if_true, if_false, fall_through);
2828
2829 context()->Plug(if_true, if_false);
2830}
2831
2832
2833void FullCodeGenerator::EmitIsJSReceiver(CallRuntime* expr) {
2834 ZoneList<Expression*>* args = expr->arguments();
2835 DCHECK(args->length() == 1);
2836
2837 VisitForAccumulatorValue(args->at(0));
2838
2839 Label materialize_true, materialize_false;
2840 Label* if_true = NULL;
2841 Label* if_false = NULL;
2842 Label* fall_through = NULL;
2843 context()->PrepareTest(&materialize_true, &materialize_false,
2844 &if_true, &if_false, &fall_through);
2845
2846 __ JumpIfSmi(eax, if_false);
2847 __ CmpObjectType(eax, FIRST_JS_RECEIVER_TYPE, ebx);
2848 PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
2849 Split(above_equal, if_true, if_false, fall_through);
2850
2851 context()->Plug(if_true, if_false);
2852}
2853
2854
void FullCodeGenerator::EmitIsArray(CallRuntime* expr) {
  ZoneList<Expression*>* args = expr->arguments();
  DCHECK(args->length() == 1);

  VisitForAccumulatorValue(args->at(0));

  Label materialize_true, materialize_false;
  Label* if_true = NULL;
  Label* if_false = NULL;
  Label* fall_through = NULL;
  context()->PrepareTest(&materialize_true, &materialize_false,
                         &if_true, &if_false, &fall_through);

  __ JumpIfSmi(eax, if_false);
  __ CmpObjectType(eax, JS_ARRAY_TYPE, ebx);
  PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
  Split(equal, if_true, if_false, fall_through);

  context()->Plug(if_true, if_false);
}


void FullCodeGenerator::EmitIsTypedArray(CallRuntime* expr) {
  ZoneList<Expression*>* args = expr->arguments();
  DCHECK(args->length() == 1);

  VisitForAccumulatorValue(args->at(0));

  Label materialize_true, materialize_false;
  Label* if_true = NULL;
  Label* if_false = NULL;
  Label* fall_through = NULL;
  context()->PrepareTest(&materialize_true, &materialize_false, &if_true,
                         &if_false, &fall_through);

  __ JumpIfSmi(eax, if_false);
  __ CmpObjectType(eax, JS_TYPED_ARRAY_TYPE, ebx);
  PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
  Split(equal, if_true, if_false, fall_through);

  context()->Plug(if_true, if_false);
}


void FullCodeGenerator::EmitIsRegExp(CallRuntime* expr) {
  ZoneList<Expression*>* args = expr->arguments();
  DCHECK(args->length() == 1);

  VisitForAccumulatorValue(args->at(0));

  Label materialize_true, materialize_false;
  Label* if_true = NULL;
  Label* if_false = NULL;
  Label* fall_through = NULL;
  context()->PrepareTest(&materialize_true, &materialize_false,
                         &if_true, &if_false, &fall_through);

  __ JumpIfSmi(eax, if_false);
  __ CmpObjectType(eax, JS_REGEXP_TYPE, ebx);
  PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
  Split(equal, if_true, if_false, fall_through);

  context()->Plug(if_true, if_false);
}


void FullCodeGenerator::EmitIsJSProxy(CallRuntime* expr) {
  ZoneList<Expression*>* args = expr->arguments();
  DCHECK(args->length() == 1);

  VisitForAccumulatorValue(args->at(0));

  Label materialize_true, materialize_false;
  Label* if_true = NULL;
  Label* if_false = NULL;
  Label* fall_through = NULL;
  context()->PrepareTest(&materialize_true, &materialize_false, &if_true,
                         &if_false, &fall_through);

  __ JumpIfSmi(eax, if_false);
  __ CmpObjectType(eax, JS_PROXY_TYPE, ebx);
  PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
  Split(equal, if_true, if_false, fall_through);

  context()->Plug(if_true, if_false);
}


void FullCodeGenerator::EmitClassOf(CallRuntime* expr) {
  ZoneList<Expression*>* args = expr->arguments();
  DCHECK(args->length() == 1);
  Label done, null, function, non_function_constructor;

  VisitForAccumulatorValue(args->at(0));

  // If the object is not a JSReceiver, we return null.
  __ JumpIfSmi(eax, &null, Label::kNear);
  STATIC_ASSERT(LAST_JS_RECEIVER_TYPE == LAST_TYPE);
  __ CmpObjectType(eax, FIRST_JS_RECEIVER_TYPE, eax);
  __ j(below, &null, Label::kNear);

  // Return 'Function' for JSFunction objects.
  __ CmpInstanceType(eax, JS_FUNCTION_TYPE);
  __ j(equal, &function, Label::kNear);

  // Check if the constructor in the map is a JS function.
  __ GetMapConstructor(eax, eax, ebx);
  __ CmpInstanceType(ebx, JS_FUNCTION_TYPE);
  __ j(not_equal, &non_function_constructor, Label::kNear);

  // eax now contains the constructor function. Grab the
  // instance class name from there.
  __ mov(eax, FieldOperand(eax, JSFunction::kSharedFunctionInfoOffset));
  __ mov(eax, FieldOperand(eax, SharedFunctionInfo::kInstanceClassNameOffset));
  __ jmp(&done, Label::kNear);

  // Non-JS objects have class null.
  __ bind(&null);
  __ mov(eax, isolate()->factory()->null_value());
  __ jmp(&done, Label::kNear);

  // Functions have class 'Function'.
  __ bind(&function);
  __ mov(eax, isolate()->factory()->Function_string());
  __ jmp(&done, Label::kNear);

  // Objects with a non-function constructor have class 'Object'.
  __ bind(&non_function_constructor);
  __ mov(eax, isolate()->factory()->Object_string());

  // All done.
  __ bind(&done);

  context()->Plug(eax);
}


void FullCodeGenerator::EmitValueOf(CallRuntime* expr) {
  ZoneList<Expression*>* args = expr->arguments();
  DCHECK(args->length() == 1);

  VisitForAccumulatorValue(args->at(0));  // Load the object.

  Label done;
  // If the object is a smi, return the object.
  __ JumpIfSmi(eax, &done, Label::kNear);
  // If the object is not a value type, return the object.
  __ CmpObjectType(eax, JS_VALUE_TYPE, ebx);
  __ j(not_equal, &done, Label::kNear);
  __ mov(eax, FieldOperand(eax, JSValue::kValueOffset));

  __ bind(&done);
  context()->Plug(eax);
}


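// %_OneByteSeqStringSetChar(index, value, string): store one character into a
// sequential one-byte string. Index and value arrive as smis and are untagged
// before the byte-sized store.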
void FullCodeGenerator::EmitOneByteSeqStringSetChar(CallRuntime* expr) {
  ZoneList<Expression*>* args = expr->arguments();
  DCHECK_EQ(3, args->length());

  Register string = eax;
  Register index = ebx;
  Register value = ecx;

  VisitForStackValue(args->at(0));        // index
  VisitForStackValue(args->at(1));        // value
  VisitForAccumulatorValue(args->at(2));  // string

  PopOperand(value);
  PopOperand(index);

  if (FLAG_debug_code) {
    __ test(value, Immediate(kSmiTagMask));
    __ Check(zero, kNonSmiValue);
    __ test(index, Immediate(kSmiTagMask));
    __ Check(zero, kNonSmiValue);
  }

  __ SmiUntag(value);
  __ SmiUntag(index);

  if (FLAG_debug_code) {
    static const uint32_t one_byte_seq_type = kSeqStringTag | kOneByteStringTag;
    __ EmitSeqStringSetCharCheck(string, index, value, one_byte_seq_type);
  }

  __ mov_b(FieldOperand(string, index, times_1, SeqOneByteString::kHeaderSize),
           value);
  context()->Plug(string);
}


void FullCodeGenerator::EmitTwoByteSeqStringSetChar(CallRuntime* expr) {
  ZoneList<Expression*>* args = expr->arguments();
  DCHECK_EQ(3, args->length());

  Register string = eax;
  Register index = ebx;
  Register value = ecx;

  VisitForStackValue(args->at(0));        // index
  VisitForStackValue(args->at(1));        // value
  VisitForAccumulatorValue(args->at(2));  // string
  PopOperand(value);
  PopOperand(index);

  if (FLAG_debug_code) {
    __ test(value, Immediate(kSmiTagMask));
    __ Check(zero, kNonSmiValue);
    __ test(index, Immediate(kSmiTagMask));
    __ Check(zero, kNonSmiValue);
    __ SmiUntag(index);
    static const uint32_t two_byte_seq_type = kSeqStringTag | kTwoByteStringTag;
    __ EmitSeqStringSetCharCheck(string, index, value, two_byte_seq_type);
    __ SmiTag(index);
  }

  __ SmiUntag(value);
  // No need to untag the smi index for two-byte addressing: the smi tag
  // already scales the value by two, which matches the two-byte element size.
  __ mov_w(FieldOperand(string, index, times_1, SeqTwoByteString::kHeaderSize),
           value);
  context()->Plug(string);
}


void FullCodeGenerator::EmitToInteger(CallRuntime* expr) {
  ZoneList<Expression*>* args = expr->arguments();
  DCHECK_EQ(1, args->length());

  // Load the argument into eax and convert it.
  VisitForAccumulatorValue(args->at(0));

  // Convert the object to an integer.
  Label done_convert;
  __ JumpIfSmi(eax, &done_convert, Label::kNear);
  __ Push(eax);
  __ CallRuntime(Runtime::kToInteger);
  __ bind(&done_convert);
  context()->Plug(eax);
}


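// %_StringCharFromCode: the generator handles the common one-byte case
// inline; everything else goes through its slow path's runtime call
// (NopRuntimeCallHelper: nothing needs saving around that call).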
void FullCodeGenerator::EmitStringCharFromCode(CallRuntime* expr) {
  ZoneList<Expression*>* args = expr->arguments();
  DCHECK(args->length() == 1);

  VisitForAccumulatorValue(args->at(0));

  Label done;
  StringCharFromCodeGenerator generator(eax, ebx);
  generator.GenerateFast(masm_);
  __ jmp(&done);

  NopRuntimeCallHelper call_helper;
  generator.GenerateSlow(masm_, call_helper);

  __ bind(&done);
  context()->Plug(ebx);
}


void FullCodeGenerator::EmitStringCharCodeAt(CallRuntime* expr) {
  ZoneList<Expression*>* args = expr->arguments();
  DCHECK(args->length() == 2);

  VisitForStackValue(args->at(0));
  VisitForAccumulatorValue(args->at(1));

  Register object = ebx;
  Register index = eax;
  Register result = edx;

  PopOperand(object);

  Label need_conversion;
  Label index_out_of_range;
  Label done;
  StringCharCodeAtGenerator generator(object,
                                      index,
                                      result,
                                      &need_conversion,
                                      &need_conversion,
                                      &index_out_of_range,
                                      STRING_INDEX_IS_NUMBER);
  generator.GenerateFast(masm_);
  __ jmp(&done);

  __ bind(&index_out_of_range);
  // When the index is out of range, the spec requires us to return
  // NaN.
  __ Move(result, Immediate(isolate()->factory()->nan_value()));
  __ jmp(&done);

  __ bind(&need_conversion);
  // Move the undefined value into the result register, which will
  // trigger conversion.
  __ Move(result, Immediate(isolate()->factory()->undefined_value()));
  __ jmp(&done);

  NopRuntimeCallHelper call_helper;
  generator.GenerateSlow(masm_, NOT_PART_OF_IC_HANDLER, call_helper);

  __ bind(&done);
  context()->Plug(result);
}


void FullCodeGenerator::EmitStringCharAt(CallRuntime* expr) {
  ZoneList<Expression*>* args = expr->arguments();
  DCHECK(args->length() == 2);

  VisitForStackValue(args->at(0));
  VisitForAccumulatorValue(args->at(1));

  Register object = ebx;
  Register index = eax;
  Register scratch = edx;
  Register result = eax;

  PopOperand(object);

  Label need_conversion;
  Label index_out_of_range;
  Label done;
  StringCharAtGenerator generator(object,
                                  index,
                                  scratch,
                                  result,
                                  &need_conversion,
                                  &need_conversion,
                                  &index_out_of_range,
                                  STRING_INDEX_IS_NUMBER);
  generator.GenerateFast(masm_);
  __ jmp(&done);

  __ bind(&index_out_of_range);
  // When the index is out of range, the spec requires us to return
  // the empty string.
  __ Move(result, Immediate(isolate()->factory()->empty_string()));
  __ jmp(&done);

  __ bind(&need_conversion);
  // Move smi zero into the result register, which will trigger
  // conversion.
  __ Move(result, Immediate(Smi::FromInt(0)));
  __ jmp(&done);

  NopRuntimeCallHelper call_helper;
  generator.GenerateSlow(masm_, NOT_PART_OF_IC_HANDLER, call_helper);

  __ bind(&done);
  context()->Plug(result);
}


void FullCodeGenerator::EmitCall(CallRuntime* expr) {
  ZoneList<Expression*>* args = expr->arguments();
  DCHECK_LE(2, args->length());
  // Push target, receiver and arguments onto the stack.
  for (Expression* const arg : *args) {
    VisitForStackValue(arg);
  }
  PrepareForBailoutForId(expr->CallId(), NO_REGISTERS);
  // Move target to edi.
  int const argc = args->length() - 2;
  __ mov(edi, Operand(esp, (argc + 1) * kPointerSize));
  // Call the target.
  __ mov(eax, Immediate(argc));
  __ Call(isolate()->builtins()->Call(), RelocInfo::CODE_TARGET);
  OperandStackDepthDecrement(argc + 1);
  // Restore context register.
  __ mov(esi, Operand(ebp, StandardFrameConstants::kContextOffset));
  // Discard the function left on TOS.
  context()->DropAndPlug(1, eax);
}


void FullCodeGenerator::EmitHasCachedArrayIndex(CallRuntime* expr) {
  ZoneList<Expression*>* args = expr->arguments();
  DCHECK(args->length() == 1);

  VisitForAccumulatorValue(args->at(0));

  __ AssertString(eax);

  Label materialize_true, materialize_false;
  Label* if_true = NULL;
  Label* if_false = NULL;
  Label* fall_through = NULL;
  context()->PrepareTest(&materialize_true, &materialize_false,
                         &if_true, &if_false, &fall_through);

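  // The mask bits are clear when the hash field caches an array index, so a
  // zero test result routes to the true branch.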
  __ test(FieldOperand(eax, String::kHashFieldOffset),
          Immediate(String::kContainsCachedArrayIndexMask));
  PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
  Split(zero, if_true, if_false, fall_through);

  context()->Plug(if_true, if_false);
}


void FullCodeGenerator::EmitGetCachedArrayIndex(CallRuntime* expr) {
  ZoneList<Expression*>* args = expr->arguments();
  DCHECK(args->length() == 1);
  VisitForAccumulatorValue(args->at(0));

  __ AssertString(eax);

  __ mov(eax, FieldOperand(eax, String::kHashFieldOffset));
  __ IndexFromHash(eax, eax);

  context()->Plug(eax);
}


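// %_GetSuperConstructor: a derived constructor's [[Prototype]] is its parent
// class constructor, read here from the prototype field of the function's map.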
void FullCodeGenerator::EmitGetSuperConstructor(CallRuntime* expr) {
  ZoneList<Expression*>* args = expr->arguments();
  DCHECK_EQ(1, args->length());
  VisitForAccumulatorValue(args->at(0));
  __ AssertFunction(eax);
  __ mov(eax, FieldOperand(eax, HeapObject::kMapOffset));
  __ mov(eax, FieldOperand(eax, Map::kPrototypeOffset));
  context()->Plug(eax);
}


void FullCodeGenerator::EmitDebugIsActive(CallRuntime* expr) {
  DCHECK(expr->arguments()->length() == 0);
  ExternalReference debug_is_active =
      ExternalReference::debug_is_active_address(isolate());
  __ movzx_b(eax, Operand::StaticVariable(debug_is_active));
  __ SmiTag(eax);
  context()->Plug(eax);
}


void FullCodeGenerator::EmitCreateIterResultObject(CallRuntime* expr) {
  ZoneList<Expression*>* args = expr->arguments();
  DCHECK_EQ(2, args->length());
  VisitForStackValue(args->at(0));
  VisitForStackValue(args->at(1));

  Label runtime, done;

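  // Fast path: allocate the JSIteratorResult inline and fill its five fields
  // (map, properties, elements, done, value); the two pops consume the value
  // and done operands pushed above.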
  __ Allocate(JSIteratorResult::kSize, eax, ecx, edx, &runtime, TAG_OBJECT);
  __ mov(ebx, NativeContextOperand());
  __ mov(ebx, ContextOperand(ebx, Context::ITERATOR_RESULT_MAP_INDEX));
  __ mov(FieldOperand(eax, HeapObject::kMapOffset), ebx);
  __ mov(FieldOperand(eax, JSObject::kPropertiesOffset),
         isolate()->factory()->empty_fixed_array());
  __ mov(FieldOperand(eax, JSObject::kElementsOffset),
         isolate()->factory()->empty_fixed_array());
  __ pop(FieldOperand(eax, JSIteratorResult::kDoneOffset));
  __ pop(FieldOperand(eax, JSIteratorResult::kValueOffset));
  STATIC_ASSERT(JSIteratorResult::kSize == 5 * kPointerSize);
  __ jmp(&done, Label::kNear);

  __ bind(&runtime);
  CallRuntimeWithOperands(Runtime::kCreateIterResultObject);

  __ bind(&done);
  context()->Plug(eax);
}


void FullCodeGenerator::EmitLoadJSRuntimeFunction(CallRuntime* expr) {
  // Push undefined as receiver.
  PushOperand(isolate()->factory()->undefined_value());

  __ LoadGlobalFunction(expr->context_index(), eax);
}


void FullCodeGenerator::EmitCallJSRuntimeFunction(CallRuntime* expr) {
  ZoneList<Expression*>* args = expr->arguments();
  int arg_count = args->length();

  SetCallPosition(expr);
  __ mov(edi, Operand(esp, (arg_count + 1) * kPointerSize));
  __ Set(eax, arg_count);
  __ Call(isolate()->builtins()->Call(ConvertReceiverMode::kNullOrUndefined),
          RelocInfo::CODE_TARGET);
  OperandStackDepthDecrement(arg_count + 1);
}


void FullCodeGenerator::VisitCallRuntime(CallRuntime* expr) {
  ZoneList<Expression*>* args = expr->arguments();
  int arg_count = args->length();

  if (expr->is_jsruntime()) {
    Comment cmnt(masm_, "[ CallRuntime");
    EmitLoadJSRuntimeFunction(expr);

    // Push the target function under the receiver.
    PushOperand(Operand(esp, 0));
    __ mov(Operand(esp, kPointerSize), eax);

    // Push the arguments ("left-to-right").
    for (int i = 0; i < arg_count; i++) {
      VisitForStackValue(args->at(i));
    }

    PrepareForBailoutForId(expr->CallId(), NO_REGISTERS);
    EmitCallJSRuntimeFunction(expr);

    // Restore context register.
    __ mov(esi, Operand(ebp, StandardFrameConstants::kContextOffset));
    context()->DropAndPlug(1, eax);

  } else {
    const Runtime::Function* function = expr->function();
    switch (function->function_id) {
#define CALL_INTRINSIC_GENERATOR(Name)     \
  case Runtime::kInline##Name: {           \
    Comment cmnt(masm_, "[ Inline" #Name); \
    return Emit##Name(expr);               \
  }
      FOR_EACH_FULL_CODE_INTRINSIC(CALL_INTRINSIC_GENERATOR)
#undef CALL_INTRINSIC_GENERATOR
      default: {
        Comment cmnt(masm_, "[ CallRuntime for unhandled intrinsic");
        // Push the arguments ("left-to-right").
        for (int i = 0; i < arg_count; i++) {
          VisitForStackValue(args->at(i));
        }

        // Call the C runtime function.
        PrepareForBailoutForId(expr->CallId(), NO_REGISTERS);
        __ CallRuntime(expr->function(), arg_count);
        OperandStackDepthDecrement(arg_count);
        context()->Plug(eax);
      }
    }
  }
}


void FullCodeGenerator::VisitUnaryOperation(UnaryOperation* expr) {
  switch (expr->op()) {
    case Token::DELETE: {
      Comment cmnt(masm_, "[ UnaryOperation (DELETE)");
      Property* property = expr->expression()->AsProperty();
      VariableProxy* proxy = expr->expression()->AsVariableProxy();

      if (property != NULL) {
        VisitForStackValue(property->obj());
        VisitForStackValue(property->key());
        CallRuntimeWithOperands(is_strict(language_mode())
                                    ? Runtime::kDeleteProperty_Strict
                                    : Runtime::kDeleteProperty_Sloppy);
        context()->Plug(eax);
      } else if (proxy != NULL) {
        Variable* var = proxy->var();
        // Delete of an unqualified identifier is disallowed in strict mode but
        // "delete this" is allowed.
        bool is_this = var->HasThisName(isolate());
        DCHECK(is_sloppy(language_mode()) || is_this);
        if (var->IsUnallocatedOrGlobalSlot()) {
          __ mov(eax, NativeContextOperand());
          __ push(ContextOperand(eax, Context::EXTENSION_INDEX));
          __ push(Immediate(var->name()));
          __ CallRuntime(Runtime::kDeleteProperty_Sloppy);
          context()->Plug(eax);
        } else if (var->IsStackAllocated() || var->IsContextSlot()) {
          // Result of deleting non-global variables is false. 'this' is
          // not really a variable, though we implement it as one. The
          // subexpression does not have side effects.
          context()->Plug(is_this);
        } else {
          // Non-global variable. Call the runtime to try to delete from the
          // context where the variable was introduced.
          __ Push(var->name());
          __ CallRuntime(Runtime::kDeleteLookupSlot);
          context()->Plug(eax);
        }
      } else {
        // Result of deleting non-property, non-variable reference is true.
        // The subexpression may have side effects.
        VisitForEffect(expr->expression());
        context()->Plug(true);
      }
      break;
    }

    case Token::VOID: {
      Comment cmnt(masm_, "[ UnaryOperation (VOID)");
      VisitForEffect(expr->expression());
      context()->Plug(isolate()->factory()->undefined_value());
      break;
    }

    case Token::NOT: {
      Comment cmnt(masm_, "[ UnaryOperation (NOT)");
      if (context()->IsEffect()) {
        // Unary NOT has no side effects so it's only necessary to visit the
        // subexpression. Match the optimizing compiler by not branching.
        VisitForEffect(expr->expression());
      } else if (context()->IsTest()) {
        const TestContext* test = TestContext::cast(context());
        // The labels are swapped for the recursive call.
        VisitForControl(expr->expression(),
                        test->false_label(),
                        test->true_label(),
                        test->fall_through());
        context()->Plug(test->true_label(), test->false_label());
      } else {
        // We handle value contexts explicitly rather than simply visiting
        // for control and plugging the control flow into the context,
        // because we need to prepare a pair of extra administrative AST ids
        // for the optimizing compiler.
        DCHECK(context()->IsAccumulatorValue() || context()->IsStackValue());
        Label materialize_true, materialize_false, done;
        VisitForControl(expr->expression(),
                        &materialize_false,
                        &materialize_true,
                        &materialize_true);
        if (!context()->IsAccumulatorValue()) OperandStackDepthIncrement(1);
        __ bind(&materialize_true);
        PrepareForBailoutForId(expr->MaterializeTrueId(), NO_REGISTERS);
        if (context()->IsAccumulatorValue()) {
          __ mov(eax, isolate()->factory()->true_value());
        } else {
          __ Push(isolate()->factory()->true_value());
        }
        __ jmp(&done, Label::kNear);
        __ bind(&materialize_false);
        PrepareForBailoutForId(expr->MaterializeFalseId(), NO_REGISTERS);
        if (context()->IsAccumulatorValue()) {
          __ mov(eax, isolate()->factory()->false_value());
        } else {
          __ Push(isolate()->factory()->false_value());
        }
        __ bind(&done);
      }
      break;
    }

    case Token::TYPEOF: {
      Comment cmnt(masm_, "[ UnaryOperation (TYPEOF)");
      {
        AccumulatorValueContext context(this);
        VisitForTypeofValue(expr->expression());
      }
      __ mov(ebx, eax);
      TypeofStub typeof_stub(isolate());
      __ CallStub(&typeof_stub);
      context()->Plug(eax);
      break;
    }

    default:
      UNREACHABLE();
  }
}


void FullCodeGenerator::VisitCountOperation(CountOperation* expr) {
  DCHECK(expr->expression()->IsValidReferenceExpressionOrThis());

  Comment cmnt(masm_, "[ CountOperation");

  Property* prop = expr->expression()->AsProperty();
  LhsKind assign_type = Property::GetAssignType(prop);

  // Evaluate expression and get value.
  if (assign_type == VARIABLE) {
    DCHECK(expr->expression()->AsVariableProxy()->var() != NULL);
    AccumulatorValueContext context(this);
    EmitVariableLoad(expr->expression()->AsVariableProxy());
  } else {
    // Reserve space for result of postfix operation.
    if (expr->is_postfix() && !context()->IsEffect()) {
      PushOperand(Smi::FromInt(0));
    }
    switch (assign_type) {
      case NAMED_PROPERTY: {
        // Put the object both on the stack and in the register.
        VisitForStackValue(prop->obj());
        __ mov(LoadDescriptor::ReceiverRegister(), Operand(esp, 0));
        EmitNamedPropertyLoad(prop);
        break;
      }

      case NAMED_SUPER_PROPERTY: {
        VisitForStackValue(prop->obj()->AsSuperPropertyReference()->this_var());
        VisitForAccumulatorValue(
            prop->obj()->AsSuperPropertyReference()->home_object());
        PushOperand(result_register());
        PushOperand(MemOperand(esp, kPointerSize));
        PushOperand(result_register());
        EmitNamedSuperPropertyLoad(prop);
        break;
      }

      case KEYED_SUPER_PROPERTY: {
        VisitForStackValue(prop->obj()->AsSuperPropertyReference()->this_var());
        VisitForStackValue(
            prop->obj()->AsSuperPropertyReference()->home_object());
        VisitForAccumulatorValue(prop->key());
        PushOperand(result_register());
        PushOperand(MemOperand(esp, 2 * kPointerSize));
        PushOperand(MemOperand(esp, 2 * kPointerSize));
        PushOperand(result_register());
        EmitKeyedSuperPropertyLoad(prop);
        break;
      }

      case KEYED_PROPERTY: {
        VisitForStackValue(prop->obj());
        VisitForStackValue(prop->key());
        __ mov(LoadDescriptor::ReceiverRegister(),
               Operand(esp, kPointerSize));  // Object.
        __ mov(LoadDescriptor::NameRegister(), Operand(esp, 0));  // Key.
        EmitKeyedPropertyLoad(prop);
        break;
      }

      case VARIABLE:
        UNREACHABLE();
    }
  }

  // We need a second deoptimization point after loading the value
  // because evaluating the property load may have a side effect.
  if (assign_type == VARIABLE) {
    PrepareForBailout(expr->expression(), TOS_REG);
  } else {
    PrepareForBailoutForId(prop->LoadId(), TOS_REG);
  }

  // Inline smi case if we are in a loop.
  Label done, stub_call;
  JumpPatchSite patch_site(masm_);
  if (ShouldInlineSmiCase(expr->op())) {
    Label slow;
    patch_site.EmitJumpIfNotSmi(eax, &slow, Label::kNear);

    // Save result for postfix expressions.
    if (expr->is_postfix()) {
      if (!context()->IsEffect()) {
        // Save the result on the stack. If we have a named or keyed property
        // we store the result under the receiver that is currently on top
        // of the stack.
        switch (assign_type) {
          case VARIABLE:
            __ push(eax);
            break;
          case NAMED_PROPERTY:
            __ mov(Operand(esp, kPointerSize), eax);
            break;
          case NAMED_SUPER_PROPERTY:
            __ mov(Operand(esp, 2 * kPointerSize), eax);
            break;
          case KEYED_PROPERTY:
            __ mov(Operand(esp, 2 * kPointerSize), eax);
            break;
          case KEYED_SUPER_PROPERTY:
            __ mov(Operand(esp, 3 * kPointerSize), eax);
            break;
        }
      }
    }

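    // Smi arithmetic on the tagged value: adding the tagged constant 1 bumps
    // the untagged payload by one. On overflow, undo the operation and fall
    // back to the BinaryOpIC stub below.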
    if (expr->op() == Token::INC) {
      __ add(eax, Immediate(Smi::FromInt(1)));
    } else {
      __ sub(eax, Immediate(Smi::FromInt(1)));
    }
    __ j(no_overflow, &done, Label::kNear);
    // Call stub. Undo operation first.
    if (expr->op() == Token::INC) {
      __ sub(eax, Immediate(Smi::FromInt(1)));
    } else {
      __ add(eax, Immediate(Smi::FromInt(1)));
    }
    __ jmp(&stub_call, Label::kNear);
    __ bind(&slow);
  }
  if (!is_strong(language_mode())) {
    ToNumberStub convert_stub(isolate());
    __ CallStub(&convert_stub);
    PrepareForBailoutForId(expr->ToNumberId(), TOS_REG);
  }

  // Save result for postfix expressions.
  if (expr->is_postfix()) {
    if (!context()->IsEffect()) {
      // Save the result on the stack. If we have a named or keyed property
      // we store the result under the receiver that is currently on top
      // of the stack.
      switch (assign_type) {
        case VARIABLE:
          PushOperand(eax);
          break;
        case NAMED_PROPERTY:
          __ mov(Operand(esp, kPointerSize), eax);
          break;
        case NAMED_SUPER_PROPERTY:
          __ mov(Operand(esp, 2 * kPointerSize), eax);
          break;
        case KEYED_PROPERTY:
          __ mov(Operand(esp, 2 * kPointerSize), eax);
          break;
        case KEYED_SUPER_PROPERTY:
          __ mov(Operand(esp, 3 * kPointerSize), eax);
          break;
      }
    }
  }

  SetExpressionPosition(expr);

  // Call stub for +1/-1.
  __ bind(&stub_call);
  __ mov(edx, eax);
  __ mov(eax, Immediate(Smi::FromInt(1)));
  Handle<Code> code =
      CodeFactory::BinaryOpIC(isolate(), expr->binary_op()).code();
  CallIC(code, expr->CountBinOpFeedbackId());
  patch_site.EmitPatchInfo();
  __ bind(&done);

  if (is_strong(language_mode())) {
    PrepareForBailoutForId(expr->ToNumberId(), TOS_REG);
  }
  // Store the value returned in eax.
  switch (assign_type) {
    case VARIABLE:
      if (expr->is_postfix()) {
        // Perform the assignment as if via '='.
        { EffectContext context(this);
          EmitVariableAssignment(expr->expression()->AsVariableProxy()->var(),
                                 Token::ASSIGN, expr->CountSlot());
          PrepareForBailoutForId(expr->AssignmentId(), TOS_REG);
          context.Plug(eax);
        }
        // For all contexts except EffectContext we have the result on
        // top of the stack.
        if (!context()->IsEffect()) {
          context()->PlugTOS();
        }
      } else {
        // Perform the assignment as if via '='.
        EmitVariableAssignment(expr->expression()->AsVariableProxy()->var(),
                               Token::ASSIGN, expr->CountSlot());
        PrepareForBailoutForId(expr->AssignmentId(), TOS_REG);
        context()->Plug(eax);
      }
      break;
    case NAMED_PROPERTY: {
      __ mov(StoreDescriptor::NameRegister(),
             prop->key()->AsLiteral()->value());
      PopOperand(StoreDescriptor::ReceiverRegister());
      EmitLoadStoreICSlot(expr->CountSlot());
      CallStoreIC();
      PrepareForBailoutForId(expr->AssignmentId(), TOS_REG);
      if (expr->is_postfix()) {
        if (!context()->IsEffect()) {
          context()->PlugTOS();
        }
      } else {
        context()->Plug(eax);
      }
      break;
    }
    case NAMED_SUPER_PROPERTY: {
      EmitNamedSuperPropertyStore(prop);
      if (expr->is_postfix()) {
        if (!context()->IsEffect()) {
          context()->PlugTOS();
        }
      } else {
        context()->Plug(eax);
      }
      break;
    }
    case KEYED_SUPER_PROPERTY: {
      EmitKeyedSuperPropertyStore(prop);
      if (expr->is_postfix()) {
        if (!context()->IsEffect()) {
          context()->PlugTOS();
        }
      } else {
        context()->Plug(eax);
      }
      break;
    }
    case KEYED_PROPERTY: {
      PopOperand(StoreDescriptor::NameRegister());
      PopOperand(StoreDescriptor::ReceiverRegister());
      Handle<Code> ic =
          CodeFactory::KeyedStoreIC(isolate(), language_mode()).code();
      EmitLoadStoreICSlot(expr->CountSlot());
      CallIC(ic);
      PrepareForBailoutForId(expr->AssignmentId(), TOS_REG);
      if (expr->is_postfix()) {
        // Result is on the stack.
        if (!context()->IsEffect()) {
          context()->PlugTOS();
        }
      } else {
        context()->Plug(eax);
      }
      break;
    }
  }
}


void FullCodeGenerator::EmitLiteralCompareTypeof(Expression* expr,
                                                 Expression* sub_expr,
                                                 Handle<String> check) {
  Label materialize_true, materialize_false;
  Label* if_true = NULL;
  Label* if_false = NULL;
  Label* fall_through = NULL;
  context()->PrepareTest(&materialize_true, &materialize_false,
                         &if_true, &if_false, &fall_through);

  { AccumulatorValueContext context(this);
    VisitForTypeofValue(sub_expr);
  }
  PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);

  Factory* factory = isolate()->factory();
  if (String::Equals(check, factory->number_string())) {
    __ JumpIfSmi(eax, if_true);
    __ cmp(FieldOperand(eax, HeapObject::kMapOffset),
           isolate()->factory()->heap_number_map());
    Split(equal, if_true, if_false, fall_through);
  } else if (String::Equals(check, factory->string_string())) {
    __ JumpIfSmi(eax, if_false);
    __ CmpObjectType(eax, FIRST_NONSTRING_TYPE, edx);
    Split(below, if_true, if_false, fall_through);
  } else if (String::Equals(check, factory->symbol_string())) {
    __ JumpIfSmi(eax, if_false);
    __ CmpObjectType(eax, SYMBOL_TYPE, edx);
    Split(equal, if_true, if_false, fall_through);
  } else if (String::Equals(check, factory->boolean_string())) {
    __ cmp(eax, isolate()->factory()->true_value());
    __ j(equal, if_true);
    __ cmp(eax, isolate()->factory()->false_value());
    Split(equal, if_true, if_false, fall_through);
  } else if (String::Equals(check, factory->undefined_string())) {
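    // typeof null is "object", not "undefined", so null must be rejected
    // before the undetectable-object check below.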
    __ cmp(eax, isolate()->factory()->null_value());
    __ j(equal, if_false);
    __ JumpIfSmi(eax, if_false);
    // Check for undetectable objects => true.
    __ mov(edx, FieldOperand(eax, HeapObject::kMapOffset));
    __ test_b(FieldOperand(edx, Map::kBitFieldOffset),
              1 << Map::kIsUndetectable);
    Split(not_zero, if_true, if_false, fall_through);
  } else if (String::Equals(check, factory->function_string())) {
    __ JumpIfSmi(eax, if_false);
    // Check for callable and not undetectable objects => true.
    __ mov(edx, FieldOperand(eax, HeapObject::kMapOffset));
    __ movzx_b(ecx, FieldOperand(edx, Map::kBitFieldOffset));
    __ and_(ecx, (1 << Map::kIsCallable) | (1 << Map::kIsUndetectable));
    __ cmp(ecx, 1 << Map::kIsCallable);
    Split(equal, if_true, if_false, fall_through);
  } else if (String::Equals(check, factory->object_string())) {
    __ JumpIfSmi(eax, if_false);
    __ cmp(eax, isolate()->factory()->null_value());
    __ j(equal, if_true);
    STATIC_ASSERT(LAST_JS_RECEIVER_TYPE == LAST_TYPE);
    __ CmpObjectType(eax, FIRST_JS_RECEIVER_TYPE, edx);
    __ j(below, if_false);
    // Check for callable or undetectable objects => false.
    __ test_b(FieldOperand(edx, Map::kBitFieldOffset),
              (1 << Map::kIsCallable) | (1 << Map::kIsUndetectable));
    Split(zero, if_true, if_false, fall_through);
// clang-format off
#define SIMD128_TYPE(TYPE, Type, type, lane_count, lane_type) \
  } else if (String::Equals(check, factory->type##_string())) { \
    __ JumpIfSmi(eax, if_false); \
    __ cmp(FieldOperand(eax, HeapObject::kMapOffset), \
           isolate()->factory()->type##_map()); \
    Split(equal, if_true, if_false, fall_through);
  SIMD128_TYPES(SIMD128_TYPE)
#undef SIMD128_TYPE
  // clang-format on
  } else {
    if (if_false != fall_through) __ jmp(if_false);
  }
  context()->Plug(if_true, if_false);
}


void FullCodeGenerator::VisitCompareOperation(CompareOperation* expr) {
  Comment cmnt(masm_, "[ CompareOperation");
  SetExpressionPosition(expr);

  // First we try a fast inlined version of the compare when one of
  // the operands is a literal.
  if (TryLiteralCompare(expr)) return;

  // Always perform the comparison for its control flow. Pack the result
  // into the expression's context after the comparison is performed.
  Label materialize_true, materialize_false;
  Label* if_true = NULL;
  Label* if_false = NULL;
  Label* fall_through = NULL;
  context()->PrepareTest(&materialize_true, &materialize_false,
                         &if_true, &if_false, &fall_through);

  Token::Value op = expr->op();
  VisitForStackValue(expr->left());
  switch (op) {
    case Token::IN:
      VisitForStackValue(expr->right());
      CallRuntimeWithOperands(Runtime::kHasProperty);
      PrepareForBailoutBeforeSplit(expr, false, NULL, NULL);
      __ cmp(eax, isolate()->factory()->true_value());
      Split(equal, if_true, if_false, fall_through);
      break;

    case Token::INSTANCEOF: {
      VisitForAccumulatorValue(expr->right());
      PopOperand(edx);
      InstanceOfStub stub(isolate());
      __ CallStub(&stub);
      PrepareForBailoutBeforeSplit(expr, false, NULL, NULL);
      __ cmp(eax, isolate()->factory()->true_value());
      Split(equal, if_true, if_false, fall_through);
      break;
    }

    default: {
      VisitForAccumulatorValue(expr->right());
      Condition cc = CompareIC::ComputeCondition(op);
      PopOperand(edx);

      bool inline_smi_code = ShouldInlineSmiCase(op);
      JumpPatchSite patch_site(masm_);
      if (inline_smi_code) {
        Label slow_case;
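        // A single combined check covers both operands: the low bit of
        // edx|eax is set iff at least one of them is a heap object.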
        __ mov(ecx, edx);
        __ or_(ecx, eax);
        patch_site.EmitJumpIfNotSmi(ecx, &slow_case, Label::kNear);
        __ cmp(edx, eax);
        Split(cc, if_true, if_false, NULL);
        __ bind(&slow_case);
      }

      Handle<Code> ic = CodeFactory::CompareIC(isolate(), op).code();
      CallIC(ic, expr->CompareOperationFeedbackId());
      patch_site.EmitPatchInfo();

      PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
      __ test(eax, eax);
      Split(cc, if_true, if_false, fall_through);
    }
  }

  // Convert the result of the comparison into one expected for this
  // expression's context.
  context()->Plug(if_true, if_false);
}


void FullCodeGenerator::EmitLiteralCompareNil(CompareOperation* expr,
                                              Expression* sub_expr,
                                              NilValue nil) {
  Label materialize_true, materialize_false;
  Label* if_true = NULL;
  Label* if_false = NULL;
  Label* fall_through = NULL;
  context()->PrepareTest(&materialize_true, &materialize_false,
                         &if_true, &if_false, &fall_through);

  VisitForAccumulatorValue(sub_expr);
  PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);

  Handle<Object> nil_value = nil == kNullValue
      ? isolate()->factory()->null_value()
      : isolate()->factory()->undefined_value();
  if (expr->op() == Token::EQ_STRICT) {
    __ cmp(eax, nil_value);
    Split(equal, if_true, if_false, fall_through);
  } else {
    Handle<Code> ic = CompareNilICStub::GetUninitialized(isolate(), nil);
    CallIC(ic, expr->CompareOperationFeedbackId());
    __ cmp(eax, isolate()->factory()->true_value());
    Split(equal, if_true, if_false, fall_through);
  }
  context()->Plug(if_true, if_false);
}


void FullCodeGenerator::VisitThisFunction(ThisFunction* expr) {
  __ mov(eax, Operand(ebp, JavaScriptFrameConstants::kFunctionOffset));
  context()->Plug(eax);
}


Register FullCodeGenerator::result_register() {
  return eax;
}


Register FullCodeGenerator::context_register() {
  return esi;
}


void FullCodeGenerator::StoreToFrameField(int frame_offset, Register value) {
  DCHECK_EQ(POINTER_SIZE_ALIGN(frame_offset), frame_offset);
  __ mov(Operand(ebp, frame_offset), value);
}


void FullCodeGenerator::LoadContextField(Register dst, int context_index) {
  __ mov(dst, ContextOperand(esi, context_index));
}


void FullCodeGenerator::PushFunctionArgumentForContextAllocation() {
  Scope* closure_scope = scope()->ClosureScope();
  if (closure_scope->is_script_scope() ||
      closure_scope->is_module_scope()) {
    // Contexts nested in the native context have a canonical empty function
    // as their closure, not the anonymous closure containing the global
    // code.
    __ mov(eax, NativeContextOperand());
    PushOperand(ContextOperand(eax, Context::CLOSURE_INDEX));
  } else if (closure_scope->is_eval_scope()) {
    // Contexts nested inside eval code have the same closure as the context
    // calling eval, not the anonymous closure containing the eval code.
    // Fetch it from the context.
    PushOperand(ContextOperand(esi, Context::CLOSURE_INDEX));
  } else {
    DCHECK(closure_scope->is_function_scope());
    PushOperand(Operand(ebp, JavaScriptFrameConstants::kFunctionOffset));
  }
}


// ----------------------------------------------------------------------------
// Non-local control flow support.

void FullCodeGenerator::EnterFinallyBlock() {
  // Store pending message while executing finally block.
  ExternalReference pending_message_obj =
      ExternalReference::address_of_pending_message_obj(isolate());
  __ mov(edx, Operand::StaticVariable(pending_message_obj));
  PushOperand(edx);

  ClearPendingMessage();
}


void FullCodeGenerator::ExitFinallyBlock() {
  DCHECK(!result_register().is(edx));
  // Restore pending message from stack.
  PopOperand(edx);
  ExternalReference pending_message_obj =
      ExternalReference::address_of_pending_message_obj(isolate());
  __ mov(Operand::StaticVariable(pending_message_obj), edx);
}


void FullCodeGenerator::ClearPendingMessage() {
  DCHECK(!result_register().is(edx));
  ExternalReference pending_message_obj =
      ExternalReference::address_of_pending_message_obj(isolate());
  __ mov(edx, Immediate(isolate()->factory()->the_hole_value()));
  __ mov(Operand::StaticVariable(pending_message_obj), edx);
}


void FullCodeGenerator::EmitLoadStoreICSlot(FeedbackVectorSlot slot) {
  DCHECK(!slot.IsInvalid());
  __ mov(VectorStoreICTrampolineDescriptor::SlotRegister(),
         Immediate(SmiFromSlot(slot)));
}

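// Replay the control-flow commands (return, throw, break, continue) deferred
// while unwinding through try-finally blocks; the token popped into edx
// selects which deferred command, if any, should resume now.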
void FullCodeGenerator::DeferredCommands::EmitCommands() {
  DCHECK(!result_register().is(edx));
  __ Pop(result_register());  // Restore the accumulator.
  __ Pop(edx);                // Get the token.
  for (DeferredCommand cmd : commands_) {
    Label skip;
    __ cmp(edx, Immediate(Smi::FromInt(cmd.token)));
    __ j(not_equal, &skip);
    switch (cmd.command) {
      case kReturn:
        codegen_->EmitUnwindAndReturn();
        break;
      case kThrow:
        __ Push(result_register());
        __ CallRuntime(Runtime::kReThrow);
        break;
      case kContinue:
        codegen_->EmitContinue(cmd.target);
        break;
      case kBreak:
        codegen_->EmitBreak(cmd.target);
        break;
    }
    __ bind(&skip);
  }
}

#undef __


static const byte kJnsInstruction = 0x79;
static const byte kJnsOffset = 0x11;
static const byte kNopByteOne = 0x66;
static const byte kNopByteTwo = 0x90;
#ifdef DEBUG
static const byte kCallInstruction = 0xe8;
#endif


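// A back edge site is laid out as: sub <counter>, <delta>; jns ok;
// call <stub>; ok: ... . Patching rewrites the two jns bytes: INTERRUPT
// restores the conditional jump, while the OSR states turn it into a
// two-byte nop so the call is always taken.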
void BackEdgeTable::PatchAt(Code* unoptimized_code,
                            Address pc,
                            BackEdgeState target_state,
                            Code* replacement_code) {
  Address call_target_address = pc - kIntSize;
  Address jns_instr_address = call_target_address - 3;
  Address jns_offset_address = call_target_address - 2;

  switch (target_state) {
    case INTERRUPT:
      // sub <profiling_counter>, <delta>  ;; Not changed
      // jns ok
      // call <interrupt stub>
      // ok:
      *jns_instr_address = kJnsInstruction;
      *jns_offset_address = kJnsOffset;
      break;
    case ON_STACK_REPLACEMENT:
    case OSR_AFTER_STACK_CHECK:
      // sub <profiling_counter>, <delta>  ;; Not changed
      // nop
      // nop
      // call <on-stack replacement>
      // ok:
      *jns_instr_address = kNopByteOne;
      *jns_offset_address = kNopByteTwo;
      break;
  }

  Assembler::set_target_address_at(unoptimized_code->GetIsolate(),
                                   call_target_address, unoptimized_code,
                                   replacement_code->entry());
  unoptimized_code->GetHeap()->incremental_marking()->RecordCodeTargetPatch(
      unoptimized_code, call_target_address, replacement_code);
}


BackEdgeTable::BackEdgeState BackEdgeTable::GetBackEdgeState(
    Isolate* isolate,
    Code* unoptimized_code,
    Address pc) {
  Address call_target_address = pc - kIntSize;
  Address jns_instr_address = call_target_address - 3;
  DCHECK_EQ(kCallInstruction, *(call_target_address - 1));

  if (*jns_instr_address == kJnsInstruction) {
    DCHECK_EQ(kJnsOffset, *(call_target_address - 2));
    DCHECK_EQ(isolate->builtins()->InterruptCheck()->entry(),
              Assembler::target_address_at(call_target_address,
                                           unoptimized_code));
    return INTERRUPT;
  }

  DCHECK_EQ(kNopByteOne, *jns_instr_address);
  DCHECK_EQ(kNopByteTwo, *(call_target_address - 2));

  if (Assembler::target_address_at(call_target_address, unoptimized_code) ==
      isolate->builtins()->OnStackReplacement()->entry()) {
    return ON_STACK_REPLACEMENT;
  }

  DCHECK_EQ(isolate->builtins()->OsrAfterStackCheck()->entry(),
            Assembler::target_address_at(call_target_address,
                                         unoptimized_code));
  return OSR_AFTER_STACK_CHECK;
}


}  // namespace internal
}  // namespace v8

#endif  // V8_TARGET_ARCH_IA32