// Copyright 2012 the V8 project authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.

#if V8_TARGET_ARCH_X87

#include "src/ast/scopes.h"
#include "src/code-factory.h"
#include "src/code-stubs.h"
#include "src/codegen.h"
#include "src/debug/debug.h"
#include "src/full-codegen/full-codegen.h"
#include "src/ic/ic.h"
#include "src/parsing/parser.h"
#include "src/x87/frames-x87.h"

namespace v8 {
namespace internal {

#define __ ACCESS_MASM(masm())

class JumpPatchSite BASE_EMBEDDED {
 public:
  explicit JumpPatchSite(MacroAssembler* masm) : masm_(masm) {
#ifdef DEBUG
    info_emitted_ = false;
#endif
  }

  ~JumpPatchSite() {
    DCHECK(patch_site_.is_bound() == info_emitted_);
  }

  void EmitJumpIfNotSmi(Register reg,
                        Label* target,
                        Label::Distance distance = Label::kFar) {
    __ test(reg, Immediate(kSmiTagMask));
    EmitJump(not_carry, target, distance);  // Always taken before patched.
  }

  void EmitJumpIfSmi(Register reg,
                     Label* target,
                     Label::Distance distance = Label::kFar) {
    __ test(reg, Immediate(kSmiTagMask));
    EmitJump(carry, target, distance);  // Never taken before patched.
  }

  void EmitPatchInfo() {
    if (patch_site_.is_bound()) {
      int delta_to_patch_site = masm_->SizeOfCodeGeneratedSince(&patch_site_);
      DCHECK(is_uint8(delta_to_patch_site));
      __ test(eax, Immediate(delta_to_patch_site));
#ifdef DEBUG
      info_emitted_ = true;
#endif
    } else {
      __ nop();  // Signals no inlined code.
    }
  }

 private:
  // jc will be patched with jz, jnc will become jnz.
  void EmitJump(Condition cc, Label* target, Label::Distance distance) {
    DCHECK(!patch_site_.is_bound() && !info_emitted_);
    DCHECK(cc == carry || cc == not_carry);
    __ bind(&patch_site_);
    __ j(cc, target, distance);
  }

  MacroAssembler* masm() { return masm_; }
  MacroAssembler* masm_;
  Label patch_site_;
#ifdef DEBUG
  bool info_emitted_;
#endif
};
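// How the patch site works (a sketch inferred from the code above, not an
// authoritative description of the IC patcher): `test reg, kSmiTagMask`
// clears CF, so before patching the jc emitted by EmitJumpIfSmi is never
// taken and the jnc emitted by EmitJumpIfNotSmi always branches to its
// (slow-case) target. EmitPatchInfo then encodes the uint8 distance back to
// the patch site in the immediate byte of a `test eax, ...` instruction,
// which is harmless to execute but lets the inline cache locate the jump
// and rewrite it into a real jz/jnz on the smi tag bit once it has
// specialized for smi operands.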


// Generate code for a JS function. On entry to the function the receiver
// and arguments have been pushed on the stack left to right, with the
// return address on top of them. The actual argument count matches the
// formal parameter count expected by the function.
//
// The live registers are:
//   o edi: the JS function object being called (i.e. ourselves)
//   o edx: the new target value
//   o esi: our context
//   o ebp: our caller's frame pointer
//   o esp: stack pointer (pointing to return address)
//
// The function builds a JS frame. Please see JavaScriptFrameConstants in
// frames-x87.h for its layout.
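//
// The entry stack layout this implies (derived from the receiver-offset
// arithmetic below; kPointerSize is 4 on this target), for n formal
// parameters:
//   esp[0]           : return address
//   esp[4]           : last argument
//   ...
//   esp[4 * n]       : first argument
//   esp[4 * (n + 1)] : receiver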
void FullCodeGenerator::Generate() {
  CompilationInfo* info = info_;
  profiling_counter_ = isolate()->factory()->NewCell(
      Handle<Smi>(Smi::FromInt(FLAG_interrupt_budget), isolate()));
  SetFunctionPosition(literal());
  Comment cmnt(masm_, "[ function compiled by full code generator");

  ProfileEntryHookStub::MaybeCallEntryHook(masm_);

  if (FLAG_debug_code && info->ExpectsJSReceiverAsReceiver()) {
    int receiver_offset = (info->scope()->num_parameters() + 1) * kPointerSize;
    __ mov(ecx, Operand(esp, receiver_offset));
    __ AssertNotSmi(ecx);
    __ CmpObjectType(ecx, FIRST_JS_RECEIVER_TYPE, ecx);
    __ Assert(above_equal, kSloppyFunctionExpectsJSReceiverReceiver);
  }

  // Open a frame scope to indicate that there is a frame on the stack. The
  // MANUAL indicates that the scope shouldn't actually generate code to set up
  // the frame (that is done below).
  FrameScope frame_scope(masm_, StackFrame::MANUAL);

  info->set_prologue_offset(masm_->pc_offset());
  __ Prologue(info->GeneratePreagedPrologue());

  { Comment cmnt(masm_, "[ Allocate locals");
    int locals_count = info->scope()->num_stack_slots();
    // Generators allocate locals, if any, in context slots.
    DCHECK(!IsGeneratorFunction(literal()->kind()) || locals_count == 0);
    OperandStackDepthIncrement(locals_count);
    if (locals_count == 1) {
      __ push(Immediate(isolate()->factory()->undefined_value()));
    } else if (locals_count > 1) {
      if (locals_count >= 128) {
        Label ok;
        __ mov(ecx, esp);
        __ sub(ecx, Immediate(locals_count * kPointerSize));
        ExternalReference stack_limit =
            ExternalReference::address_of_real_stack_limit(isolate());
        __ cmp(ecx, Operand::StaticVariable(stack_limit));
        __ j(above_equal, &ok, Label::kNear);
        __ CallRuntime(Runtime::kThrowStackOverflow);
        __ bind(&ok);
      }
      __ mov(eax, Immediate(isolate()->factory()->undefined_value()));
      const int kMaxPushes = 32;
      if (locals_count >= kMaxPushes) {
        int loop_iterations = locals_count / kMaxPushes;
        __ mov(ecx, loop_iterations);
        Label loop_header;
        __ bind(&loop_header);
        // Do pushes.
        for (int i = 0; i < kMaxPushes; i++) {
          __ push(eax);
        }
        __ dec(ecx);
        __ j(not_zero, &loop_header, Label::kNear);
      }
      int remaining = locals_count % kMaxPushes;
      // Emit the remaining pushes.
      for (int i = 0; i < remaining; i++) {
        __ push(eax);
      }
    }
  }
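  // Sizing note (a worked example, not original commentary): with
  // locals_count == 70 the code above emits a two-iteration loop of 32
  // unrolled pushes (64 slots) followed by 70 % 32 == 6 straight-line
  // pushes, trading a little code size against a push-per-slot loop.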

  bool function_in_register = true;

  // Possibly allocate a local context.
  if (info->scope()->num_heap_slots() > 0) {
    Comment cmnt(masm_, "[ Allocate context");
    bool need_write_barrier = true;
    int slots = info->scope()->num_heap_slots() - Context::MIN_CONTEXT_SLOTS;
    // Argument to NewContext is the function, which is still in edi.
    if (info->scope()->is_script_scope()) {
      __ push(edi);
      __ Push(info->scope()->GetScopeInfo(info->isolate()));
      __ CallRuntime(Runtime::kNewScriptContext);
      PrepareForBailoutForId(BailoutId::ScriptContext(), TOS_REG);
      // The new target value is not used, clobbering is safe.
      DCHECK_NULL(info->scope()->new_target_var());
    } else {
      if (info->scope()->new_target_var() != nullptr) {
        __ push(edx);  // Preserve new target.
      }
      if (slots <= FastNewContextStub::kMaximumSlots) {
        FastNewContextStub stub(isolate(), slots);
        __ CallStub(&stub);
        // Result of FastNewContextStub is always in new space.
        need_write_barrier = false;
      } else {
        __ push(edi);
        __ CallRuntime(Runtime::kNewFunctionContext);
      }
      if (info->scope()->new_target_var() != nullptr) {
        __ pop(edx);  // Restore new target.
      }
    }
    function_in_register = false;
    // Context is returned in eax. It replaces the context passed to us.
    // It's saved in the stack and kept live in esi.
    __ mov(esi, eax);
    __ mov(Operand(ebp, StandardFrameConstants::kContextOffset), eax);

    // Copy parameters into context if necessary.
    int num_parameters = info->scope()->num_parameters();
    int first_parameter = info->scope()->has_this_declaration() ? -1 : 0;
    for (int i = first_parameter; i < num_parameters; i++) {
      Variable* var = (i == -1) ? scope()->receiver() : scope()->parameter(i);
      if (var->IsContextSlot()) {
        int parameter_offset = StandardFrameConstants::kCallerSPOffset +
                               (num_parameters - 1 - i) * kPointerSize;
        // Load parameter from stack.
        __ mov(eax, Operand(ebp, parameter_offset));
        // Store it in the context.
        int context_offset = Context::SlotOffset(var->index());
        __ mov(Operand(esi, context_offset), eax);
        // Update the write barrier. This clobbers eax and ebx.
        if (need_write_barrier) {
          __ RecordWriteContextSlot(esi, context_offset, eax, ebx,
                                    kDontSaveFPRegs);
        } else if (FLAG_debug_code) {
          Label done;
          __ JumpIfInNewSpace(esi, eax, &done, Label::kNear);
          __ Abort(kExpectedNewSpaceObject);
          __ bind(&done);
        }
      }
    }
  }

  // The registers holding this function and the new target are both trashed
  // in case we bail out here. But since that can only happen when the new
  // target is not used and we allocate a context, the value of
  // |function_in_register| is correct.
  PrepareForBailoutForId(BailoutId::FunctionContext(), NO_REGISTERS);

  // Possibly set up a local binding to the this function which is used in
  // derived constructors with super calls.
  Variable* this_function_var = scope()->this_function_var();
  if (this_function_var != nullptr) {
    Comment cmnt(masm_, "[ This function");
    if (!function_in_register) {
      __ mov(edi, Operand(ebp, JavaScriptFrameConstants::kFunctionOffset));
      // The write barrier clobbers the register again; keep it marked as
      // such.
    }
    SetVar(this_function_var, edi, ebx, ecx);
  }

  // Possibly set up a local binding to the new target value.
  Variable* new_target_var = scope()->new_target_var();
  if (new_target_var != nullptr) {
    Comment cmnt(masm_, "[ new.target");
    SetVar(new_target_var, edx, ebx, ecx);
  }

  // Possibly allocate a rest parameter array.
  int rest_index;
  Variable* rest_param = scope()->rest_parameter(&rest_index);
  if (rest_param) {
    Comment cmnt(masm_, "[ Allocate rest parameter array");
    if (!function_in_register) {
      __ mov(edi, Operand(ebp, JavaScriptFrameConstants::kFunctionOffset));
    }
    FastNewRestParameterStub stub(isolate());
    __ CallStub(&stub);
    function_in_register = false;
    SetVar(rest_param, eax, ebx, edx);
  }

  Variable* arguments = scope()->arguments();
  if (arguments != NULL) {
    // Arguments object must be allocated after the context object, in
    // case the "arguments" or ".arguments" variables are in the context.
    Comment cmnt(masm_, "[ Allocate arguments object");
    if (!function_in_register) {
      __ mov(edi, Operand(ebp, JavaScriptFrameConstants::kFunctionOffset));
    }
    if (is_strict(language_mode()) || !has_simple_parameters()) {
      FastNewStrictArgumentsStub stub(isolate());
      __ CallStub(&stub);
    } else if (literal()->has_duplicate_parameters()) {
      __ Push(edi);
      __ CallRuntime(Runtime::kNewSloppyArguments_Generic);
    } else {
      FastNewSloppyArgumentsStub stub(isolate());
      __ CallStub(&stub);
    }

    SetVar(arguments, eax, ebx, edx);
  }

  if (FLAG_trace) {
    __ CallRuntime(Runtime::kTraceEnter);
  }

  // Visit the declarations and body unless there is an illegal
  // redeclaration.
  if (scope()->HasIllegalRedeclaration()) {
    Comment cmnt(masm_, "[ Declarations");
    VisitForEffect(scope()->GetIllegalRedeclaration());

  } else {
    PrepareForBailoutForId(BailoutId::FunctionEntry(), NO_REGISTERS);
    { Comment cmnt(masm_, "[ Declarations");
      VisitDeclarations(scope()->declarations());
    }

    // Assert that the declarations do not use ICs. Otherwise the debugger
    // won't be able to redirect a PC at an IC to the correct IC in newly
    // recompiled code.
    DCHECK_EQ(0, ic_total_count_);

    { Comment cmnt(masm_, "[ Stack check");
      PrepareForBailoutForId(BailoutId::Declarations(), NO_REGISTERS);
      Label ok;
      ExternalReference stack_limit =
          ExternalReference::address_of_stack_limit(isolate());
      __ cmp(esp, Operand::StaticVariable(stack_limit));
      __ j(above_equal, &ok, Label::kNear);
      __ call(isolate()->builtins()->StackCheck(), RelocInfo::CODE_TARGET);
      __ bind(&ok);
    }

    { Comment cmnt(masm_, "[ Body");
      DCHECK(loop_depth() == 0);
      VisitStatements(literal()->body());
      DCHECK(loop_depth() == 0);
    }
  }

  // Always emit a 'return undefined' in case control fell off the end of
  // the body.
  { Comment cmnt(masm_, "[ return <undefined>;");
    __ mov(eax, isolate()->factory()->undefined_value());
    EmitReturnSequence();
  }
}


void FullCodeGenerator::ClearAccumulator() {
  __ Move(eax, Immediate(Smi::FromInt(0)));
}


void FullCodeGenerator::EmitProfilingCounterDecrement(int delta) {
  __ mov(ebx, Immediate(profiling_counter_));
  __ sub(FieldOperand(ebx, Cell::kValueOffset),
         Immediate(Smi::FromInt(delta)));
}


void FullCodeGenerator::EmitProfilingCounterReset() {
  int reset_value = FLAG_interrupt_budget;
  __ mov(ebx, Immediate(profiling_counter_));
  __ mov(FieldOperand(ebx, Cell::kValueOffset),
         Immediate(Smi::FromInt(reset_value)));
}


void FullCodeGenerator::EmitBackEdgeBookkeeping(IterationStatement* stmt,
                                                Label* back_edge_target) {
  Comment cmnt(masm_, "[ Back edge bookkeeping");
  Label ok;

  DCHECK(back_edge_target->is_bound());
  int distance = masm_->SizeOfCodeGeneratedSince(back_edge_target);
  int weight = Min(kMaxBackEdgeWeight,
                   Max(1, distance / kCodeSizeMultiplier));
  EmitProfilingCounterDecrement(weight);
  __ j(positive, &ok, Label::kNear);
  __ call(isolate()->builtins()->InterruptCheck(), RelocInfo::CODE_TARGET);

  // Record a mapping of this PC offset to the OSR id. This is used to find
  // the AST id from the unoptimized code in order to use it as a key into
  // the deoptimization input data found in the optimized code.
  RecordBackEdge(stmt->OsrEntryId());

  EmitProfilingCounterReset();

  __ bind(&ok);
  PrepareForBailoutForId(stmt->EntryId(), NO_REGISTERS);
  // Record a mapping of the OSR id to this PC. This is used if the OSR
  // entry becomes the target of a bailout. We don't expect it to be, but
  // we want it to work if it is.
  PrepareForBailoutForId(stmt->OsrEntryId(), NO_REGISTERS);
}
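// Illustrative arithmetic (an example only; both constants are
// per-architecture): a back edge 2000 bytes from the loop entry with a
// kCodeSizeMultiplier of 100 decrements the profiling counter by
// Min(kMaxBackEdgeWeight, Max(1, 2000 / 100)) == 20 per iteration, so
// larger or more frequently executed loops drain the interrupt budget
// faster and reach the InterruptCheck (and possible optimization) sooner.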

void FullCodeGenerator::EmitProfilingCounterHandlingForReturnSequence(
    bool is_tail_call) {
  // Pretend that the exit is a backwards jump to the entry.
  int weight = 1;
  if (info_->ShouldSelfOptimize()) {
    weight = FLAG_interrupt_budget / FLAG_self_opt_count;
  } else {
    int distance = masm_->pc_offset();
    weight = Min(kMaxBackEdgeWeight, Max(1, distance / kCodeSizeMultiplier));
  }
  EmitProfilingCounterDecrement(weight);
  Label ok;
  __ j(positive, &ok, Label::kNear);
  // Don't need to save result register if we are going to do a tail call.
  if (!is_tail_call) {
    __ push(eax);
  }
  __ call(isolate()->builtins()->InterruptCheck(), RelocInfo::CODE_TARGET);
  if (!is_tail_call) {
    __ pop(eax);
  }
  EmitProfilingCounterReset();
  __ bind(&ok);
}

void FullCodeGenerator::EmitReturnSequence() {
  Comment cmnt(masm_, "[ Return sequence");
  if (return_label_.is_bound()) {
    __ jmp(&return_label_);
  } else {
    // Common return label.
    __ bind(&return_label_);
    if (FLAG_trace) {
      __ push(eax);
      __ CallRuntime(Runtime::kTraceExit);
    }
    EmitProfilingCounterHandlingForReturnSequence(false);

    SetReturnPosition(literal());
    __ leave();

    int arg_count = info_->scope()->num_parameters() + 1;
    int arguments_bytes = arg_count * kPointerSize;
    __ Ret(arguments_bytes, ecx);
  }
}
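// Note on the return (as implied by the arg_count arithmetic above):
// __ Ret(arguments_bytes, ecx) returns and drops the caller-pushed receiver
// plus all formal parameters in one step. For example, a function with two
// declared parameters returns with arg_count == 3 and pops 12 bytes of
// operands on this 32-bit target.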


void FullCodeGenerator::StackValueContext::Plug(Variable* var) const {
  DCHECK(var->IsStackAllocated() || var->IsContextSlot());
  MemOperand operand = codegen()->VarOperand(var, result_register());
  // Memory operands can be pushed directly.
  codegen()->PushOperand(operand);
}


void FullCodeGenerator::EffectContext::Plug(Heap::RootListIndex index) const {
  UNREACHABLE();  // Not used on X87.
}


void FullCodeGenerator::AccumulatorValueContext::Plug(
    Heap::RootListIndex index) const {
  UNREACHABLE();  // Not used on X87.
}


void FullCodeGenerator::StackValueContext::Plug(
    Heap::RootListIndex index) const {
  UNREACHABLE();  // Not used on X87.
}


void FullCodeGenerator::TestContext::Plug(Heap::RootListIndex index) const {
  UNREACHABLE();  // Not used on X87.
}


void FullCodeGenerator::EffectContext::Plug(Handle<Object> lit) const {
}


void FullCodeGenerator::AccumulatorValueContext::Plug(
    Handle<Object> lit) const {
  if (lit->IsSmi()) {
    __ SafeMove(result_register(), Immediate(lit));
  } else {
    __ Move(result_register(), Immediate(lit));
  }
}


void FullCodeGenerator::StackValueContext::Plug(Handle<Object> lit) const {
  codegen()->OperandStackDepthIncrement(1);
  if (lit->IsSmi()) {
    __ SafePush(Immediate(lit));
  } else {
    __ push(Immediate(lit));
  }
}


void FullCodeGenerator::TestContext::Plug(Handle<Object> lit) const {
  codegen()->PrepareForBailoutBeforeSplit(condition(),
                                          true,
                                          true_label_,
                                          false_label_);
  DCHECK(lit->IsNull() || lit->IsUndefined() || !lit->IsUndetectableObject());
  if (lit->IsUndefined() || lit->IsNull() || lit->IsFalse()) {
    if (false_label_ != fall_through_) __ jmp(false_label_);
  } else if (lit->IsTrue() || lit->IsJSObject()) {
    if (true_label_ != fall_through_) __ jmp(true_label_);
  } else if (lit->IsString()) {
    if (String::cast(*lit)->length() == 0) {
      if (false_label_ != fall_through_) __ jmp(false_label_);
    } else {
      if (true_label_ != fall_through_) __ jmp(true_label_);
    }
  } else if (lit->IsSmi()) {
    if (Smi::cast(*lit)->value() == 0) {
      if (false_label_ != fall_through_) __ jmp(false_label_);
    } else {
      if (true_label_ != fall_through_) __ jmp(true_label_);
    }
  } else {
    // For simplicity we always test the accumulator register.
    __ mov(result_register(), lit);
    codegen()->DoTest(this);
  }
}


void FullCodeGenerator::StackValueContext::DropAndPlug(int count,
                                                       Register reg) const {
  DCHECK(count > 0);
  if (count > 1) codegen()->DropOperands(count - 1);
  __ mov(Operand(esp, 0), reg);
}


void FullCodeGenerator::EffectContext::Plug(Label* materialize_true,
                                            Label* materialize_false) const {
  DCHECK(materialize_true == materialize_false);
  __ bind(materialize_true);
}


void FullCodeGenerator::AccumulatorValueContext::Plug(
    Label* materialize_true,
    Label* materialize_false) const {
  Label done;
  __ bind(materialize_true);
  __ mov(result_register(), isolate()->factory()->true_value());
  __ jmp(&done, Label::kNear);
  __ bind(materialize_false);
  __ mov(result_register(), isolate()->factory()->false_value());
  __ bind(&done);
}


void FullCodeGenerator::StackValueContext::Plug(
    Label* materialize_true,
    Label* materialize_false) const {
  codegen()->OperandStackDepthIncrement(1);
  Label done;
  __ bind(materialize_true);
  __ push(Immediate(isolate()->factory()->true_value()));
  __ jmp(&done, Label::kNear);
  __ bind(materialize_false);
  __ push(Immediate(isolate()->factory()->false_value()));
  __ bind(&done);
}


void FullCodeGenerator::TestContext::Plug(Label* materialize_true,
                                          Label* materialize_false) const {
  DCHECK(materialize_true == true_label_);
  DCHECK(materialize_false == false_label_);
}


void FullCodeGenerator::AccumulatorValueContext::Plug(bool flag) const {
  Handle<Object> value = flag
      ? isolate()->factory()->true_value()
      : isolate()->factory()->false_value();
  __ mov(result_register(), value);
}


void FullCodeGenerator::StackValueContext::Plug(bool flag) const {
  codegen()->OperandStackDepthIncrement(1);
  Handle<Object> value = flag
      ? isolate()->factory()->true_value()
      : isolate()->factory()->false_value();
  __ push(Immediate(value));
}


void FullCodeGenerator::TestContext::Plug(bool flag) const {
  codegen()->PrepareForBailoutBeforeSplit(condition(),
                                          true,
                                          true_label_,
                                          false_label_);
  if (flag) {
    if (true_label_ != fall_through_) __ jmp(true_label_);
  } else {
    if (false_label_ != fall_through_) __ jmp(false_label_);
  }
}


void FullCodeGenerator::DoTest(Expression* condition,
                               Label* if_true,
                               Label* if_false,
                               Label* fall_through) {
  Handle<Code> ic = ToBooleanStub::GetUninitialized(isolate());
  CallIC(ic, condition->test_id());
  __ CompareRoot(result_register(), Heap::kTrueValueRootIndex);
  Split(equal, if_true, if_false, fall_through);
}


void FullCodeGenerator::Split(Condition cc,
                              Label* if_true,
                              Label* if_false,
                              Label* fall_through) {
  if (if_false == fall_through) {
    __ j(cc, if_true);
  } else if (if_true == fall_through) {
    __ j(NegateCondition(cc), if_false);
  } else {
    __ j(cc, if_true);
    __ jmp(if_false);
  }
}
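// Split emits the minimal branch sequence: when the false target is the
// fall-through only one conditional jump is needed; when the true target is
// the fall-through the condition is negated instead; only when neither
// target falls through does it cost a conditional jump plus an
// unconditional one.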


MemOperand FullCodeGenerator::StackOperand(Variable* var) {
  DCHECK(var->IsStackAllocated());
  // Offset is negative because higher indexes are at lower addresses.
  int offset = -var->index() * kPointerSize;
  // Adjust by a (parameter or local) base offset.
  if (var->IsParameter()) {
    offset += (info_->scope()->num_parameters() + 1) * kPointerSize;
  } else {
    offset += JavaScriptFrameConstants::kLocal0Offset;
  }
  return Operand(ebp, offset);
}
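// Worked example (layout assumed from the usual full-codegen ia32 frame;
// the exact constants live in frames-x87.h): a parameter's adjusted offset
// comes out positive, addressing the caller-pushed arguments above the
// saved frame pointer, while local 0 sits at ebp + kLocal0Offset below the
// saved context and function slots, with each further local one
// kPointerSize lower.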


MemOperand FullCodeGenerator::VarOperand(Variable* var, Register scratch) {
  DCHECK(var->IsContextSlot() || var->IsStackAllocated());
  if (var->IsContextSlot()) {
    int context_chain_length = scope()->ContextChainLength(var->scope());
    __ LoadContext(scratch, context_chain_length);
    return ContextOperand(scratch, var->index());
  } else {
    return StackOperand(var);
  }
}


void FullCodeGenerator::GetVar(Register dest, Variable* var) {
  DCHECK(var->IsContextSlot() || var->IsStackAllocated());
  MemOperand location = VarOperand(var, dest);
  __ mov(dest, location);
}


void FullCodeGenerator::SetVar(Variable* var,
                               Register src,
                               Register scratch0,
                               Register scratch1) {
  DCHECK(var->IsContextSlot() || var->IsStackAllocated());
  DCHECK(!scratch0.is(src));
  DCHECK(!scratch0.is(scratch1));
  DCHECK(!scratch1.is(src));
  MemOperand location = VarOperand(var, scratch0);
  __ mov(location, src);

  // Emit the write barrier code if the location is in the heap.
  if (var->IsContextSlot()) {
    int offset = Context::SlotOffset(var->index());
    DCHECK(!scratch0.is(esi) && !src.is(esi) && !scratch1.is(esi));
    __ RecordWriteContextSlot(scratch0, offset, src, scratch1, kDontSaveFPRegs);
  }
}


void FullCodeGenerator::PrepareForBailoutBeforeSplit(Expression* expr,
                                                     bool should_normalize,
                                                     Label* if_true,
                                                     Label* if_false) {
  // Only prepare for bailouts before splits if we're in a test
  // context. Otherwise, we let the Visit function deal with the
  // preparation to avoid preparing with the same AST id twice.
  if (!context()->IsTest()) return;

  Label skip;
  if (should_normalize) __ jmp(&skip, Label::kNear);
  PrepareForBailout(expr, TOS_REG);
  if (should_normalize) {
    __ cmp(eax, isolate()->factory()->true_value());
    Split(equal, if_true, if_false, NULL);
    __ bind(&skip);
  }
}


void FullCodeGenerator::EmitDebugCheckDeclarationContext(Variable* variable) {
  // The variable in the declaration always resides in the current context.
  DCHECK_EQ(0, scope()->ContextChainLength(variable->scope()));
  if (FLAG_debug_code) {
    // Check that we're not inside a with or catch context.
    __ mov(ebx, FieldOperand(esi, HeapObject::kMapOffset));
    __ cmp(ebx, isolate()->factory()->with_context_map());
    __ Check(not_equal, kDeclarationInWithContext);
    __ cmp(ebx, isolate()->factory()->catch_context_map());
    __ Check(not_equal, kDeclarationInCatchContext);
  }
}


void FullCodeGenerator::VisitVariableDeclaration(
    VariableDeclaration* declaration) {
  // If it was not possible to allocate the variable at compile time, we
  // need to "declare" it at runtime to make sure it actually exists in the
  // local context.
  VariableProxy* proxy = declaration->proxy();
  VariableMode mode = declaration->mode();
  Variable* variable = proxy->var();
  bool hole_init = mode == LET || mode == CONST || mode == CONST_LEGACY;
  switch (variable->location()) {
    case VariableLocation::GLOBAL:
    case VariableLocation::UNALLOCATED:
      globals_->Add(variable->name(), zone());
      globals_->Add(variable->binding_needs_init()
                        ? isolate()->factory()->the_hole_value()
                        : isolate()->factory()->undefined_value(), zone());
      break;

    case VariableLocation::PARAMETER:
    case VariableLocation::LOCAL:
      if (hole_init) {
        Comment cmnt(masm_, "[ VariableDeclaration");
        __ mov(StackOperand(variable),
               Immediate(isolate()->factory()->the_hole_value()));
      }
      break;

    case VariableLocation::CONTEXT:
      if (hole_init) {
        Comment cmnt(masm_, "[ VariableDeclaration");
        EmitDebugCheckDeclarationContext(variable);
        __ mov(ContextOperand(esi, variable->index()),
               Immediate(isolate()->factory()->the_hole_value()));
        // No write barrier since the hole value is in old space.
        PrepareForBailoutForId(proxy->id(), NO_REGISTERS);
      }
      break;

    case VariableLocation::LOOKUP: {
      Comment cmnt(masm_, "[ VariableDeclaration");
      __ push(Immediate(variable->name()));
      // VariableDeclaration nodes are always introduced in one of four modes.
      DCHECK(IsDeclaredVariableMode(mode));
      // Push initial value, if any.
      // Note: For variables we must not push an initial value (such as
      // 'undefined') because we may have a (legal) redeclaration and we
      // must not destroy the current value.
      if (hole_init) {
        __ push(Immediate(isolate()->factory()->the_hole_value()));
      } else {
        __ push(Immediate(Smi::FromInt(0)));  // Indicates no initial value.
      }
      __ push(
          Immediate(Smi::FromInt(variable->DeclarationPropertyAttributes())));
      __ CallRuntime(Runtime::kDeclareLookupSlot);
      break;
    }
  }
}
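// Hole initialization, illustrated: for `let x;` or `const x = ...;` the
// slot is pre-filled with the_hole so that a read before the binding's
// initialization point can be detected and turned into a ReferenceError
// (see the hole checks in EmitVariableLoad below), whereas a `var x;` slot
// simply starts out as undefined.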

void FullCodeGenerator::VisitFunctionDeclaration(
    FunctionDeclaration* declaration) {
  VariableProxy* proxy = declaration->proxy();
  Variable* variable = proxy->var();
  switch (variable->location()) {
    case VariableLocation::GLOBAL:
    case VariableLocation::UNALLOCATED: {
      globals_->Add(variable->name(), zone());
      Handle<SharedFunctionInfo> function =
          Compiler::GetSharedFunctionInfo(declaration->fun(), script(), info_);
      // Check for stack-overflow exception.
      if (function.is_null()) return SetStackOverflow();
      globals_->Add(function, zone());
      break;
    }

    case VariableLocation::PARAMETER:
    case VariableLocation::LOCAL: {
      Comment cmnt(masm_, "[ FunctionDeclaration");
      VisitForAccumulatorValue(declaration->fun());
      __ mov(StackOperand(variable), result_register());
      break;
    }

    case VariableLocation::CONTEXT: {
      Comment cmnt(masm_, "[ FunctionDeclaration");
      EmitDebugCheckDeclarationContext(variable);
      VisitForAccumulatorValue(declaration->fun());
      __ mov(ContextOperand(esi, variable->index()), result_register());
      // We know that we have written a function, which is not a smi.
      __ RecordWriteContextSlot(esi, Context::SlotOffset(variable->index()),
                                result_register(), ecx, kDontSaveFPRegs,
                                EMIT_REMEMBERED_SET, OMIT_SMI_CHECK);
      PrepareForBailoutForId(proxy->id(), NO_REGISTERS);
      break;
    }

    case VariableLocation::LOOKUP: {
      Comment cmnt(masm_, "[ FunctionDeclaration");
      PushOperand(variable->name());
      VisitForStackValue(declaration->fun());
      PushOperand(Smi::FromInt(variable->DeclarationPropertyAttributes()));
      CallRuntimeWithOperands(Runtime::kDeclareLookupSlot);
      break;
    }
  }
}


void FullCodeGenerator::DeclareGlobals(Handle<FixedArray> pairs) {
  // Call the runtime to declare the globals.
  __ Push(pairs);
  __ Push(Smi::FromInt(DeclareGlobalsFlags()));
  __ CallRuntime(Runtime::kDeclareGlobals);
  // Return value is ignored.
}


void FullCodeGenerator::DeclareModules(Handle<FixedArray> descriptions) {
  // Call the runtime to declare the modules.
  __ Push(descriptions);
  __ CallRuntime(Runtime::kDeclareModules);
  // Return value is ignored.
}

void FullCodeGenerator::VisitSwitchStatement(SwitchStatement* stmt) {
  Comment cmnt(masm_, "[ SwitchStatement");
  Breakable nested_statement(this, stmt);
  SetStatementPosition(stmt);

  // Keep the switch value on the stack until a case matches.
  VisitForStackValue(stmt->tag());
  PrepareForBailoutForId(stmt->EntryId(), NO_REGISTERS);

  ZoneList<CaseClause*>* clauses = stmt->cases();
  CaseClause* default_clause = NULL;  // Can occur anywhere in the list.

  Label next_test;  // Recycled for each test.
  // Compile all the tests with branches to their bodies.
  for (int i = 0; i < clauses->length(); i++) {
    CaseClause* clause = clauses->at(i);
    clause->body_target()->Unuse();

    // The default is not a test, but remember it as final fall through.
    if (clause->is_default()) {
      default_clause = clause;
      continue;
    }

    Comment cmnt(masm_, "[ Case comparison");
    __ bind(&next_test);
    next_test.Unuse();

    // Compile the label expression.
    VisitForAccumulatorValue(clause->label());

    // Perform the comparison as if via '==='.
    __ mov(edx, Operand(esp, 0));  // Switch value.
    bool inline_smi_code = ShouldInlineSmiCase(Token::EQ_STRICT);
    JumpPatchSite patch_site(masm_);
    if (inline_smi_code) {
      Label slow_case;
      __ mov(ecx, edx);
      __ or_(ecx, eax);
      patch_site.EmitJumpIfNotSmi(ecx, &slow_case, Label::kNear);

      __ cmp(edx, eax);
      __ j(not_equal, &next_test);
      __ Drop(1);  // Switch value is no longer needed.
      __ jmp(clause->body_target());
      __ bind(&slow_case);
    }

    SetExpressionPosition(clause);
    Handle<Code> ic =
        CodeFactory::CompareIC(isolate(), Token::EQ_STRICT).code();
    CallIC(ic, clause->CompareId());
    patch_site.EmitPatchInfo();

    Label skip;
    __ jmp(&skip, Label::kNear);
    PrepareForBailout(clause, TOS_REG);
    __ cmp(eax, isolate()->factory()->true_value());
    __ j(not_equal, &next_test);
    __ Drop(1);
    __ jmp(clause->body_target());
    __ bind(&skip);

    __ test(eax, eax);
    __ j(not_equal, &next_test);
    __ Drop(1);  // Switch value is no longer needed.
    __ jmp(clause->body_target());
  }

  // Discard the test value and jump to the default if present, otherwise to
  // the end of the statement.
  __ bind(&next_test);
  DropOperands(1);  // Switch value is no longer needed.
  if (default_clause == NULL) {
    __ jmp(nested_statement.break_label());
  } else {
    __ jmp(default_clause->body_target());
  }

  // Compile all the case bodies.
  for (int i = 0; i < clauses->length(); i++) {
    Comment cmnt(masm_, "[ Case body");
    CaseClause* clause = clauses->at(i);
    __ bind(clause->body_target());
    PrepareForBailoutForId(clause->EntryId(), NO_REGISTERS);
    VisitStatements(clause->statements());
  }

  __ bind(nested_statement.break_label());
  PrepareForBailoutForId(stmt->ExitId(), NO_REGISTERS);
}
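// The inlined smi fast path above relies on a tag trick: or-ing the two
// comparison operands into ecx means the combined low bit (kSmiTagMask) is
// clear only when both values are smis, so a single JumpPatchSite test
// covers both operands before the raw cmp/j(not_equal) comparison.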


void FullCodeGenerator::VisitForInStatement(ForInStatement* stmt) {
  Comment cmnt(masm_, "[ ForInStatement");
  SetStatementPosition(stmt, SKIP_BREAK);

  FeedbackVectorSlot slot = stmt->ForInFeedbackSlot();

  Label loop, exit;
  ForIn loop_statement(this, stmt);
  increment_loop_depth();

  // Get the object to enumerate over.
  SetExpressionAsStatementPosition(stmt->enumerable());
  VisitForAccumulatorValue(stmt->enumerable());
  OperandStackDepthIncrement(ForIn::kElementCount);

  // If the object is null or undefined, skip over the loop, otherwise convert
  // it to a JS receiver. See ECMA-262 version 5, section 12.6.4.
  Label convert, done_convert;
  __ JumpIfSmi(eax, &convert, Label::kNear);
  __ CmpObjectType(eax, FIRST_JS_RECEIVER_TYPE, ecx);
  __ j(above_equal, &done_convert, Label::kNear);
  __ cmp(eax, isolate()->factory()->undefined_value());
  __ j(equal, &exit);
  __ cmp(eax, isolate()->factory()->null_value());
  __ j(equal, &exit);
  __ bind(&convert);
  ToObjectStub stub(isolate());
  __ CallStub(&stub);
  __ bind(&done_convert);
  PrepareForBailoutForId(stmt->ToObjectId(), TOS_REG);
  __ push(eax);

  // Check cache validity in generated code. This is a fast case for
  // the JSObject::IsSimpleEnum cache validity checks. If we cannot
  // guarantee cache validity, call the runtime system to check cache
  // validity or get the property names in a fixed array.
  // Note: Proxies never have an enum cache, so will always take the
  // slow path.
  Label call_runtime, use_cache, fixed_array;
  __ CheckEnumCache(&call_runtime);

  __ mov(eax, FieldOperand(eax, HeapObject::kMapOffset));
  __ jmp(&use_cache, Label::kNear);

  // Get the set of properties to enumerate.
  __ bind(&call_runtime);
  __ push(eax);
  __ CallRuntime(Runtime::kForInEnumerate);
  PrepareForBailoutForId(stmt->EnumId(), TOS_REG);
  __ cmp(FieldOperand(eax, HeapObject::kMapOffset),
         isolate()->factory()->meta_map());
  __ j(not_equal, &fixed_array);


  // We got a map in register eax. Get the enumeration cache from it.
  Label no_descriptors;
  __ bind(&use_cache);

  __ EnumLength(edx, eax);
  __ cmp(edx, Immediate(Smi::FromInt(0)));
  __ j(equal, &no_descriptors);

  __ LoadInstanceDescriptors(eax, ecx);
  __ mov(ecx, FieldOperand(ecx, DescriptorArray::kEnumCacheOffset));
  __ mov(ecx, FieldOperand(ecx, DescriptorArray::kEnumCacheBridgeCacheOffset));

  // Set up the four remaining stack slots.
  __ push(eax);  // Map.
  __ push(ecx);  // Enumeration cache.
  __ push(edx);  // Number of valid entries for the map in the enum cache.
  __ push(Immediate(Smi::FromInt(0)));  // Initial index.
  __ jmp(&loop);

  __ bind(&no_descriptors);
  __ add(esp, Immediate(kPointerSize));
  __ jmp(&exit);

  // We got a fixed array in register eax. Iterate through that.
  __ bind(&fixed_array);

  // No need for a write barrier, we are storing a Smi in the feedback vector.
  int const vector_index = SmiFromSlot(slot)->value();
  __ EmitLoadTypeFeedbackVector(ebx);
  __ mov(FieldOperand(ebx, FixedArray::OffsetOfElementAt(vector_index)),
         Immediate(TypeFeedbackVector::MegamorphicSentinel(isolate())));
  __ push(Immediate(Smi::FromInt(1)));  // Smi(1) indicates slow check.
  __ push(eax);  // Array.
  __ mov(eax, FieldOperand(eax, FixedArray::kLengthOffset));
  __ push(eax);  // Fixed array length (as smi).
  PrepareForBailoutForId(stmt->PrepareId(), NO_REGISTERS);
  __ push(Immediate(Smi::FromInt(0)));  // Initial index.
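  // Loop-state stack layout from here on (five slots, matching
  // ForIn::kElementCount and the esp-relative loads below):
  //   esp[0]  : current index (smi)
  //   esp[4]  : length / number of valid entries (smi)
  //   esp[8]  : enum cache, or fixed array of keys in the slow case
  //   esp[12] : expected map, or Smi(1) in the slow case
  //   esp[16] : the enumerable object itself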

  // Generate code for doing the condition check.
  __ bind(&loop);
  SetExpressionAsStatementPosition(stmt->each());

  __ mov(eax, Operand(esp, 0 * kPointerSize));  // Get the current index.
  __ cmp(eax, Operand(esp, 1 * kPointerSize));  // Compare to the array length.
  __ j(above_equal, loop_statement.break_label());

  // Get the current entry of the array into register ebx.
  __ mov(ebx, Operand(esp, 2 * kPointerSize));
  __ mov(ebx, FieldOperand(ebx, eax, times_2, FixedArray::kHeaderSize));

  // Get the expected map from the stack or a smi in the
  // permanent slow case into register edx.
  __ mov(edx, Operand(esp, 3 * kPointerSize));

  // Check if the expected map still matches that of the enumerable.
  // If not, we may have to filter the key.
  Label update_each;
  __ mov(ecx, Operand(esp, 4 * kPointerSize));
  __ cmp(edx, FieldOperand(ecx, HeapObject::kMapOffset));
  __ j(equal, &update_each, Label::kNear);
  // We might get here from TurboFan or Crankshaft when something in the
  // for-in loop body deopts and we only now notice in fullcodegen that we
  // can no longer use the enum cache, i.e. we have left fast mode. Record
  // this information here, in case we later OSR back into this loop or
  // reoptimize the whole function without rerunning the loop with the
  // slow-mode object in fullcodegen (which would result in a deopt loop).
  __ EmitLoadTypeFeedbackVector(edx);
  __ mov(FieldOperand(edx, FixedArray::OffsetOfElementAt(vector_index)),
         Immediate(TypeFeedbackVector::MegamorphicSentinel(isolate())));

  // Convert the entry to a string or null if it isn't a property
  // anymore. If the property has been removed while iterating, we
  // just skip it.
  __ push(ecx);  // Enumerable.
  __ push(ebx);  // Current entry.
  __ CallRuntime(Runtime::kForInFilter);
  PrepareForBailoutForId(stmt->FilterId(), TOS_REG);
  __ cmp(eax, isolate()->factory()->undefined_value());
  __ j(equal, loop_statement.continue_label());
  __ mov(ebx, eax);

  // Update the 'each' property or variable from the possibly filtered
  // entry in register ebx.
  __ bind(&update_each);
  __ mov(result_register(), ebx);
  // Perform the assignment as if via '='.
  { EffectContext context(this);
    EmitAssignment(stmt->each(), stmt->EachFeedbackSlot());
    PrepareForBailoutForId(stmt->AssignmentId(), NO_REGISTERS);
  }

  // Both Crankshaft and Turbofan expect BodyId to be right before stmt->body().
  PrepareForBailoutForId(stmt->BodyId(), NO_REGISTERS);
  // Generate code for the body of the loop.
  Visit(stmt->body());

  // Generate code for going to the next element by incrementing the
  // index (smi) stored on top of the stack.
  __ bind(loop_statement.continue_label());
  __ add(Operand(esp, 0 * kPointerSize), Immediate(Smi::FromInt(1)));

  EmitBackEdgeBookkeeping(stmt, &loop);
  __ jmp(&loop);

  // Remove the pointers stored on the stack.
  __ bind(loop_statement.break_label());
  __ add(esp, Immediate(5 * kPointerSize));
  OperandStackDepthDecrement(ForIn::kElementCount);

  // Exit and decrement the loop depth.
  PrepareForBailoutForId(stmt->ExitId(), NO_REGISTERS);
  __ bind(&exit);
  decrement_loop_depth();
}


void FullCodeGenerator::EmitNewClosure(Handle<SharedFunctionInfo> info,
                                       bool pretenure) {
  // Use the fast case closure allocation code that allocates in new
  // space for nested functions that don't need literals cloning. If
  // we're running with the --always-opt or the --prepare-always-opt
  // flag, we need to use the runtime function so that the new function
  // we are creating here gets a chance to have its code optimized and
  // doesn't just get a copy of the existing unoptimized code.
  if (!FLAG_always_opt &&
      !FLAG_prepare_always_opt &&
      !pretenure &&
      scope()->is_function_scope() &&
      info->num_literals() == 0) {
    FastNewClosureStub stub(isolate(), info->language_mode(), info->kind());
    __ mov(ebx, Immediate(info));
    __ CallStub(&stub);
  } else {
    __ push(Immediate(info));
    __ CallRuntime(pretenure ? Runtime::kNewClosure_Tenured
                             : Runtime::kNewClosure);
  }
  context()->Plug(eax);
}


void FullCodeGenerator::EmitSetHomeObject(Expression* initializer, int offset,
                                          FeedbackVectorSlot slot) {
  DCHECK(NeedsHomeObject(initializer));
  __ mov(StoreDescriptor::ReceiverRegister(), Operand(esp, 0));
  __ mov(StoreDescriptor::NameRegister(),
         Immediate(isolate()->factory()->home_object_symbol()));
  __ mov(StoreDescriptor::ValueRegister(), Operand(esp, offset * kPointerSize));
  EmitLoadStoreICSlot(slot);
  CallStoreIC();
}


void FullCodeGenerator::EmitSetHomeObjectAccumulator(Expression* initializer,
                                                     int offset,
                                                     FeedbackVectorSlot slot) {
  DCHECK(NeedsHomeObject(initializer));
  __ mov(StoreDescriptor::ReceiverRegister(), eax);
  __ mov(StoreDescriptor::NameRegister(),
         Immediate(isolate()->factory()->home_object_symbol()));
  __ mov(StoreDescriptor::ValueRegister(), Operand(esp, offset * kPointerSize));
  EmitLoadStoreICSlot(slot);
  CallStoreIC();
}


void FullCodeGenerator::EmitLoadGlobalCheckExtensions(VariableProxy* proxy,
                                                      TypeofMode typeof_mode,
                                                      Label* slow) {
  Register context = esi;
  Register temp = edx;

  Scope* s = scope();
  while (s != NULL) {
    if (s->num_heap_slots() > 0) {
      if (s->calls_sloppy_eval()) {
        // Check that extension is "the hole".
        __ JumpIfNotRoot(ContextOperand(context, Context::EXTENSION_INDEX),
                         Heap::kTheHoleValueRootIndex, slow);
      }
      // Load next context in chain.
      __ mov(temp, ContextOperand(context, Context::PREVIOUS_INDEX));
      // Walk the rest of the chain without clobbering esi.
      context = temp;
    }
    // If no outer scope calls eval, we do not need to check more
    // context extensions. If we have reached an eval scope, we check
    // all extensions from this point.
    if (!s->outer_scope_calls_sloppy_eval() || s->is_eval_scope()) break;
    s = s->outer_scope();
  }

  if (s != NULL && s->is_eval_scope()) {
    // Loop up the context chain. There is no frame effect so it is
    // safe to use raw labels here.
    Label next, fast;
    if (!context.is(temp)) {
      __ mov(temp, context);
    }
    __ bind(&next);
    // Terminate at native context.
    __ cmp(FieldOperand(temp, HeapObject::kMapOffset),
           Immediate(isolate()->factory()->native_context_map()));
    __ j(equal, &fast, Label::kNear);
    // Check that extension is "the hole".
    __ JumpIfNotRoot(ContextOperand(temp, Context::EXTENSION_INDEX),
                     Heap::kTheHoleValueRootIndex, slow);
    // Load next context in chain.
    __ mov(temp, ContextOperand(temp, Context::PREVIOUS_INDEX));
    __ jmp(&next);
    __ bind(&fast);
  }

  // All extension objects were empty and it is safe to use a normal global
  // load machinery.
  EmitGlobalVariableLoad(proxy, typeof_mode);
}


MemOperand FullCodeGenerator::ContextSlotOperandCheckExtensions(Variable* var,
                                                                Label* slow) {
  DCHECK(var->IsContextSlot());
  Register context = esi;
  Register temp = ebx;

  for (Scope* s = scope(); s != var->scope(); s = s->outer_scope()) {
    if (s->num_heap_slots() > 0) {
      if (s->calls_sloppy_eval()) {
        // Check that extension is "the hole".
        __ JumpIfNotRoot(ContextOperand(context, Context::EXTENSION_INDEX),
                         Heap::kTheHoleValueRootIndex, slow);
      }
      __ mov(temp, ContextOperand(context, Context::PREVIOUS_INDEX));
      // Walk the rest of the chain without clobbering esi.
      context = temp;
    }
  }
  // Check that last extension is "the hole".
  __ JumpIfNotRoot(ContextOperand(context, Context::EXTENSION_INDEX),
                   Heap::kTheHoleValueRootIndex, slow);

  // This function is used only for loads, not stores, so it's safe to
  // return an esi-based operand (the write barrier cannot be allowed to
  // destroy the esi register).
  return ContextOperand(context, var->index());
}


void FullCodeGenerator::EmitDynamicLookupFastCase(VariableProxy* proxy,
                                                  TypeofMode typeof_mode,
                                                  Label* slow, Label* done) {
  // Generate fast-case code for variables that might be shadowed by
  // eval-introduced variables. Eval is used a lot without
  // introducing variables. In those cases, we do not want to
  // perform a runtime call for all variables in the scope
  // containing the eval.
  Variable* var = proxy->var();
  if (var->mode() == DYNAMIC_GLOBAL) {
    EmitLoadGlobalCheckExtensions(proxy, typeof_mode, slow);
    __ jmp(done);
  } else if (var->mode() == DYNAMIC_LOCAL) {
    Variable* local = var->local_if_not_shadowed();
    __ mov(eax, ContextSlotOperandCheckExtensions(local, slow));
    if (local->mode() == LET || local->mode() == CONST ||
        local->mode() == CONST_LEGACY) {
      __ cmp(eax, isolate()->factory()->the_hole_value());
      __ j(not_equal, done);
      if (local->mode() == CONST_LEGACY) {
        __ mov(eax, isolate()->factory()->undefined_value());
      } else {  // LET || CONST
        __ push(Immediate(var->name()));
        __ CallRuntime(Runtime::kThrowReferenceError);
      }
    }
    __ jmp(done);
  }
}
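// Fast-path intuition (an explanatory note, not original commentary): a
// variable only needs a dynamic lookup when a sloppy-mode eval in scope
// could have introduced a shadowing binding at runtime. The code above
// therefore checks that every intervening context's extension slot is
// still "the hole" (nothing was eval-declared) and then falls through to
// the ordinary global or context-slot load; only if a check fails does it
// branch to the slow runtime lookup.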


void FullCodeGenerator::EmitGlobalVariableLoad(VariableProxy* proxy,
                                               TypeofMode typeof_mode) {
  Variable* var = proxy->var();
  DCHECK(var->IsUnallocatedOrGlobalSlot() ||
         (var->IsLookupSlot() && var->mode() == DYNAMIC_GLOBAL));
  __ mov(LoadDescriptor::ReceiverRegister(), NativeContextOperand());
  __ mov(LoadDescriptor::ReceiverRegister(),
         ContextOperand(LoadDescriptor::ReceiverRegister(),
                        Context::EXTENSION_INDEX));
  __ mov(LoadDescriptor::NameRegister(), var->name());
  __ mov(LoadDescriptor::SlotRegister(),
         Immediate(SmiFromSlot(proxy->VariableFeedbackSlot())));
  CallLoadIC(typeof_mode);
}


void FullCodeGenerator::EmitVariableLoad(VariableProxy* proxy,
                                         TypeofMode typeof_mode) {
  SetExpressionPosition(proxy);
  PrepareForBailoutForId(proxy->BeforeId(), NO_REGISTERS);
  Variable* var = proxy->var();

  // Three cases: global variables, lookup variables, and all other types of
  // variables.
  switch (var->location()) {
    case VariableLocation::GLOBAL:
    case VariableLocation::UNALLOCATED: {
      Comment cmnt(masm_, "[ Global variable");
      EmitGlobalVariableLoad(proxy, typeof_mode);
      context()->Plug(eax);
      break;
    }

    case VariableLocation::PARAMETER:
    case VariableLocation::LOCAL:
    case VariableLocation::CONTEXT: {
      DCHECK_EQ(NOT_INSIDE_TYPEOF, typeof_mode);
      Comment cmnt(masm_, var->IsContextSlot() ? "[ Context variable"
                                               : "[ Stack variable");

      if (NeedsHoleCheckForLoad(proxy)) {
        // Let and const need a read barrier.
        Label done;
        GetVar(eax, var);
        __ cmp(eax, isolate()->factory()->the_hole_value());
        __ j(not_equal, &done, Label::kNear);
        if (var->mode() == LET || var->mode() == CONST) {
          // Throw a reference error when using an uninitialized let/const
          // binding in harmony mode.
          __ push(Immediate(var->name()));
          __ CallRuntime(Runtime::kThrowReferenceError);
        } else {
          // Uninitialized legacy const bindings are unholed.
          DCHECK(var->mode() == CONST_LEGACY);
          __ mov(eax, isolate()->factory()->undefined_value());
        }
        __ bind(&done);
        context()->Plug(eax);
        break;
      }
      context()->Plug(var);
      break;
    }

    case VariableLocation::LOOKUP: {
      Comment cmnt(masm_, "[ Lookup variable");
      Label done, slow;
      // Generate code for loading from variables potentially shadowed
      // by eval-introduced variables.
      EmitDynamicLookupFastCase(proxy, typeof_mode, &slow, &done);
      __ bind(&slow);
      __ push(Immediate(var->name()));
      Runtime::FunctionId function_id =
          typeof_mode == NOT_INSIDE_TYPEOF
              ? Runtime::kLoadLookupSlot
              : Runtime::kLoadLookupSlotInsideTypeof;
      __ CallRuntime(function_id);
      __ bind(&done);
      context()->Plug(eax);
      break;
    }
  }
}


void FullCodeGenerator::VisitRegExpLiteral(RegExpLiteral* expr) {
  Comment cmnt(masm_, "[ RegExpLiteral");
  __ mov(edi, Operand(ebp, JavaScriptFrameConstants::kFunctionOffset));
  __ Move(eax, Immediate(Smi::FromInt(expr->literal_index())));
  __ Move(ecx, Immediate(expr->pattern()));
  __ Move(edx, Immediate(Smi::FromInt(expr->flags())));
  FastCloneRegExpStub stub(isolate());
  __ CallStub(&stub);
  context()->Plug(eax);
}


void FullCodeGenerator::EmitAccessor(ObjectLiteralProperty* property) {
  Expression* expression = (property == NULL) ? NULL : property->value();
  if (expression == NULL) {
    PushOperand(isolate()->factory()->null_value());
  } else {
    VisitForStackValue(expression);
    if (NeedsHomeObject(expression)) {
      DCHECK(property->kind() == ObjectLiteral::Property::GETTER ||
             property->kind() == ObjectLiteral::Property::SETTER);
      int offset = property->kind() == ObjectLiteral::Property::GETTER ? 2 : 3;
      EmitSetHomeObject(expression, offset, property->GetSlot());
    }
  }
}
1368
1369
1370void FullCodeGenerator::VisitObjectLiteral(ObjectLiteral* expr) {
1371 Comment cmnt(masm_, "[ ObjectLiteral");
1372
1373 Handle<FixedArray> constant_properties = expr->constant_properties();
1374 int flags = expr->ComputeFlags();
1375 // If any of the keys would store to the elements array, then we shouldn't
1376 // allow it.
1377 if (MustCreateObjectLiteralWithRuntime(expr)) {
1378 __ push(Operand(ebp, JavaScriptFrameConstants::kFunctionOffset));
1379 __ push(Immediate(Smi::FromInt(expr->literal_index())));
1380 __ push(Immediate(constant_properties));
1381 __ push(Immediate(Smi::FromInt(flags)));
1382 __ CallRuntime(Runtime::kCreateObjectLiteral);
1383 } else {
1384 __ mov(eax, Operand(ebp, JavaScriptFrameConstants::kFunctionOffset));
1385 __ mov(ebx, Immediate(Smi::FromInt(expr->literal_index())));
1386 __ mov(ecx, Immediate(constant_properties));
1387 __ mov(edx, Immediate(Smi::FromInt(flags)));
1388 FastCloneShallowObjectStub stub(isolate(), expr->properties_count());
1389 __ CallStub(&stub);
1390 }
1391 PrepareForBailoutForId(expr->CreateLiteralId(), TOS_REG);
1392
1393 // If result_saved is true the result is on top of the stack. If
1394 // result_saved is false the result is in eax.
1395 bool result_saved = false;
1396
1397 AccessorTable accessor_table(zone());
1398 int property_index = 0;
1399 for (; property_index < expr->properties()->length(); property_index++) {
1400 ObjectLiteral::Property* property = expr->properties()->at(property_index);
1401 if (property->is_computed_name()) break;
1402 if (property->IsCompileTimeValue()) continue;
1403
1404 Literal* key = property->key()->AsLiteral();
1405 Expression* value = property->value();
1406 if (!result_saved) {
Ben Murdoch097c5b22016-05-18 11:27:45 +01001407 PushOperand(eax); // Save result on the stack
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00001408 result_saved = true;
1409 }
1410 switch (property->kind()) {
1411 case ObjectLiteral::Property::CONSTANT:
1412 UNREACHABLE();
1413 case ObjectLiteral::Property::MATERIALIZED_LITERAL:
1414 DCHECK(!CompileTimeValue::IsCompileTimeValue(value));
1415 // Fall through.
1416 case ObjectLiteral::Property::COMPUTED:
1417 // It is safe to use [[Put]] here because the boilerplate already
1418 // contains computed properties with an uninitialized value.
1419 if (key->value()->IsInternalizedString()) {
1420 if (property->emit_store()) {
1421 VisitForAccumulatorValue(value);
1422 DCHECK(StoreDescriptor::ValueRegister().is(eax));
1423 __ mov(StoreDescriptor::NameRegister(), Immediate(key->value()));
1424 __ mov(StoreDescriptor::ReceiverRegister(), Operand(esp, 0));
1425 EmitLoadStoreICSlot(property->GetSlot(0));
1426 CallStoreIC();
1427 PrepareForBailoutForId(key->id(), NO_REGISTERS);
1428 if (NeedsHomeObject(value)) {
1429 EmitSetHomeObjectAccumulator(value, 0, property->GetSlot(1));
1430 }
1431 } else {
1432 VisitForEffect(value);
1433 }
1434 break;
1435 }
Ben Murdoch097c5b22016-05-18 11:27:45 +01001436 PushOperand(Operand(esp, 0)); // Duplicate receiver.
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00001437 VisitForStackValue(key);
1438 VisitForStackValue(value);
1439 if (property->emit_store()) {
1440 if (NeedsHomeObject(value)) {
1441 EmitSetHomeObject(value, 2, property->GetSlot());
1442 }
Ben Murdoch097c5b22016-05-18 11:27:45 +01001443 PushOperand(Smi::FromInt(SLOPPY)); // Language mode
1444 CallRuntimeWithOperands(Runtime::kSetProperty);
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00001445 } else {
Ben Murdoch097c5b22016-05-18 11:27:45 +01001446 DropOperands(3);
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00001447 }
1448 break;
1449 case ObjectLiteral::Property::PROTOTYPE:
Ben Murdoch097c5b22016-05-18 11:27:45 +01001450 PushOperand(Operand(esp, 0)); // Duplicate receiver.
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00001451 VisitForStackValue(value);
1452 DCHECK(property->emit_store());
Ben Murdoch097c5b22016-05-18 11:27:45 +01001453 CallRuntimeWithOperands(Runtime::kInternalSetPrototype);
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00001454 PrepareForBailoutForId(expr->GetIdForPropertySet(property_index),
1455 NO_REGISTERS);
1456 break;
1457 case ObjectLiteral::Property::GETTER:
1458 if (property->emit_store()) {
1459 accessor_table.lookup(key)->second->getter = property;
1460 }
1461 break;
1462 case ObjectLiteral::Property::SETTER:
1463 if (property->emit_store()) {
1464 accessor_table.lookup(key)->second->setter = property;
1465 }
1466 break;
1467 }
1468 }
1469
1470 // Emit code to define accessors, using only a single call to the runtime for
1471 // each pair of corresponding getters and setters.
1472 for (AccessorTable::Iterator it = accessor_table.begin();
1473 it != accessor_table.end();
1474 ++it) {
Ben Murdoch097c5b22016-05-18 11:27:45 +01001475 PushOperand(Operand(esp, 0)); // Duplicate receiver.
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00001476 VisitForStackValue(it->first);
1477
1478 EmitAccessor(it->second->getter);
1479 EmitAccessor(it->second->setter);
1480
Ben Murdoch097c5b22016-05-18 11:27:45 +01001481 PushOperand(Smi::FromInt(NONE));
1482 CallRuntimeWithOperands(Runtime::kDefineAccessorPropertyUnchecked);
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00001483 }
1484
  // Object literals have two parts. The "static" part on the left contains no
  // computed property names, and so we can compute its map ahead of time; see
  // runtime.cc::CreateObjectLiteralBoilerplate. The second "dynamic" part
  // starts with the first computed property name, and continues with all
  // properties to its right. All the code from above initializes the static
  // component of the object literal, and arranges for the map of the result to
  // reflect the static order in which the keys appear. For the dynamic
  // properties, we compile them into a series of "SetOwnProperty" runtime
  // calls. This will preserve insertion order.
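  // For example (hypothetical source): in { a: 1, [k]: 2, b: 3 } only 'a'
  // belongs to the static part; [k] and every property after it, including
  // 'b', is defined by the runtime calls emitted below.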
  for (; property_index < expr->properties()->length(); property_index++) {
    ObjectLiteral::Property* property = expr->properties()->at(property_index);

    Expression* value = property->value();
    if (!result_saved) {
      PushOperand(eax);  // Save result on the stack
      result_saved = true;
    }

    PushOperand(Operand(esp, 0));  // Duplicate receiver.

    if (property->kind() == ObjectLiteral::Property::PROTOTYPE) {
      DCHECK(!property->is_computed_name());
      VisitForStackValue(value);
      DCHECK(property->emit_store());
      CallRuntimeWithOperands(Runtime::kInternalSetPrototype);
      PrepareForBailoutForId(expr->GetIdForPropertySet(property_index),
                             NO_REGISTERS);
    } else {
      EmitPropertyKey(property, expr->GetIdForPropertyName(property_index));
      VisitForStackValue(value);
      if (NeedsHomeObject(value)) {
        EmitSetHomeObject(value, 2, property->GetSlot());
      }

      switch (property->kind()) {
        case ObjectLiteral::Property::CONSTANT:
        case ObjectLiteral::Property::MATERIALIZED_LITERAL:
        case ObjectLiteral::Property::COMPUTED:
          if (property->emit_store()) {
            PushOperand(Smi::FromInt(NONE));
            PushOperand(Smi::FromInt(property->NeedsSetFunctionName()));
            CallRuntimeWithOperands(Runtime::kDefineDataPropertyInLiteral);
          } else {
            DropOperands(3);
          }
          break;

        case ObjectLiteral::Property::PROTOTYPE:
          UNREACHABLE();
          break;

        case ObjectLiteral::Property::GETTER:
          PushOperand(Smi::FromInt(NONE));
          CallRuntimeWithOperands(Runtime::kDefineGetterPropertyUnchecked);
          break;

        case ObjectLiteral::Property::SETTER:
          PushOperand(Smi::FromInt(NONE));
          CallRuntimeWithOperands(Runtime::kDefineSetterPropertyUnchecked);
          break;
      }
    }
  }

  if (expr->has_function()) {
    DCHECK(result_saved);
    __ push(Operand(esp, 0));
    __ CallRuntime(Runtime::kToFastProperties);
  }

  if (result_saved) {
    context()->PlugTOS();
  } else {
    context()->Plug(eax);
  }
}

void FullCodeGenerator::VisitArrayLiteral(ArrayLiteral* expr) {
  Comment cmnt(masm_, "[ ArrayLiteral");

  Handle<FixedArray> constant_elements = expr->constant_elements();
  bool has_constant_fast_elements =
      IsFastObjectElementsKind(expr->constant_elements_kind());

  AllocationSiteMode allocation_site_mode = TRACK_ALLOCATION_SITE;
  if (has_constant_fast_elements && !FLAG_allocation_site_pretenuring) {
    // If the only customer of allocation sites is transitioning, then
    // we can turn it off if we don't have anywhere else to transition to.
    allocation_site_mode = DONT_TRACK_ALLOCATION_SITE;
  }

  if (MustCreateArrayLiteralWithRuntime(expr)) {
    __ push(Operand(ebp, JavaScriptFrameConstants::kFunctionOffset));
    __ push(Immediate(Smi::FromInt(expr->literal_index())));
    __ push(Immediate(constant_elements));
    __ push(Immediate(Smi::FromInt(expr->ComputeFlags())));
    __ CallRuntime(Runtime::kCreateArrayLiteral);
  } else {
    __ mov(eax, Operand(ebp, JavaScriptFrameConstants::kFunctionOffset));
    __ mov(ebx, Immediate(Smi::FromInt(expr->literal_index())));
    __ mov(ecx, Immediate(constant_elements));
    FastCloneShallowArrayStub stub(isolate(), allocation_site_mode);
    __ CallStub(&stub);
  }
  PrepareForBailoutForId(expr->CreateLiteralId(), TOS_REG);

  bool result_saved = false;  // Is the result saved to the stack?
  ZoneList<Expression*>* subexprs = expr->values();
  int length = subexprs->length();

  // Emit code to evaluate all the non-constant subexpressions and to store
  // them into the newly cloned array.
  int array_index = 0;
  for (; array_index < length; array_index++) {
    Expression* subexpr = subexprs->at(array_index);
    DCHECK(!subexpr->IsSpread());

    // If the subexpression is a literal or a simple materialized literal it
    // is already set in the cloned array.
    if (CompileTimeValue::IsCompileTimeValue(subexpr)) continue;

    if (!result_saved) {
      PushOperand(eax);  // array literal.
      result_saved = true;
    }
    VisitForAccumulatorValue(subexpr);

    __ mov(StoreDescriptor::NameRegister(),
           Immediate(Smi::FromInt(array_index)));
    __ mov(StoreDescriptor::ReceiverRegister(), Operand(esp, 0));
    EmitLoadStoreICSlot(expr->LiteralFeedbackSlot());
    Handle<Code> ic =
        CodeFactory::KeyedStoreIC(isolate(), language_mode()).code();
    CallIC(ic);
    PrepareForBailoutForId(expr->GetIdForElement(array_index), NO_REGISTERS);
  }

  // In case the array literal contains spread expressions it has two parts.
  // The first part is the "static" array, whose elements have a literal index
  // and are handled above. The second part starts at the first spread
  // expression (inclusive); these elements get appended to the array. Note
  // that the number of elements an iterable produces is unknown ahead of time.
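  // For example (hypothetical source): in [1, 2, ...it, 3] the elements 1
  // and 2 are stored by the keyed-store loop above, while ...it and 3 are
  // appended through Runtime::kAppendElement below.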
  if (array_index < length && result_saved) {
    PopOperand(eax);
    result_saved = false;
  }
  for (; array_index < length; array_index++) {
    Expression* subexpr = subexprs->at(array_index);

    PushOperand(eax);
    DCHECK(!subexpr->IsSpread());
    VisitForStackValue(subexpr);
    CallRuntimeWithOperands(Runtime::kAppendElement);

    PrepareForBailoutForId(expr->GetIdForElement(array_index), NO_REGISTERS);
  }

  if (result_saved) {
    context()->PlugTOS();
  } else {
    context()->Plug(eax);
  }
}

void FullCodeGenerator::VisitAssignment(Assignment* expr) {
  DCHECK(expr->target()->IsValidReferenceExpressionOrThis());

  Comment cmnt(masm_, "[ Assignment");
  SetExpressionPosition(expr, INSERT_BREAK);

  Property* property = expr->target()->AsProperty();
  LhsKind assign_type = Property::GetAssignType(property);

  // Evaluate LHS expression.
  switch (assign_type) {
    case VARIABLE:
      // Nothing to do here.
      break;
    case NAMED_SUPER_PROPERTY:
      VisitForStackValue(
          property->obj()->AsSuperPropertyReference()->this_var());
      VisitForAccumulatorValue(
          property->obj()->AsSuperPropertyReference()->home_object());
      PushOperand(result_register());
      if (expr->is_compound()) {
        PushOperand(MemOperand(esp, kPointerSize));
        PushOperand(result_register());
      }
      break;
    case NAMED_PROPERTY:
      if (expr->is_compound()) {
        // We need the receiver both on the stack and in the register.
        VisitForStackValue(property->obj());
        __ mov(LoadDescriptor::ReceiverRegister(), Operand(esp, 0));
      } else {
        VisitForStackValue(property->obj());
      }
      break;
    case KEYED_SUPER_PROPERTY:
      VisitForStackValue(
          property->obj()->AsSuperPropertyReference()->this_var());
      VisitForStackValue(
          property->obj()->AsSuperPropertyReference()->home_object());
      VisitForAccumulatorValue(property->key());
      PushOperand(result_register());
      if (expr->is_compound()) {
        PushOperand(MemOperand(esp, 2 * kPointerSize));
        PushOperand(MemOperand(esp, 2 * kPointerSize));
        PushOperand(result_register());
      }
      break;
    case KEYED_PROPERTY: {
      if (expr->is_compound()) {
        VisitForStackValue(property->obj());
        VisitForStackValue(property->key());
        __ mov(LoadDescriptor::ReceiverRegister(), Operand(esp, kPointerSize));
        __ mov(LoadDescriptor::NameRegister(), Operand(esp, 0));
      } else {
        VisitForStackValue(property->obj());
        VisitForStackValue(property->key());
      }
      break;
    }
  }

  // For compound assignments we need another deoptimization point after the
  // variable/property load.
  if (expr->is_compound()) {
    AccumulatorValueContext result_context(this);
    { AccumulatorValueContext left_operand_context(this);
      switch (assign_type) {
        case VARIABLE:
          EmitVariableLoad(expr->target()->AsVariableProxy());
          PrepareForBailout(expr->target(), TOS_REG);
          break;
        case NAMED_SUPER_PROPERTY:
          EmitNamedSuperPropertyLoad(property);
          PrepareForBailoutForId(property->LoadId(), TOS_REG);
          break;
        case NAMED_PROPERTY:
          EmitNamedPropertyLoad(property);
          PrepareForBailoutForId(property->LoadId(), TOS_REG);
          break;
        case KEYED_SUPER_PROPERTY:
          EmitKeyedSuperPropertyLoad(property);
          PrepareForBailoutForId(property->LoadId(), TOS_REG);
          break;
        case KEYED_PROPERTY:
          EmitKeyedPropertyLoad(property);
          PrepareForBailoutForId(property->LoadId(), TOS_REG);
          break;
      }
    }

    Token::Value op = expr->binary_op();
    PushOperand(eax);  // Left operand goes on the stack.
    VisitForAccumulatorValue(expr->value());

    if (ShouldInlineSmiCase(op)) {
      EmitInlineSmiBinaryOp(expr->binary_operation(),
                            op,
                            expr->target(),
                            expr->value());
    } else {
      EmitBinaryOp(expr->binary_operation(), op);
    }

    // Deoptimization point in case the binary operation may have side effects.
    PrepareForBailout(expr->binary_operation(), TOS_REG);
  } else {
    VisitForAccumulatorValue(expr->value());
  }

  SetExpressionPosition(expr);

  // Store the value.
  switch (assign_type) {
    case VARIABLE:
      EmitVariableAssignment(expr->target()->AsVariableProxy()->var(),
                             expr->op(), expr->AssignmentSlot());
      PrepareForBailoutForId(expr->AssignmentId(), TOS_REG);
      context()->Plug(eax);
      break;
    case NAMED_PROPERTY:
      EmitNamedPropertyAssignment(expr);
      break;
    case NAMED_SUPER_PROPERTY:
      EmitNamedSuperPropertyStore(property);
      context()->Plug(result_register());
      break;
    case KEYED_SUPER_PROPERTY:
      EmitKeyedSuperPropertyStore(property);
      context()->Plug(result_register());
      break;
    case KEYED_PROPERTY:
      EmitKeyedPropertyAssignment(expr);
      break;
  }
}

void FullCodeGenerator::VisitYield(Yield* expr) {
  Comment cmnt(masm_, "[ Yield");
  SetExpressionPosition(expr);

  // Evaluate yielded value first; the initial iterator definition depends on
  // this. It stays on the stack while we update the iterator.
  VisitForStackValue(expr->expression());

  switch (expr->yield_kind()) {
    case Yield::kSuspend:
      // Pop value from top-of-stack slot; box result into result register.
      EmitCreateIteratorResult(false);
      __ push(result_register());
      // Fall through.
    case Yield::kInitial: {
      Label suspend, continuation, post_runtime, resume;

      __ jmp(&suspend);
      __ bind(&continuation);
      // When we arrive here, the stack top is the resume mode and
      // result_register() holds the input value (the argument given to the
      // respective resume operation).
      __ RecordGeneratorContinuation();
      __ pop(ebx);
      __ cmp(ebx, Immediate(Smi::FromInt(JSGeneratorObject::RETURN)));
      __ j(not_equal, &resume);
      __ push(result_register());
      EmitCreateIteratorResult(true);
      EmitUnwindAndReturn();

      __ bind(&suspend);
      VisitForAccumulatorValue(expr->generator_object());
      DCHECK(continuation.pos() > 0 && Smi::IsValid(continuation.pos()));
      __ mov(FieldOperand(eax, JSGeneratorObject::kContinuationOffset),
             Immediate(Smi::FromInt(continuation.pos())));
      __ mov(FieldOperand(eax, JSGeneratorObject::kContextOffset), esi);
      __ mov(ecx, esi);
      __ RecordWriteField(eax, JSGeneratorObject::kContextOffset, ecx, edx,
                          kDontSaveFPRegs);
      __ lea(ebx, Operand(ebp, StandardFrameConstants::kExpressionsOffset));
      __ cmp(esp, ebx);
      __ j(equal, &post_runtime);
      __ push(eax);  // generator object
      __ CallRuntime(Runtime::kSuspendJSGeneratorObject, 1);
      __ mov(context_register(),
             Operand(ebp, StandardFrameConstants::kContextOffset));
      __ bind(&post_runtime);
      PopOperand(result_register());
      EmitReturnSequence();

      __ bind(&resume);
      context()->Plug(result_register());
      break;
    }

    case Yield::kFinal: {
      // Pop value from top-of-stack slot, box result into result register.
      OperandStackDepthDecrement(1);
      EmitCreateIteratorResult(true);
      EmitUnwindAndReturn();
      break;
    }

    case Yield::kDelegating:
      UNREACHABLE();
  }
}

void FullCodeGenerator::EmitGeneratorResume(Expression *generator,
    Expression *value,
    JSGeneratorObject::ResumeMode resume_mode) {
  // The value stays in eax, and is ultimately read by the resumed generator,
  // as if CallRuntime(Runtime::kSuspendJSGeneratorObject) returned it. Or it
  // is read to throw the value when the resumed generator is already closed.
  // ebx will hold the generator object until the activation has been resumed.
  VisitForStackValue(generator);
  VisitForAccumulatorValue(value);
  PopOperand(ebx);

  // Store input value into generator object.
  __ mov(FieldOperand(ebx, JSGeneratorObject::kInputOffset), result_register());
  __ mov(ecx, result_register());
  __ RecordWriteField(ebx, JSGeneratorObject::kInputOffset, ecx, edx,
                      kDontSaveFPRegs);

  // Load suspended function and context.
  __ mov(esi, FieldOperand(ebx, JSGeneratorObject::kContextOffset));
  __ mov(edi, FieldOperand(ebx, JSGeneratorObject::kFunctionOffset));

  // Push receiver.
  __ push(FieldOperand(ebx, JSGeneratorObject::kReceiverOffset));

  // Push holes for arguments to generator function.
  __ mov(edx, FieldOperand(edi, JSFunction::kSharedFunctionInfoOffset));
  __ mov(edx,
         FieldOperand(edx, SharedFunctionInfo::kFormalParameterCountOffset));
  __ mov(ecx, isolate()->factory()->the_hole_value());
  Label push_argument_holes, push_frame;
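  // edx holds the formal parameter count as a smi. Subtracting a smi-encoded
  // 1 counts it down in place; the borrow (carry flag) from stepping below
  // zero terminates the loop.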
  __ bind(&push_argument_holes);
  __ sub(edx, Immediate(Smi::FromInt(1)));
  __ j(carry, &push_frame);
  __ push(ecx);
  __ jmp(&push_argument_holes);

  // Enter a new JavaScript frame, and initialize its slots as they were when
  // the generator was suspended.
  Label resume_frame, done;
  __ bind(&push_frame);
  __ call(&resume_frame);
  __ jmp(&done);
  __ bind(&resume_frame);
  __ push(ebp);  // Caller's frame pointer.
  __ mov(ebp, esp);
  __ push(esi);  // Callee's context.
  __ push(edi);  // Callee's JS Function.

  // Load the operand stack size.
  __ mov(edx, FieldOperand(ebx, JSGeneratorObject::kOperandStackOffset));
  __ mov(edx, FieldOperand(edx, FixedArray::kLengthOffset));
  __ SmiUntag(edx);

  // If we are sending a value and there is no operand stack, we can jump back
  // in directly.
  if (resume_mode == JSGeneratorObject::NEXT) {
    Label slow_resume;
    __ cmp(edx, Immediate(0));
    __ j(not_zero, &slow_resume);
    __ mov(edx, FieldOperand(edi, JSFunction::kCodeEntryOffset));
    __ mov(ecx, FieldOperand(ebx, JSGeneratorObject::kContinuationOffset));
    __ SmiUntag(ecx);
    __ add(edx, ecx);
    __ mov(FieldOperand(ebx, JSGeneratorObject::kContinuationOffset),
           Immediate(Smi::FromInt(JSGeneratorObject::kGeneratorExecuting)));
    __ Push(Smi::FromInt(resume_mode));  // Consumed in continuation.
    __ jmp(edx);
    __ bind(&slow_resume);
  }

  // Otherwise, we push holes for the operand stack and call the runtime to fix
  // up the stack and the handlers.
  Label push_operand_holes, call_resume;
  __ bind(&push_operand_holes);
  __ sub(edx, Immediate(1));
  __ j(carry, &call_resume);
  __ push(ecx);
  __ jmp(&push_operand_holes);
  __ bind(&call_resume);
  __ Push(Smi::FromInt(resume_mode));  // Consumed in continuation.
  __ push(ebx);
  __ push(result_register());
  __ Push(Smi::FromInt(resume_mode));
  __ CallRuntime(Runtime::kResumeJSGeneratorObject);
  // Not reached: the runtime call returns elsewhere.
  __ Abort(kGeneratorFailedToResume);

  __ bind(&done);
  context()->Plug(result_register());
}

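// Pushes an operand onto the real stack while keeping the simulated operand
// stack depth, checked by EmitOperandStackDepthCheck below, in sync.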
void FullCodeGenerator::PushOperand(MemOperand operand) {
  OperandStackDepthIncrement(1);
  __ Push(operand);
}

void FullCodeGenerator::EmitOperandStackDepthCheck() {
  if (FLAG_debug_code) {
    int expected_diff = StandardFrameConstants::kFixedFrameSizeFromFp +
                        operand_stack_depth_ * kPointerSize;
    __ mov(eax, ebp);
    __ sub(eax, esp);
    __ cmp(eax, Immediate(expected_diff));
    __ Assert(equal, kUnexpectedStackDepth);
  }
}

void FullCodeGenerator::EmitCreateIteratorResult(bool done) {
  Label allocate, done_allocate;

  __ Allocate(JSIteratorResult::kSize, eax, ecx, edx, &allocate, TAG_OBJECT);
  __ jmp(&done_allocate, Label::kNear);

  __ bind(&allocate);
  __ Push(Smi::FromInt(JSIteratorResult::kSize));
  __ CallRuntime(Runtime::kAllocateInNewSpace);

  __ bind(&done_allocate);
  __ mov(ebx, NativeContextOperand());
  __ mov(ebx, ContextOperand(ebx, Context::ITERATOR_RESULT_MAP_INDEX));
  __ mov(FieldOperand(eax, HeapObject::kMapOffset), ebx);
  __ mov(FieldOperand(eax, JSObject::kPropertiesOffset),
         isolate()->factory()->empty_fixed_array());
  __ mov(FieldOperand(eax, JSObject::kElementsOffset),
         isolate()->factory()->empty_fixed_array());
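  // The caller left the yielded value on the operand stack; pop it straight
  // into the result object's value field.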
  __ pop(FieldOperand(eax, JSIteratorResult::kValueOffset));
  __ mov(FieldOperand(eax, JSIteratorResult::kDoneOffset),
         isolate()->factory()->ToBoolean(done));
  STATIC_ASSERT(JSIteratorResult::kSize == 5 * kPointerSize);
}

void FullCodeGenerator::EmitNamedPropertyLoad(Property* prop) {
  SetExpressionPosition(prop);
  Literal* key = prop->key()->AsLiteral();
  DCHECK(!key->value()->IsSmi());
  DCHECK(!prop->IsSuperAccess());

  __ mov(LoadDescriptor::NameRegister(), Immediate(key->value()));
  __ mov(LoadDescriptor::SlotRegister(),
         Immediate(SmiFromSlot(prop->PropertyFeedbackSlot())));
  CallLoadIC(NOT_INSIDE_TYPEOF);
}

void FullCodeGenerator::EmitInlineSmiBinaryOp(BinaryOperation* expr,
                                              Token::Value op,
                                              Expression* left,
                                              Expression* right) {
  // Do combined smi check of the operands. Left operand is on the
  // stack. Right operand is in eax.
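  // Or-ing the operands merges their tag bits: the smi tag bit of the result
  // is clear only if both operands are smis, so a single EmitJumpIfSmi below
  // covers both checks.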
  Label smi_case, done, stub_call;
  PopOperand(edx);
  __ mov(ecx, eax);
  __ or_(eax, edx);
  JumpPatchSite patch_site(masm_);
  patch_site.EmitJumpIfSmi(eax, &smi_case, Label::kNear);

  __ bind(&stub_call);
  __ mov(eax, ecx);
  Handle<Code> code = CodeFactory::BinaryOpIC(isolate(), op).code();
  CallIC(code, expr->BinaryOperationFeedbackId());
  patch_site.EmitPatchInfo();
  __ jmp(&done, Label::kNear);

  // Smi case.
  __ bind(&smi_case);
  __ mov(eax, edx);  // Copy left operand in case of a stub call.

  switch (op) {
    case Token::SAR:
      __ SmiUntag(ecx);
      __ sar_cl(eax);  // No checks of result necessary.
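      // The operand is still tagged; the arithmetic shift can drag a one into
      // the tag bit, so clear it to leave a valid smi.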
      __ and_(eax, Immediate(~kSmiTagMask));
      break;
    case Token::SHL: {
      Label result_ok;
      __ SmiUntag(eax);
      __ SmiUntag(ecx);
      __ shl_cl(eax);
      // Check that the *signed* result fits in a smi.
      __ cmp(eax, 0xc0000000);
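      // eax - 0xc0000000 is non-negative exactly for results in
      // [-2^30, 2^30), i.e. values that fit in the 31-bit smi payload.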
      __ j(positive, &result_ok);
      __ SmiTag(ecx);
      __ jmp(&stub_call);
      __ bind(&result_ok);
      __ SmiTag(eax);
      break;
    }
    case Token::SHR: {
      Label result_ok;
      __ SmiUntag(eax);
      __ SmiUntag(ecx);
      __ shr_cl(eax);
      __ test(eax, Immediate(0xc0000000));
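      // A logical shift produces an unsigned result, which is only a valid
      // smi if it fits in [0, 2^30), i.e. if the top two bits are clear.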
      __ j(zero, &result_ok);
      __ SmiTag(ecx);
      __ jmp(&stub_call);
      __ bind(&result_ok);
      __ SmiTag(eax);
      break;
    }
    case Token::ADD:
      __ add(eax, ecx);
      __ j(overflow, &stub_call);
      break;
    case Token::SUB:
      __ sub(eax, ecx);
      __ j(overflow, &stub_call);
      break;
    case Token::MUL: {
      __ SmiUntag(eax);
      __ imul(eax, ecx);
      __ j(overflow, &stub_call);
      __ test(eax, eax);
      __ j(not_zero, &done, Label::kNear);
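      // A zero product may stand for -0 if the other operand was negative;
      // check the signs of the original operands and defer to the stub then.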
      __ mov(ebx, edx);
      __ or_(ebx, ecx);
      __ j(negative, &stub_call);
      break;
    }
    case Token::BIT_OR:
      __ or_(eax, ecx);
      break;
    case Token::BIT_AND:
      __ and_(eax, ecx);
      break;
    case Token::BIT_XOR:
      __ xor_(eax, ecx);
      break;
    default:
      UNREACHABLE();
  }

  __ bind(&done);
  context()->Plug(eax);
}

void FullCodeGenerator::EmitClassDefineProperties(ClassLiteral* lit) {
  for (int i = 0; i < lit->properties()->length(); i++) {
    ObjectLiteral::Property* property = lit->properties()->at(i);
    Expression* value = property->value();

    if (property->is_static()) {
      PushOperand(Operand(esp, kPointerSize));  // constructor
    } else {
      PushOperand(Operand(esp, 0));  // prototype
    }
    EmitPropertyKey(property, lit->GetIdForProperty(i));

    // The static prototype property is read only. We handle the non-computed
    // property name case in the parser. Since this is the only case where we
    // need to check for an own read-only property, we special case it here so
    // we do not need to do the check for every property.
    if (property->is_static() && property->is_computed_name()) {
      __ CallRuntime(Runtime::kThrowIfStaticPrototype);
      __ push(eax);
    }

    VisitForStackValue(value);
    if (NeedsHomeObject(value)) {
      EmitSetHomeObject(value, 2, property->GetSlot());
    }

    switch (property->kind()) {
      case ObjectLiteral::Property::CONSTANT:
      case ObjectLiteral::Property::MATERIALIZED_LITERAL:
      case ObjectLiteral::Property::PROTOTYPE:
        UNREACHABLE();
      case ObjectLiteral::Property::COMPUTED:
        PushOperand(Smi::FromInt(DONT_ENUM));
        PushOperand(Smi::FromInt(property->NeedsSetFunctionName()));
        CallRuntimeWithOperands(Runtime::kDefineDataPropertyInLiteral);
        break;

      case ObjectLiteral::Property::GETTER:
        PushOperand(Smi::FromInt(DONT_ENUM));
        CallRuntimeWithOperands(Runtime::kDefineGetterPropertyUnchecked);
        break;

      case ObjectLiteral::Property::SETTER:
        PushOperand(Smi::FromInt(DONT_ENUM));
        CallRuntimeWithOperands(Runtime::kDefineSetterPropertyUnchecked);
        break;
    }
  }
}

void FullCodeGenerator::EmitBinaryOp(BinaryOperation* expr, Token::Value op) {
  PopOperand(edx);
  Handle<Code> code = CodeFactory::BinaryOpIC(isolate(), op).code();
  JumpPatchSite patch_site(masm_);  // unbound, signals no inlined smi code.
  CallIC(code, expr->BinaryOperationFeedbackId());
  patch_site.EmitPatchInfo();
  context()->Plug(eax);
}

void FullCodeGenerator::EmitAssignment(Expression* expr,
                                       FeedbackVectorSlot slot) {
  DCHECK(expr->IsValidReferenceExpressionOrThis());

  Property* prop = expr->AsProperty();
  LhsKind assign_type = Property::GetAssignType(prop);

  switch (assign_type) {
    case VARIABLE: {
      Variable* var = expr->AsVariableProxy()->var();
      EffectContext context(this);
      EmitVariableAssignment(var, Token::ASSIGN, slot);
      break;
    }
    case NAMED_PROPERTY: {
      PushOperand(eax);  // Preserve value.
      VisitForAccumulatorValue(prop->obj());
      __ Move(StoreDescriptor::ReceiverRegister(), eax);
      PopOperand(StoreDescriptor::ValueRegister());  // Restore value.
      __ mov(StoreDescriptor::NameRegister(),
             prop->key()->AsLiteral()->value());
      EmitLoadStoreICSlot(slot);
      CallStoreIC();
      break;
    }
    case NAMED_SUPER_PROPERTY: {
      PushOperand(eax);
      VisitForStackValue(prop->obj()->AsSuperPropertyReference()->this_var());
      VisitForAccumulatorValue(
          prop->obj()->AsSuperPropertyReference()->home_object());
      // stack: value, this; eax: home_object
      Register scratch = ecx;
      Register scratch2 = edx;
      __ mov(scratch, result_register());               // home_object
      __ mov(eax, MemOperand(esp, kPointerSize));       // value
      __ mov(scratch2, MemOperand(esp, 0));             // this
      __ mov(MemOperand(esp, kPointerSize), scratch2);  // this
      __ mov(MemOperand(esp, 0), scratch);              // home_object
      // stack: this, home_object. eax: value
      EmitNamedSuperPropertyStore(prop);
      break;
    }
    case KEYED_SUPER_PROPERTY: {
      PushOperand(eax);
      VisitForStackValue(prop->obj()->AsSuperPropertyReference()->this_var());
      VisitForStackValue(
          prop->obj()->AsSuperPropertyReference()->home_object());
      VisitForAccumulatorValue(prop->key());
      Register scratch = ecx;
      Register scratch2 = edx;
      __ mov(scratch2, MemOperand(esp, 2 * kPointerSize));  // value
      // stack: value, this, home_object; eax: key, edx: value
      __ mov(scratch, MemOperand(esp, kPointerSize));  // this
      __ mov(MemOperand(esp, 2 * kPointerSize), scratch);
      __ mov(scratch, MemOperand(esp, 0));  // home_object
      __ mov(MemOperand(esp, kPointerSize), scratch);
      __ mov(MemOperand(esp, 0), eax);
      __ mov(eax, scratch2);
      // stack: this, home_object, key; eax: value.
      EmitKeyedSuperPropertyStore(prop);
      break;
    }
    case KEYED_PROPERTY: {
      PushOperand(eax);  // Preserve value.
      VisitForStackValue(prop->obj());
      VisitForAccumulatorValue(prop->key());
      __ Move(StoreDescriptor::NameRegister(), eax);
      PopOperand(StoreDescriptor::ReceiverRegister());  // Receiver.
      PopOperand(StoreDescriptor::ValueRegister());     // Restore value.
      EmitLoadStoreICSlot(slot);
      Handle<Code> ic =
          CodeFactory::KeyedStoreIC(isolate(), language_mode()).code();
      CallIC(ic);
      break;
    }
  }
  context()->Plug(eax);
}

void FullCodeGenerator::EmitStoreToStackLocalOrContextSlot(
    Variable* var, MemOperand location) {
  __ mov(location, eax);
  if (var->IsContextSlot()) {
    __ mov(edx, eax);
    int offset = Context::SlotOffset(var->index());
    __ RecordWriteContextSlot(ecx, offset, edx, ebx, kDontSaveFPRegs);
  }
}

void FullCodeGenerator::EmitVariableAssignment(Variable* var, Token::Value op,
                                               FeedbackVectorSlot slot) {
  if (var->IsUnallocated()) {
    // Global var, const, or let.
    __ mov(StoreDescriptor::NameRegister(), var->name());
    __ mov(StoreDescriptor::ReceiverRegister(), NativeContextOperand());
    __ mov(StoreDescriptor::ReceiverRegister(),
           ContextOperand(StoreDescriptor::ReceiverRegister(),
                          Context::EXTENSION_INDEX));
    EmitLoadStoreICSlot(slot);
    CallStoreIC();

  } else if (var->mode() == LET && op != Token::INIT) {
    // Non-initializing assignment to let variable needs a write barrier.
    DCHECK(!var->IsLookupSlot());
    DCHECK(var->IsStackAllocated() || var->IsContextSlot());
    Label assign;
    MemOperand location = VarOperand(var, ecx);
    __ mov(edx, location);
    __ cmp(edx, isolate()->factory()->the_hole_value());
    __ j(not_equal, &assign, Label::kNear);
    __ push(Immediate(var->name()));
    __ CallRuntime(Runtime::kThrowReferenceError);
    __ bind(&assign);
    EmitStoreToStackLocalOrContextSlot(var, location);

  } else if (var->mode() == CONST && op != Token::INIT) {
    // Assignment to const variable needs a write barrier.
    DCHECK(!var->IsLookupSlot());
    DCHECK(var->IsStackAllocated() || var->IsContextSlot());
    Label const_error;
    MemOperand location = VarOperand(var, ecx);
    __ mov(edx, location);
    __ cmp(edx, isolate()->factory()->the_hole_value());
    __ j(not_equal, &const_error, Label::kNear);
    __ push(Immediate(var->name()));
    __ CallRuntime(Runtime::kThrowReferenceError);
    __ bind(&const_error);
    __ CallRuntime(Runtime::kThrowConstAssignError);

  } else if (var->is_this() && var->mode() == CONST && op == Token::INIT) {
    // Initializing assignment to const {this} needs a write barrier.
    DCHECK(var->IsStackAllocated() || var->IsContextSlot());
    Label uninitialized_this;
    MemOperand location = VarOperand(var, ecx);
    __ mov(edx, location);
    __ cmp(edx, isolate()->factory()->the_hole_value());
    __ j(equal, &uninitialized_this);
    __ push(Immediate(var->name()));
    __ CallRuntime(Runtime::kThrowReferenceError);
    __ bind(&uninitialized_this);
    EmitStoreToStackLocalOrContextSlot(var, location);

  } else if (!var->is_const_mode() ||
             (var->mode() == CONST && op == Token::INIT)) {
    if (var->IsLookupSlot()) {
      // Assignment to var.
      __ Push(Immediate(var->name()));
      __ Push(eax);
      __ CallRuntime(is_strict(language_mode())
                         ? Runtime::kStoreLookupSlot_Strict
                         : Runtime::kStoreLookupSlot_Sloppy);
    } else {
      // Assignment to var or initializing assignment to let/const in harmony
      // mode.
      DCHECK(var->IsStackAllocated() || var->IsContextSlot());
      MemOperand location = VarOperand(var, ecx);
      if (FLAG_debug_code && var->mode() == LET && op == Token::INIT) {
        // Check for an uninitialized let binding.
        __ mov(edx, location);
        __ cmp(edx, isolate()->factory()->the_hole_value());
        __ Check(equal, kLetBindingReInitialization);
      }
      EmitStoreToStackLocalOrContextSlot(var, location);
    }

  } else if (var->mode() == CONST_LEGACY && op == Token::INIT) {
    // Const initializers need a write barrier.
    DCHECK(!var->IsParameter());  // No const parameters.
    if (var->IsLookupSlot()) {
      __ push(eax);
      __ push(esi);
      __ push(Immediate(var->name()));
      __ CallRuntime(Runtime::kInitializeLegacyConstLookupSlot);
    } else {
      DCHECK(var->IsStackLocal() || var->IsContextSlot());
      Label skip;
      MemOperand location = VarOperand(var, ecx);
      __ mov(edx, location);
      __ cmp(edx, isolate()->factory()->the_hole_value());
      __ j(not_equal, &skip, Label::kNear);
      EmitStoreToStackLocalOrContextSlot(var, location);
      __ bind(&skip);
    }

  } else {
    DCHECK(var->mode() == CONST_LEGACY && op != Token::INIT);
    if (is_strict(language_mode())) {
      __ CallRuntime(Runtime::kThrowConstAssignError);
    }
    // Silently ignore store in sloppy mode.
  }
}

void FullCodeGenerator::EmitNamedPropertyAssignment(Assignment* expr) {
  // Assignment to a property, using a named store IC.
  // eax    : value
  // esp[0] : receiver
  Property* prop = expr->target()->AsProperty();
  DCHECK(prop != NULL);
  DCHECK(prop->key()->IsLiteral());

  __ mov(StoreDescriptor::NameRegister(), prop->key()->AsLiteral()->value());
  PopOperand(StoreDescriptor::ReceiverRegister());
  EmitLoadStoreICSlot(expr->AssignmentSlot());
  CallStoreIC();
  PrepareForBailoutForId(expr->AssignmentId(), TOS_REG);
  context()->Plug(eax);
}

void FullCodeGenerator::EmitNamedSuperPropertyStore(Property* prop) {
  // Assignment to named property of super.
  // eax   : value
  // stack : receiver ('this'), home_object
  DCHECK(prop != NULL);
  Literal* key = prop->key()->AsLiteral();
  DCHECK(key != NULL);

  PushOperand(key->value());
  PushOperand(eax);
  CallRuntimeWithOperands(is_strict(language_mode())
                              ? Runtime::kStoreToSuper_Strict
                              : Runtime::kStoreToSuper_Sloppy);
}

void FullCodeGenerator::EmitKeyedSuperPropertyStore(Property* prop) {
  // Assignment to keyed property of super.
  // eax   : value
  // stack : receiver ('this'), home_object, key

  PushOperand(eax);
  CallRuntimeWithOperands(is_strict(language_mode())
                              ? Runtime::kStoreKeyedToSuper_Strict
                              : Runtime::kStoreKeyedToSuper_Sloppy);
}

void FullCodeGenerator::EmitKeyedPropertyAssignment(Assignment* expr) {
  // Assignment to a property, using a keyed store IC.
  // eax               : value
  // esp[0]            : key
  // esp[kPointerSize] : receiver

  PopOperand(StoreDescriptor::NameRegister());  // Key.
  PopOperand(StoreDescriptor::ReceiverRegister());
  DCHECK(StoreDescriptor::ValueRegister().is(eax));
  Handle<Code> ic =
      CodeFactory::KeyedStoreIC(isolate(), language_mode()).code();
  EmitLoadStoreICSlot(expr->AssignmentSlot());
  CallIC(ic);
  PrepareForBailoutForId(expr->AssignmentId(), TOS_REG);
  context()->Plug(eax);
}

void FullCodeGenerator::VisitProperty(Property* expr) {
  Comment cmnt(masm_, "[ Property");
  SetExpressionPosition(expr);

  Expression* key = expr->key();

  if (key->IsPropertyName()) {
    if (!expr->IsSuperAccess()) {
      VisitForAccumulatorValue(expr->obj());
      __ Move(LoadDescriptor::ReceiverRegister(), result_register());
      EmitNamedPropertyLoad(expr);
    } else {
      VisitForStackValue(expr->obj()->AsSuperPropertyReference()->this_var());
      VisitForStackValue(
          expr->obj()->AsSuperPropertyReference()->home_object());
      EmitNamedSuperPropertyLoad(expr);
    }
  } else {
    if (!expr->IsSuperAccess()) {
      VisitForStackValue(expr->obj());
      VisitForAccumulatorValue(expr->key());
      PopOperand(LoadDescriptor::ReceiverRegister());              // Object.
      __ Move(LoadDescriptor::NameRegister(), result_register());  // Key.
      EmitKeyedPropertyLoad(expr);
    } else {
      VisitForStackValue(expr->obj()->AsSuperPropertyReference()->this_var());
      VisitForStackValue(
          expr->obj()->AsSuperPropertyReference()->home_object());
      VisitForStackValue(expr->key());
      EmitKeyedSuperPropertyLoad(expr);
    }
  }
  PrepareForBailoutForId(expr->LoadId(), TOS_REG);
  context()->Plug(eax);
}

void FullCodeGenerator::CallIC(Handle<Code> code,
                               TypeFeedbackId ast_id) {
  ic_total_count_++;
  __ call(code, RelocInfo::CODE_TARGET, ast_id);
}

// Code common for calls using the IC.
void FullCodeGenerator::EmitCallWithLoadIC(Call* expr) {
  Expression* callee = expr->expression();

  // Get the target function.
  ConvertReceiverMode convert_mode;
  if (callee->IsVariableProxy()) {
    { StackValueContext context(this);
      EmitVariableLoad(callee->AsVariableProxy());
      PrepareForBailout(callee, NO_REGISTERS);
    }
    // Push undefined as receiver. This is patched in the method prologue if it
    // is a sloppy mode method.
    PushOperand(isolate()->factory()->undefined_value());
    convert_mode = ConvertReceiverMode::kNullOrUndefined;
  } else {
    // Load the function from the receiver.
    DCHECK(callee->IsProperty());
    DCHECK(!callee->AsProperty()->IsSuperAccess());
    __ mov(LoadDescriptor::ReceiverRegister(), Operand(esp, 0));
    EmitNamedPropertyLoad(callee->AsProperty());
    PrepareForBailoutForId(callee->AsProperty()->LoadId(), TOS_REG);
    // Push the target function under the receiver.
    PushOperand(Operand(esp, 0));
    __ mov(Operand(esp, kPointerSize), eax);
    convert_mode = ConvertReceiverMode::kNotNullOrUndefined;
  }

  EmitCall(expr, convert_mode);
}

void FullCodeGenerator::EmitSuperCallWithLoadIC(Call* expr) {
  SetExpressionPosition(expr);
  Expression* callee = expr->expression();
  DCHECK(callee->IsProperty());
  Property* prop = callee->AsProperty();
  DCHECK(prop->IsSuperAccess());

  Literal* key = prop->key()->AsLiteral();
  DCHECK(!key->value()->IsSmi());
  // Load the function from the receiver.
  SuperPropertyReference* super_ref = prop->obj()->AsSuperPropertyReference();
  VisitForStackValue(super_ref->home_object());
  VisitForAccumulatorValue(super_ref->this_var());
  PushOperand(eax);
  PushOperand(eax);
  PushOperand(Operand(esp, kPointerSize * 2));
  PushOperand(key->value());
  // Stack here:
  //  - home_object
  //  - this (receiver)
  //  - this (receiver) <-- LoadFromSuper will pop here and below.
  //  - home_object
  //  - key
  CallRuntimeWithOperands(Runtime::kLoadFromSuper);

  // Replace home_object with target function.
  __ mov(Operand(esp, kPointerSize), eax);

  // Stack here:
  // - target function
  // - this (receiver)
  EmitCall(expr);
}

// Code common for calls using the IC.
void FullCodeGenerator::EmitKeyedCallWithLoadIC(Call* expr,
                                                Expression* key) {
  // Load the key.
  VisitForAccumulatorValue(key);

  Expression* callee = expr->expression();

  // Load the function from the receiver.
  DCHECK(callee->IsProperty());
  __ mov(LoadDescriptor::ReceiverRegister(), Operand(esp, 0));
  __ mov(LoadDescriptor::NameRegister(), eax);
  EmitKeyedPropertyLoad(callee->AsProperty());
  PrepareForBailoutForId(callee->AsProperty()->LoadId(), TOS_REG);

  // Push the target function under the receiver.
  PushOperand(Operand(esp, 0));
  __ mov(Operand(esp, kPointerSize), eax);

  EmitCall(expr, ConvertReceiverMode::kNotNullOrUndefined);
}

void FullCodeGenerator::EmitKeyedSuperCallWithLoadIC(Call* expr) {
  Expression* callee = expr->expression();
  DCHECK(callee->IsProperty());
  Property* prop = callee->AsProperty();
  DCHECK(prop->IsSuperAccess());

  SetExpressionPosition(prop);
  // Load the function from the receiver.
  SuperPropertyReference* super_ref = prop->obj()->AsSuperPropertyReference();
  VisitForStackValue(super_ref->home_object());
  VisitForAccumulatorValue(super_ref->this_var());
  PushOperand(eax);
  PushOperand(eax);
  PushOperand(Operand(esp, kPointerSize * 2));
  VisitForStackValue(prop->key());
  // Stack here:
  //  - home_object
  //  - this (receiver)
  //  - this (receiver) <-- LoadKeyedFromSuper will pop here and below.
  //  - home_object
  //  - key
  CallRuntimeWithOperands(Runtime::kLoadKeyedFromSuper);

  // Replace home_object with target function.
  __ mov(Operand(esp, kPointerSize), eax);

  // Stack here:
  // - target function
  // - this (receiver)
  EmitCall(expr);
}

void FullCodeGenerator::EmitCall(Call* expr, ConvertReceiverMode mode) {
  // Load the arguments.
  ZoneList<Expression*>* args = expr->arguments();
  int arg_count = args->length();
  for (int i = 0; i < arg_count; i++) {
    VisitForStackValue(args->at(i));
  }

  PrepareForBailoutForId(expr->CallId(), NO_REGISTERS);
  SetCallPosition(expr);
  if (expr->tail_call_mode() == TailCallMode::kAllow) {
    if (FLAG_trace) {
      __ CallRuntime(Runtime::kTraceTailCall);
    }
    // Update profiling counters before the tail call since we will
    // not return to this function.
    EmitProfilingCounterHandlingForReturnSequence(true);
  }
  Handle<Code> ic =
      CodeFactory::CallIC(isolate(), arg_count, mode, expr->tail_call_mode())
          .code();
  __ Move(edx, Immediate(SmiFromSlot(expr->CallFeedbackICSlot())));
  __ mov(edi, Operand(esp, (arg_count + 1) * kPointerSize));
  // Don't assign a type feedback id to the IC, since type feedback is provided
  // by the vector above.
  CallIC(ic);
  OperandStackDepthDecrement(arg_count + 1);

  RecordJSReturnSite(expr);

  // Restore context register.
  __ mov(esi, Operand(ebp, StandardFrameConstants::kContextOffset));

  context()->DropAndPlug(1, eax);
}

void FullCodeGenerator::EmitResolvePossiblyDirectEval(int arg_count) {
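  // The four values pushed below form the argument list expected by
  // Runtime::kResolvePossiblyDirectEval: the callee (or undefined), the
  // enclosing function, the language mode, and the scope's start position.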
  // Push copy of the first argument or undefined if it doesn't exist.
  if (arg_count > 0) {
    __ push(Operand(esp, arg_count * kPointerSize));
  } else {
    __ push(Immediate(isolate()->factory()->undefined_value()));
  }

  // Push the enclosing function.
  __ push(Operand(ebp, JavaScriptFrameConstants::kFunctionOffset));

  // Push the language mode.
  __ push(Immediate(Smi::FromInt(language_mode())));

  // Push the start position of the scope the call resides in.
  __ push(Immediate(Smi::FromInt(scope()->start_position())));

  // Do the runtime call.
  __ CallRuntime(Runtime::kResolvePossiblyDirectEval);
}

// See http://www.ecma-international.org/ecma-262/6.0/#sec-function-calls.
void FullCodeGenerator::PushCalleeAndWithBaseObject(Call* expr) {
  VariableProxy* callee = expr->expression()->AsVariableProxy();
  if (callee->var()->IsLookupSlot()) {
    Label slow, done;
    SetExpressionPosition(callee);
    // Generate code for loading from variables potentially shadowed by
    // eval-introduced variables.
    EmitDynamicLookupFastCase(callee, NOT_INSIDE_TYPEOF, &slow, &done);

    __ bind(&slow);
    // Call the runtime to find the function to call (returned in eax) and
    // the object holding it (returned in edx).
    __ Push(callee->name());
    __ CallRuntime(Runtime::kLoadLookupSlotForCall);
    PushOperand(eax);  // Function.
    PushOperand(edx);  // Receiver.
    PrepareForBailoutForId(expr->LookupId(), NO_REGISTERS);

    // If fast case code has been generated, emit code to push the function
    // and receiver and have the slow path jump around this code.
    if (done.is_linked()) {
      Label call;
      __ jmp(&call, Label::kNear);
      __ bind(&done);
      // Push function.
      __ push(eax);
      // The receiver is implicitly the global receiver. Indicate this by
      // passing undefined to the call function stub.
      __ push(Immediate(isolate()->factory()->undefined_value()));
      __ bind(&call);
    }
  } else {
    VisitForStackValue(callee);
    // refEnv.WithBaseObject()
    PushOperand(isolate()->factory()->undefined_value());
  }
}

void FullCodeGenerator::EmitPossiblyEvalCall(Call* expr) {
  // In a call to eval, we first call Runtime::kResolvePossiblyDirectEval
  // to resolve the function we need to call. Then we call the resolved
  // function using the given arguments.
  ZoneList<Expression*>* args = expr->arguments();
  int arg_count = args->length();

  PushCalleeAndWithBaseObject(expr);

  // Push the arguments.
  for (int i = 0; i < arg_count; i++) {
    VisitForStackValue(args->at(i));
  }

  // Push a copy of the function (found below the arguments) and
  // resolve eval.
  __ push(Operand(esp, (arg_count + 1) * kPointerSize));
  EmitResolvePossiblyDirectEval(arg_count);

  // Touch up the stack with the resolved function.
  __ mov(Operand(esp, (arg_count + 1) * kPointerSize), eax);

  PrepareForBailoutForId(expr->EvalId(), NO_REGISTERS);

  SetCallPosition(expr);
  __ mov(edi, Operand(esp, (arg_count + 1) * kPointerSize));
  __ Set(eax, arg_count);
  __ Call(isolate()->builtins()->Call(ConvertReceiverMode::kAny,
                                      expr->tail_call_mode()),
          RelocInfo::CODE_TARGET);
  OperandStackDepthDecrement(arg_count + 1);
  RecordJSReturnSite(expr);
  // Restore context register.
  __ mov(esi, Operand(ebp, StandardFrameConstants::kContextOffset));
  context()->DropAndPlug(1, eax);
}

void FullCodeGenerator::VisitCallNew(CallNew* expr) {
  Comment cmnt(masm_, "[ CallNew");
  // According to ECMA-262, section 11.2.2, page 44, the function
  // expression in new calls must be evaluated before the
  // arguments.

  // Push constructor on the stack. If it's not a function it's used as
  // receiver for CALL_NON_FUNCTION, otherwise the value on the stack is
  // ignored.
  DCHECK(!expr->expression()->IsSuperPropertyReference());
  VisitForStackValue(expr->expression());

  // Push the arguments ("left-to-right") on the stack.
  ZoneList<Expression*>* args = expr->arguments();
  int arg_count = args->length();
  for (int i = 0; i < arg_count; i++) {
    VisitForStackValue(args->at(i));
  }

  // Call the construct call builtin that handles allocation and
  // constructor invocation.
  SetConstructCallPosition(expr);

  // Load function and argument count into edi and eax.
  __ Move(eax, Immediate(arg_count));
  __ mov(edi, Operand(esp, arg_count * kPointerSize));

  // Record call targets in unoptimized code.
  __ EmitLoadTypeFeedbackVector(ebx);
  __ mov(edx, Immediate(SmiFromSlot(expr->CallNewFeedbackSlot())));

  CallConstructStub stub(isolate());
  __ call(stub.GetCode(), RelocInfo::CODE_TARGET);
  OperandStackDepthDecrement(arg_count + 1);
  PrepareForBailoutForId(expr->ReturnId(), TOS_REG);
  // Restore context register.
  __ mov(esi, Operand(ebp, StandardFrameConstants::kContextOffset));
  context()->Plug(eax);
}

void FullCodeGenerator::EmitSuperConstructorCall(Call* expr) {
  SuperCallReference* super_call_ref =
      expr->expression()->AsSuperCallReference();
  DCHECK_NOT_NULL(super_call_ref);

  // Push the super constructor target on the stack (may be null,
  // but the Construct builtin can deal with that properly).
  VisitForAccumulatorValue(super_call_ref->this_function_var());
  __ AssertFunction(result_register());
  __ mov(result_register(),
         FieldOperand(result_register(), HeapObject::kMapOffset));
  PushOperand(FieldOperand(result_register(), Map::kPrototypeOffset));

  // Push the arguments ("left-to-right") on the stack.
  ZoneList<Expression*>* args = expr->arguments();
  int arg_count = args->length();
  for (int i = 0; i < arg_count; i++) {
    VisitForStackValue(args->at(i));
  }

  // Call the construct call builtin that handles allocation and
  // constructor invocation.
  SetConstructCallPosition(expr);

  // Load new target into edx.
  VisitForAccumulatorValue(super_call_ref->new_target_var());
  __ mov(edx, result_register());

  // Load function and argument count into edi and eax.
  __ Move(eax, Immediate(arg_count));
  __ mov(edi, Operand(esp, arg_count * kPointerSize));

  __ Call(isolate()->builtins()->Construct(), RelocInfo::CODE_TARGET);
  OperandStackDepthDecrement(arg_count + 1);

  RecordJSReturnSite(expr);

  // Restore context register.
  __ mov(esi, Operand(ebp, StandardFrameConstants::kContextOffset));
  context()->Plug(eax);
}

void FullCodeGenerator::EmitIsSmi(CallRuntime* expr) {
  ZoneList<Expression*>* args = expr->arguments();
  DCHECK(args->length() == 1);

  VisitForAccumulatorValue(args->at(0));

  Label materialize_true, materialize_false;
  Label* if_true = NULL;
  Label* if_false = NULL;
  Label* fall_through = NULL;
  context()->PrepareTest(&materialize_true, &materialize_false,
                         &if_true, &if_false, &fall_through);

  PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
  __ test(eax, Immediate(kSmiTagMask));
  Split(zero, if_true, if_false, fall_through);

  context()->Plug(if_true, if_false);
}

void FullCodeGenerator::EmitIsJSReceiver(CallRuntime* expr) {
  ZoneList<Expression*>* args = expr->arguments();
  DCHECK(args->length() == 1);

  VisitForAccumulatorValue(args->at(0));

  Label materialize_true, materialize_false;
  Label* if_true = NULL;
  Label* if_false = NULL;
  Label* fall_through = NULL;
  context()->PrepareTest(&materialize_true, &materialize_false,
                         &if_true, &if_false, &fall_through);

  __ JumpIfSmi(eax, if_false);
  __ CmpObjectType(eax, FIRST_JS_RECEIVER_TYPE, ebx);
  PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
  Split(above_equal, if_true, if_false, fall_through);

  context()->Plug(if_true, if_false);
}

void FullCodeGenerator::EmitIsArray(CallRuntime* expr) {
  ZoneList<Expression*>* args = expr->arguments();
  DCHECK(args->length() == 1);

  VisitForAccumulatorValue(args->at(0));

  Label materialize_true, materialize_false;
  Label* if_true = NULL;
  Label* if_false = NULL;
  Label* fall_through = NULL;
  context()->PrepareTest(&materialize_true, &materialize_false,
                         &if_true, &if_false, &fall_through);

  __ JumpIfSmi(eax, if_false);
  __ CmpObjectType(eax, JS_ARRAY_TYPE, ebx);
  PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
  Split(equal, if_true, if_false, fall_through);

  context()->Plug(if_true, if_false);
}


void FullCodeGenerator::EmitIsTypedArray(CallRuntime* expr) {
  ZoneList<Expression*>* args = expr->arguments();
  DCHECK(args->length() == 1);

  VisitForAccumulatorValue(args->at(0));

  Label materialize_true, materialize_false;
  Label* if_true = NULL;
  Label* if_false = NULL;
  Label* fall_through = NULL;
  context()->PrepareTest(&materialize_true, &materialize_false, &if_true,
                         &if_false, &fall_through);

  __ JumpIfSmi(eax, if_false);
  __ CmpObjectType(eax, JS_TYPED_ARRAY_TYPE, ebx);
  PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
  Split(equal, if_true, if_false, fall_through);

  context()->Plug(if_true, if_false);
}


void FullCodeGenerator::EmitIsRegExp(CallRuntime* expr) {
  ZoneList<Expression*>* args = expr->arguments();
  DCHECK(args->length() == 1);

  VisitForAccumulatorValue(args->at(0));

  Label materialize_true, materialize_false;
  Label* if_true = NULL;
  Label* if_false = NULL;
  Label* fall_through = NULL;
  context()->PrepareTest(&materialize_true, &materialize_false,
                         &if_true, &if_false, &fall_through);

  __ JumpIfSmi(eax, if_false);
  __ CmpObjectType(eax, JS_REGEXP_TYPE, ebx);
  PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
  Split(equal, if_true, if_false, fall_through);

  context()->Plug(if_true, if_false);
}


void FullCodeGenerator::EmitIsJSProxy(CallRuntime* expr) {
  ZoneList<Expression*>* args = expr->arguments();
  DCHECK(args->length() == 1);

  VisitForAccumulatorValue(args->at(0));

  Label materialize_true, materialize_false;
  Label* if_true = NULL;
  Label* if_false = NULL;
  Label* fall_through = NULL;
  context()->PrepareTest(&materialize_true, &materialize_false, &if_true,
                         &if_false, &fall_through);

  __ JumpIfSmi(eax, if_false);
  __ CmpObjectType(eax, JS_PROXY_TYPE, ebx);
  PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
  Split(equal, if_true, if_false, fall_through);

  context()->Plug(if_true, if_false);
}

void FullCodeGenerator::EmitClassOf(CallRuntime* expr) {
  ZoneList<Expression*>* args = expr->arguments();
  DCHECK(args->length() == 1);
  Label done, null, function, non_function_constructor;

  VisitForAccumulatorValue(args->at(0));

  // If the object is not a JSReceiver, we return null.
  __ JumpIfSmi(eax, &null, Label::kNear);
  STATIC_ASSERT(LAST_JS_RECEIVER_TYPE == LAST_TYPE);
  __ CmpObjectType(eax, FIRST_JS_RECEIVER_TYPE, eax);
  __ j(below, &null, Label::kNear);

  // Return 'Function' for JSFunction objects.
  __ CmpInstanceType(eax, JS_FUNCTION_TYPE);
  __ j(equal, &function, Label::kNear);

  // Check if the constructor in the map is a JS function.
  __ GetMapConstructor(eax, eax, ebx);
  __ CmpInstanceType(ebx, JS_FUNCTION_TYPE);
  __ j(not_equal, &non_function_constructor, Label::kNear);

  // eax now contains the constructor function. Grab the
  // instance class name from there.
  __ mov(eax, FieldOperand(eax, JSFunction::kSharedFunctionInfoOffset));
  __ mov(eax, FieldOperand(eax, SharedFunctionInfo::kInstanceClassNameOffset));
  __ jmp(&done, Label::kNear);

  // Non-JS objects have class null.
  __ bind(&null);
  __ mov(eax, isolate()->factory()->null_value());
  __ jmp(&done, Label::kNear);

  // Functions have class 'Function'.
  __ bind(&function);
  __ mov(eax, isolate()->factory()->Function_string());
  __ jmp(&done, Label::kNear);

  // Objects with a non-function constructor have class 'Object'.
  __ bind(&non_function_constructor);
  __ mov(eax, isolate()->factory()->Object_string());

  // All done.
  __ bind(&done);

  context()->Plug(eax);
}

2984void FullCodeGenerator::EmitValueOf(CallRuntime* expr) {
2985 ZoneList<Expression*>* args = expr->arguments();
2986 DCHECK(args->length() == 1);
2987
2988 VisitForAccumulatorValue(args->at(0)); // Load the object.
2989
2990 Label done;
2991 // If the object is a smi return the object.
2992 __ JumpIfSmi(eax, &done, Label::kNear);
2993 // If the object is not a value type, return the object.
2994 __ CmpObjectType(eax, JS_VALUE_TYPE, ebx);
2995 __ j(not_equal, &done, Label::kNear);
2996 __ mov(eax, FieldOperand(eax, JSValue::kValueOffset));
2997
2998 __ bind(&done);
2999 context()->Plug(eax);
3000}
3001
3002
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00003003void FullCodeGenerator::EmitOneByteSeqStringSetChar(CallRuntime* expr) {
3004 ZoneList<Expression*>* args = expr->arguments();
3005 DCHECK_EQ(3, args->length());
3006
3007 Register string = eax;
3008 Register index = ebx;
3009 Register value = ecx;
3010
3011 VisitForStackValue(args->at(0)); // index
3012 VisitForStackValue(args->at(1)); // value
3013 VisitForAccumulatorValue(args->at(2)); // string
3014
Ben Murdoch097c5b22016-05-18 11:27:45 +01003015 PopOperand(value);
3016 PopOperand(index);
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00003017
3018 if (FLAG_debug_code) {
3019 __ test(value, Immediate(kSmiTagMask));
3020 __ Check(zero, kNonSmiValue);
3021 __ test(index, Immediate(kSmiTagMask));
3022 __ Check(zero, kNonSmiValue);
3023 }
3024
3025 __ SmiUntag(value);
3026 __ SmiUntag(index);
3027
3028 if (FLAG_debug_code) {
3029 static const uint32_t one_byte_seq_type = kSeqStringTag | kOneByteStringTag;
3030 __ EmitSeqStringSetCharCheck(string, index, value, one_byte_seq_type);
3031 }
3032
3033 __ mov_b(FieldOperand(string, index, times_1, SeqOneByteString::kHeaderSize),
3034 value);
3035 context()->Plug(string);
3036}
3037
3038
3039void FullCodeGenerator::EmitTwoByteSeqStringSetChar(CallRuntime* expr) {
3040 ZoneList<Expression*>* args = expr->arguments();
3041 DCHECK_EQ(3, args->length());
3042
3043 Register string = eax;
3044 Register index = ebx;
3045 Register value = ecx;
3046
3047 VisitForStackValue(args->at(0)); // index
3048 VisitForStackValue(args->at(1)); // value
3049 VisitForAccumulatorValue(args->at(2)); // string
Ben Murdoch097c5b22016-05-18 11:27:45 +01003050 PopOperand(value);
3051 PopOperand(index);
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00003052
3053 if (FLAG_debug_code) {
3054 __ test(value, Immediate(kSmiTagMask));
3055 __ Check(zero, kNonSmiValue);
3056 __ test(index, Immediate(kSmiTagMask));
3057 __ Check(zero, kNonSmiValue);
3058 __ SmiUntag(index);
3059 static const uint32_t two_byte_seq_type = kSeqStringTag | kTwoByteStringTag;
3060 __ EmitSeqStringSetCharCheck(string, index, value, two_byte_seq_type);
3061 __ SmiTag(index);
3062 }
3063
3064 __ SmiUntag(value);
3065 // No need to untag a smi for two-byte addressing.
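  // (On ia32 a smi is the integer value shifted left by one bit, so a smi
  // index is already scaled by the two-byte element size.)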
  __ mov_w(FieldOperand(string, index, times_1, SeqTwoByteString::kHeaderSize),
           value);
  context()->Plug(string);
}


void FullCodeGenerator::EmitToInteger(CallRuntime* expr) {
  ZoneList<Expression*>* args = expr->arguments();
  DCHECK_EQ(1, args->length());

  // Load the argument into eax and convert it.
  VisitForAccumulatorValue(args->at(0));

  // Convert the object to an integer.
  Label done_convert;
  __ JumpIfSmi(eax, &done_convert, Label::kNear);
  __ Push(eax);
  __ CallRuntime(Runtime::kToInteger);
  __ bind(&done_convert);
  context()->Plug(eax);
}


void FullCodeGenerator::EmitStringCharFromCode(CallRuntime* expr) {
  ZoneList<Expression*>* args = expr->arguments();
  DCHECK(args->length() == 1);

  VisitForAccumulatorValue(args->at(0));

  Label done;
  StringCharFromCodeGenerator generator(eax, ebx);
  generator.GenerateFast(masm_);
  __ jmp(&done);

  NopRuntimeCallHelper call_helper;
  generator.GenerateSlow(masm_, call_helper);

  __ bind(&done);
  context()->Plug(ebx);
}


void FullCodeGenerator::EmitStringCharCodeAt(CallRuntime* expr) {
  ZoneList<Expression*>* args = expr->arguments();
  DCHECK(args->length() == 2);

  VisitForStackValue(args->at(0));
  VisitForAccumulatorValue(args->at(1));

  Register object = ebx;
  Register index = eax;
  Register result = edx;

  PopOperand(object);

  Label need_conversion;
  Label index_out_of_range;
  Label done;
  StringCharCodeAtGenerator generator(object,
                                      index,
                                      result,
                                      &need_conversion,
                                      &need_conversion,
                                      &index_out_of_range,
                                      STRING_INDEX_IS_NUMBER);
  generator.GenerateFast(masm_);
  __ jmp(&done);

  __ bind(&index_out_of_range);
  // When the index is out of range, the spec requires us to return
  // NaN.
  __ Move(result, Immediate(isolate()->factory()->nan_value()));
  __ jmp(&done);

  __ bind(&need_conversion);
  // Move the undefined value into the result register, which will
  // trigger conversion.
  __ Move(result, Immediate(isolate()->factory()->undefined_value()));
  __ jmp(&done);

  NopRuntimeCallHelper call_helper;
  generator.GenerateSlow(masm_, NOT_PART_OF_IC_HANDLER, call_helper);

  __ bind(&done);
  context()->Plug(result);
}


void FullCodeGenerator::EmitStringCharAt(CallRuntime* expr) {
  ZoneList<Expression*>* args = expr->arguments();
  DCHECK(args->length() == 2);

  VisitForStackValue(args->at(0));
  VisitForAccumulatorValue(args->at(1));

  Register object = ebx;
  Register index = eax;
  Register scratch = edx;
  Register result = eax;

  PopOperand(object);

  Label need_conversion;
  Label index_out_of_range;
  Label done;
  StringCharAtGenerator generator(object,
                                  index,
                                  scratch,
                                  result,
                                  &need_conversion,
                                  &need_conversion,
                                  &index_out_of_range,
                                  STRING_INDEX_IS_NUMBER);
  generator.GenerateFast(masm_);
  __ jmp(&done);

  __ bind(&index_out_of_range);
  // When the index is out of range, the spec requires us to return
  // the empty string.
  __ Move(result, Immediate(isolate()->factory()->empty_string()));
  __ jmp(&done);

  __ bind(&need_conversion);
  // Move smi zero into the result register, which will trigger
  // conversion.
  __ Move(result, Immediate(Smi::FromInt(0)));
  __ jmp(&done);

  NopRuntimeCallHelper call_helper;
  generator.GenerateSlow(masm_, NOT_PART_OF_IC_HANDLER, call_helper);

  __ bind(&done);
  context()->Plug(result);
}


void FullCodeGenerator::EmitCall(CallRuntime* expr) {
  ZoneList<Expression*>* args = expr->arguments();
  DCHECK_LE(2, args->length());
  // Push target, receiver and arguments onto the stack.
  for (Expression* const arg : *args) {
    VisitForStackValue(arg);
  }
  PrepareForBailoutForId(expr->CallId(), NO_REGISTERS);
  // Move target to edi.
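  // (The first two pushed values are the target and the receiver; only the
  // remaining slots count as arguments.)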
  int const argc = args->length() - 2;
  __ mov(edi, Operand(esp, (argc + 1) * kPointerSize));
  // Call the target.
  __ mov(eax, Immediate(argc));
  __ Call(isolate()->builtins()->Call(), RelocInfo::CODE_TARGET);
  OperandStackDepthDecrement(argc + 1);
  // Restore context register.
  __ mov(esi, Operand(ebp, StandardFrameConstants::kContextOffset));
  // Discard the function left on TOS.
  context()->DropAndPlug(1, eax);
}


void FullCodeGenerator::EmitHasCachedArrayIndex(CallRuntime* expr) {
  ZoneList<Expression*>* args = expr->arguments();
  DCHECK(args->length() == 1);

  VisitForAccumulatorValue(args->at(0));

  __ AssertString(eax);

  Label materialize_true, materialize_false;
  Label* if_true = NULL;
  Label* if_false = NULL;
  Label* fall_through = NULL;
  context()->PrepareTest(&materialize_true, &materialize_false,
                         &if_true, &if_false, &fall_through);

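  // The mask bits in the hash field are clear exactly when the string caches
  // an array index, hence the split on `zero` below.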
  __ test(FieldOperand(eax, String::kHashFieldOffset),
          Immediate(String::kContainsCachedArrayIndexMask));
  PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
  Split(zero, if_true, if_false, fall_through);

  context()->Plug(if_true, if_false);
}


void FullCodeGenerator::EmitGetCachedArrayIndex(CallRuntime* expr) {
  ZoneList<Expression*>* args = expr->arguments();
  DCHECK(args->length() == 1);
  VisitForAccumulatorValue(args->at(0));

  __ AssertString(eax);

  __ mov(eax, FieldOperand(eax, String::kHashFieldOffset));
  __ IndexFromHash(eax, eax);

  context()->Plug(eax);
}


void FullCodeGenerator::EmitGetSuperConstructor(CallRuntime* expr) {
  ZoneList<Expression*>* args = expr->arguments();
  DCHECK_EQ(1, args->length());
  VisitForAccumulatorValue(args->at(0));
  __ AssertFunction(eax);
  __ mov(eax, FieldOperand(eax, HeapObject::kMapOffset));
  __ mov(eax, FieldOperand(eax, Map::kPrototypeOffset));
  context()->Plug(eax);
}


void FullCodeGenerator::EmitDebugIsActive(CallRuntime* expr) {
  DCHECK(expr->arguments()->length() == 0);
  ExternalReference debug_is_active =
      ExternalReference::debug_is_active_address(isolate());
  __ movzx_b(eax, Operand::StaticVariable(debug_is_active));
  __ SmiTag(eax);
  context()->Plug(eax);
}


void FullCodeGenerator::EmitCreateIterResultObject(CallRuntime* expr) {
  ZoneList<Expression*>* args = expr->arguments();
  DCHECK_EQ(2, args->length());
  VisitForStackValue(args->at(0));
  VisitForStackValue(args->at(1));

  Label runtime, done;

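  // A JSIteratorResult is five words: map, properties, elements, value and
  // done. The first three are filled in from known roots; value and done are
  // popped off the operand stack below.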
  __ Allocate(JSIteratorResult::kSize, eax, ecx, edx, &runtime, TAG_OBJECT);
  __ mov(ebx, NativeContextOperand());
  __ mov(ebx, ContextOperand(ebx, Context::ITERATOR_RESULT_MAP_INDEX));
  __ mov(FieldOperand(eax, HeapObject::kMapOffset), ebx);
  __ mov(FieldOperand(eax, JSObject::kPropertiesOffset),
         isolate()->factory()->empty_fixed_array());
  __ mov(FieldOperand(eax, JSObject::kElementsOffset),
         isolate()->factory()->empty_fixed_array());
  __ pop(FieldOperand(eax, JSIteratorResult::kDoneOffset));
  __ pop(FieldOperand(eax, JSIteratorResult::kValueOffset));
  STATIC_ASSERT(JSIteratorResult::kSize == 5 * kPointerSize);
  __ jmp(&done, Label::kNear);

  __ bind(&runtime);
  CallRuntimeWithOperands(Runtime::kCreateIterResultObject);

  __ bind(&done);
  context()->Plug(eax);
}


void FullCodeGenerator::EmitLoadJSRuntimeFunction(CallRuntime* expr) {
  // Push undefined as receiver.
  PushOperand(isolate()->factory()->undefined_value());

  __ LoadGlobalFunction(expr->context_index(), eax);
}


void FullCodeGenerator::EmitCallJSRuntimeFunction(CallRuntime* expr) {
  ZoneList<Expression*>* args = expr->arguments();
  int arg_count = args->length();

  SetCallPosition(expr);
  __ mov(edi, Operand(esp, (arg_count + 1) * kPointerSize));
  __ Set(eax, arg_count);
  __ Call(isolate()->builtins()->Call(ConvertReceiverMode::kNullOrUndefined),
          RelocInfo::CODE_TARGET);
  OperandStackDepthDecrement(arg_count + 1);
}


void FullCodeGenerator::VisitCallRuntime(CallRuntime* expr) {
  ZoneList<Expression*>* args = expr->arguments();
  int arg_count = args->length();

  if (expr->is_jsruntime()) {
    Comment cmnt(masm_, "[ CallRuntime");
    EmitLoadJSRuntimeFunction(expr);

    // Push the target function under the receiver.
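    // (Duplicate the receiver on top of the stack, then overwrite its old
    // slot with the function just loaded into eax.)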
    PushOperand(Operand(esp, 0));
    __ mov(Operand(esp, kPointerSize), eax);

    // Push the arguments ("left-to-right").
    for (int i = 0; i < arg_count; i++) {
      VisitForStackValue(args->at(i));
    }

    PrepareForBailoutForId(expr->CallId(), NO_REGISTERS);
    EmitCallJSRuntimeFunction(expr);

    // Restore context register.
    __ mov(esi, Operand(ebp, StandardFrameConstants::kContextOffset));
    context()->DropAndPlug(1, eax);

  } else {
    const Runtime::Function* function = expr->function();
    switch (function->function_id) {
#define CALL_INTRINSIC_GENERATOR(Name)     \
  case Runtime::kInline##Name: {           \
    Comment cmnt(masm_, "[ Inline" #Name); \
    return Emit##Name(expr);               \
  }
      FOR_EACH_FULL_CODE_INTRINSIC(CALL_INTRINSIC_GENERATOR)
#undef CALL_INTRINSIC_GENERATOR
      default: {
        Comment cmnt(masm_, "[ CallRuntime for unhandled intrinsic");
        // Push the arguments ("left-to-right").
        for (int i = 0; i < arg_count; i++) {
          VisitForStackValue(args->at(i));
        }

        // Call the C runtime function.
        PrepareForBailoutForId(expr->CallId(), NO_REGISTERS);
        __ CallRuntime(expr->function(), arg_count);
        OperandStackDepthDecrement(arg_count);
        context()->Plug(eax);
      }
    }
  }
}


void FullCodeGenerator::VisitUnaryOperation(UnaryOperation* expr) {
  switch (expr->op()) {
    case Token::DELETE: {
      Comment cmnt(masm_, "[ UnaryOperation (DELETE)");
      Property* property = expr->expression()->AsProperty();
      VariableProxy* proxy = expr->expression()->AsVariableProxy();

      if (property != NULL) {
        VisitForStackValue(property->obj());
        VisitForStackValue(property->key());
        CallRuntimeWithOperands(is_strict(language_mode())
                                    ? Runtime::kDeleteProperty_Strict
                                    : Runtime::kDeleteProperty_Sloppy);
        context()->Plug(eax);
      } else if (proxy != NULL) {
        Variable* var = proxy->var();
        // Delete of an unqualified identifier is disallowed in strict mode but
        // "delete this" is allowed.
        bool is_this = var->HasThisName(isolate());
        DCHECK(is_sloppy(language_mode()) || is_this);
        if (var->IsUnallocatedOrGlobalSlot()) {
          __ mov(eax, NativeContextOperand());
          __ push(ContextOperand(eax, Context::EXTENSION_INDEX));
          __ push(Immediate(var->name()));
          __ CallRuntime(Runtime::kDeleteProperty_Sloppy);
          context()->Plug(eax);
        } else if (var->IsStackAllocated() || var->IsContextSlot()) {
          // Result of deleting non-global variables is false. 'this' is
          // not really a variable, though we implement it as one. The
          // subexpression does not have side effects.
          context()->Plug(is_this);
        } else {
          // Non-global variable. Call the runtime to try to delete from the
          // context where the variable was introduced.
          __ Push(var->name());
          __ CallRuntime(Runtime::kDeleteLookupSlot);
          context()->Plug(eax);
        }
      } else {
        // Result of deleting non-property, non-variable reference is true.
        // The subexpression may have side effects.
        VisitForEffect(expr->expression());
        context()->Plug(true);
      }
      break;
    }

    case Token::VOID: {
      Comment cmnt(masm_, "[ UnaryOperation (VOID)");
      VisitForEffect(expr->expression());
      context()->Plug(isolate()->factory()->undefined_value());
      break;
    }

    case Token::NOT: {
      Comment cmnt(masm_, "[ UnaryOperation (NOT)");
      if (context()->IsEffect()) {
        // Unary NOT has no side effects so it's only necessary to visit the
        // subexpression. Match the optimizing compiler by not branching.
        VisitForEffect(expr->expression());
      } else if (context()->IsTest()) {
        const TestContext* test = TestContext::cast(context());
        // The labels are swapped for the recursive call.
        VisitForControl(expr->expression(),
                        test->false_label(),
                        test->true_label(),
                        test->fall_through());
        context()->Plug(test->true_label(), test->false_label());
      } else {
        // We handle value contexts explicitly rather than simply visiting
        // for control and plugging the control flow into the context,
        // because we need to prepare a pair of extra administrative AST ids
        // for the optimizing compiler.
        DCHECK(context()->IsAccumulatorValue() || context()->IsStackValue());
        Label materialize_true, materialize_false, done;
        VisitForControl(expr->expression(),
                        &materialize_false,
                        &materialize_true,
                        &materialize_true);
        if (!context()->IsAccumulatorValue()) OperandStackDepthIncrement(1);
        __ bind(&materialize_true);
        PrepareForBailoutForId(expr->MaterializeTrueId(), NO_REGISTERS);
        if (context()->IsAccumulatorValue()) {
          __ mov(eax, isolate()->factory()->true_value());
        } else {
          __ Push(isolate()->factory()->true_value());
        }
        __ jmp(&done, Label::kNear);
        __ bind(&materialize_false);
        PrepareForBailoutForId(expr->MaterializeFalseId(), NO_REGISTERS);
        if (context()->IsAccumulatorValue()) {
          __ mov(eax, isolate()->factory()->false_value());
        } else {
          __ Push(isolate()->factory()->false_value());
        }
        __ bind(&done);
      }
      break;
    }

    case Token::TYPEOF: {
      Comment cmnt(masm_, "[ UnaryOperation (TYPEOF)");
      {
        AccumulatorValueContext context(this);
        VisitForTypeofValue(expr->expression());
      }
      __ mov(ebx, eax);
      TypeofStub typeof_stub(isolate());
      __ CallStub(&typeof_stub);
      context()->Plug(eax);
      break;
    }

    default:
      UNREACHABLE();
  }
}


void FullCodeGenerator::VisitCountOperation(CountOperation* expr) {
  DCHECK(expr->expression()->IsValidReferenceExpressionOrThis());

  Comment cmnt(masm_, "[ CountOperation");

  Property* prop = expr->expression()->AsProperty();
  LhsKind assign_type = Property::GetAssignType(prop);

  // Evaluate expression and get value.
  if (assign_type == VARIABLE) {
    DCHECK(expr->expression()->AsVariableProxy()->var() != NULL);
    AccumulatorValueContext context(this);
    EmitVariableLoad(expr->expression()->AsVariableProxy());
  } else {
    // Reserve space for result of postfix operation.
    if (expr->is_postfix() && !context()->IsEffect()) {
      PushOperand(Smi::FromInt(0));
    }
    switch (assign_type) {
      case NAMED_PROPERTY: {
        // Put the object both on the stack and in the register.
        VisitForStackValue(prop->obj());
        __ mov(LoadDescriptor::ReceiverRegister(), Operand(esp, 0));
        EmitNamedPropertyLoad(prop);
        break;
      }

      case NAMED_SUPER_PROPERTY: {
        VisitForStackValue(prop->obj()->AsSuperPropertyReference()->this_var());
        VisitForAccumulatorValue(
            prop->obj()->AsSuperPropertyReference()->home_object());
        PushOperand(result_register());
        PushOperand(MemOperand(esp, kPointerSize));
        PushOperand(result_register());
        EmitNamedSuperPropertyLoad(prop);
        break;
      }

      case KEYED_SUPER_PROPERTY: {
        VisitForStackValue(prop->obj()->AsSuperPropertyReference()->this_var());
        VisitForStackValue(
            prop->obj()->AsSuperPropertyReference()->home_object());
        VisitForAccumulatorValue(prop->key());
        PushOperand(result_register());
        PushOperand(MemOperand(esp, 2 * kPointerSize));
        PushOperand(MemOperand(esp, 2 * kPointerSize));
        PushOperand(result_register());
        EmitKeyedSuperPropertyLoad(prop);
        break;
      }

      case KEYED_PROPERTY: {
        VisitForStackValue(prop->obj());
        VisitForStackValue(prop->key());
        __ mov(LoadDescriptor::ReceiverRegister(),
               Operand(esp, kPointerSize));  // Object.
        __ mov(LoadDescriptor::NameRegister(), Operand(esp, 0));  // Key.
        EmitKeyedPropertyLoad(prop);
        break;
      }

      case VARIABLE:
        UNREACHABLE();
    }
  }

  // We need a second deoptimization point after loading the value
  // in case evaluating the property load has a side effect.
  if (assign_type == VARIABLE) {
    PrepareForBailout(expr->expression(), TOS_REG);
  } else {
    PrepareForBailoutForId(prop->LoadId(), TOS_REG);
  }

  // Inline smi case if we are in a loop.
  Label done, stub_call;
  JumpPatchSite patch_site(masm_);
  if (ShouldInlineSmiCase(expr->op())) {
    Label slow;
    patch_site.EmitJumpIfNotSmi(eax, &slow, Label::kNear);

    // Save result for postfix expressions.
    if (expr->is_postfix()) {
      if (!context()->IsEffect()) {
        // Save the result on the stack. If we have a named or keyed property
        // we store the result under the receiver that is currently on top
        // of the stack.
        switch (assign_type) {
          case VARIABLE:
            __ push(eax);
            break;
          case NAMED_PROPERTY:
            __ mov(Operand(esp, kPointerSize), eax);
            break;
          case NAMED_SUPER_PROPERTY:
            __ mov(Operand(esp, 2 * kPointerSize), eax);
            break;
          case KEYED_PROPERTY:
            __ mov(Operand(esp, 2 * kPointerSize), eax);
            break;
          case KEYED_SUPER_PROPERTY:
            __ mov(Operand(esp, 3 * kPointerSize), eax);
            break;
        }
      }
    }

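    // A smi is the integer value shifted left by one, so adding the smi
    // representation of 1 bumps the untagged value by one; a result outside
    // smi range is caught as signed overflow below.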
    if (expr->op() == Token::INC) {
      __ add(eax, Immediate(Smi::FromInt(1)));
    } else {
      __ sub(eax, Immediate(Smi::FromInt(1)));
    }
    __ j(no_overflow, &done, Label::kNear);
    // Call stub. Undo operation first.
    if (expr->op() == Token::INC) {
      __ sub(eax, Immediate(Smi::FromInt(1)));
    } else {
      __ add(eax, Immediate(Smi::FromInt(1)));
    }
    __ jmp(&stub_call, Label::kNear);
    __ bind(&slow);
  }
  if (!is_strong(language_mode())) {
    ToNumberStub convert_stub(isolate());
    __ CallStub(&convert_stub);
    PrepareForBailoutForId(expr->ToNumberId(), TOS_REG);
  }

  // Save result for postfix expressions.
  if (expr->is_postfix()) {
    if (!context()->IsEffect()) {
      // Save the result on the stack. If we have a named or keyed property
      // we store the result under the receiver that is currently on top
      // of the stack.
      switch (assign_type) {
        case VARIABLE:
          PushOperand(eax);
          break;
        case NAMED_PROPERTY:
          __ mov(Operand(esp, kPointerSize), eax);
          break;
        case NAMED_SUPER_PROPERTY:
          __ mov(Operand(esp, 2 * kPointerSize), eax);
          break;
        case KEYED_PROPERTY:
          __ mov(Operand(esp, 2 * kPointerSize), eax);
          break;
        case KEYED_SUPER_PROPERTY:
          __ mov(Operand(esp, 3 * kPointerSize), eax);
          break;
      }
    }
  }

  SetExpressionPosition(expr);

  // Call stub for +1/-1.
  __ bind(&stub_call);
  __ mov(edx, eax);
  __ mov(eax, Immediate(Smi::FromInt(1)));
  Handle<Code> code =
      CodeFactory::BinaryOpIC(isolate(), expr->binary_op()).code();
  CallIC(code, expr->CountBinOpFeedbackId());
  patch_site.EmitPatchInfo();
  __ bind(&done);

  if (is_strong(language_mode())) {
    PrepareForBailoutForId(expr->ToNumberId(), TOS_REG);
  }
  // Store the value returned in eax.
  switch (assign_type) {
    case VARIABLE:
      if (expr->is_postfix()) {
        // Perform the assignment as if via '='.
        { EffectContext context(this);
          EmitVariableAssignment(expr->expression()->AsVariableProxy()->var(),
                                 Token::ASSIGN, expr->CountSlot());
          PrepareForBailoutForId(expr->AssignmentId(), TOS_REG);
          context.Plug(eax);
        }
        // For all contexts except EffectContext we have the result on
        // top of the stack.
        if (!context()->IsEffect()) {
          context()->PlugTOS();
        }
      } else {
        // Perform the assignment as if via '='.
        EmitVariableAssignment(expr->expression()->AsVariableProxy()->var(),
                               Token::ASSIGN, expr->CountSlot());
        PrepareForBailoutForId(expr->AssignmentId(), TOS_REG);
        context()->Plug(eax);
      }
      break;
    case NAMED_PROPERTY: {
      __ mov(StoreDescriptor::NameRegister(),
             prop->key()->AsLiteral()->value());
      PopOperand(StoreDescriptor::ReceiverRegister());
      EmitLoadStoreICSlot(expr->CountSlot());
      CallStoreIC();
      PrepareForBailoutForId(expr->AssignmentId(), TOS_REG);
      if (expr->is_postfix()) {
        if (!context()->IsEffect()) {
          context()->PlugTOS();
        }
      } else {
        context()->Plug(eax);
      }
      break;
    }
    case NAMED_SUPER_PROPERTY: {
      EmitNamedSuperPropertyStore(prop);
      if (expr->is_postfix()) {
        if (!context()->IsEffect()) {
          context()->PlugTOS();
        }
      } else {
        context()->Plug(eax);
      }
      break;
    }
    case KEYED_SUPER_PROPERTY: {
      EmitKeyedSuperPropertyStore(prop);
      if (expr->is_postfix()) {
        if (!context()->IsEffect()) {
          context()->PlugTOS();
        }
      } else {
        context()->Plug(eax);
      }
      break;
    }
    case KEYED_PROPERTY: {
      PopOperand(StoreDescriptor::NameRegister());
      PopOperand(StoreDescriptor::ReceiverRegister());
      Handle<Code> ic =
          CodeFactory::KeyedStoreIC(isolate(), language_mode()).code();
      EmitLoadStoreICSlot(expr->CountSlot());
      CallIC(ic);
      PrepareForBailoutForId(expr->AssignmentId(), TOS_REG);
      if (expr->is_postfix()) {
        // Result is on the stack.
        if (!context()->IsEffect()) {
          context()->PlugTOS();
        }
      } else {
        context()->Plug(eax);
      }
      break;
    }
  }
}


void FullCodeGenerator::EmitLiteralCompareTypeof(Expression* expr,
                                                 Expression* sub_expr,
                                                 Handle<String> check) {
  Label materialize_true, materialize_false;
  Label* if_true = NULL;
  Label* if_false = NULL;
  Label* fall_through = NULL;
  context()->PrepareTest(&materialize_true, &materialize_false,
                         &if_true, &if_false, &fall_through);

  { AccumulatorValueContext context(this);
    VisitForTypeofValue(sub_expr);
  }
  PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);

  Factory* factory = isolate()->factory();
  if (String::Equals(check, factory->number_string())) {
    __ JumpIfSmi(eax, if_true);
    __ cmp(FieldOperand(eax, HeapObject::kMapOffset),
           isolate()->factory()->heap_number_map());
    Split(equal, if_true, if_false, fall_through);
  } else if (String::Equals(check, factory->string_string())) {
    __ JumpIfSmi(eax, if_false);
    __ CmpObjectType(eax, FIRST_NONSTRING_TYPE, edx);
    Split(below, if_true, if_false, fall_through);
  } else if (String::Equals(check, factory->symbol_string())) {
    __ JumpIfSmi(eax, if_false);
    __ CmpObjectType(eax, SYMBOL_TYPE, edx);
    Split(equal, if_true, if_false, fall_through);
  } else if (String::Equals(check, factory->boolean_string())) {
    __ cmp(eax, isolate()->factory()->true_value());
    __ j(equal, if_true);
    __ cmp(eax, isolate()->factory()->false_value());
    Split(equal, if_true, if_false, fall_through);
  } else if (String::Equals(check, factory->undefined_string())) {
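    // typeof null is "object"; bail out to if_false here so null cannot fall
    // through to the undetectable-map check below.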
    __ cmp(eax, isolate()->factory()->null_value());
    __ j(equal, if_false);
    __ JumpIfSmi(eax, if_false);
    // Check for undetectable objects => true.
    __ mov(edx, FieldOperand(eax, HeapObject::kMapOffset));
    __ test_b(FieldOperand(edx, Map::kBitFieldOffset),
              1 << Map::kIsUndetectable);
    Split(not_zero, if_true, if_false, fall_through);
  } else if (String::Equals(check, factory->function_string())) {
    __ JumpIfSmi(eax, if_false);
    // Check for callable and not undetectable objects => true.
    __ mov(edx, FieldOperand(eax, HeapObject::kMapOffset));
    __ movzx_b(ecx, FieldOperand(edx, Map::kBitFieldOffset));
    __ and_(ecx, (1 << Map::kIsCallable) | (1 << Map::kIsUndetectable));
    __ cmp(ecx, 1 << Map::kIsCallable);
    Split(equal, if_true, if_false, fall_through);
  } else if (String::Equals(check, factory->object_string())) {
    __ JumpIfSmi(eax, if_false);
    __ cmp(eax, isolate()->factory()->null_value());
    __ j(equal, if_true);
    STATIC_ASSERT(LAST_JS_RECEIVER_TYPE == LAST_TYPE);
    __ CmpObjectType(eax, FIRST_JS_RECEIVER_TYPE, edx);
    __ j(below, if_false);
    // Check for callable or undetectable objects => false.
    __ test_b(FieldOperand(edx, Map::kBitFieldOffset),
              (1 << Map::kIsCallable) | (1 << Map::kIsUndetectable));
    Split(zero, if_true, if_false, fall_through);
// clang-format off
#define SIMD128_TYPE(TYPE, Type, type, lane_count, lane_type)   \
  } else if (String::Equals(check, factory->type##_string())) { \
    __ JumpIfSmi(eax, if_false);                                \
    __ cmp(FieldOperand(eax, HeapObject::kMapOffset),           \
           isolate()->factory()->type##_map());                 \
    Split(equal, if_true, if_false, fall_through);
  SIMD128_TYPES(SIMD128_TYPE)
#undef SIMD128_TYPE
  // clang-format on
  } else {
    if (if_false != fall_through) __ jmp(if_false);
  }
  context()->Plug(if_true, if_false);
}


void FullCodeGenerator::VisitCompareOperation(CompareOperation* expr) {
  Comment cmnt(masm_, "[ CompareOperation");
  SetExpressionPosition(expr);

  // First we try a fast inlined version of the compare when one of
  // the operands is a literal.
  if (TryLiteralCompare(expr)) return;

  // Always perform the comparison for its control flow. Pack the result
  // into the expression's context after the comparison is performed.
  Label materialize_true, materialize_false;
  Label* if_true = NULL;
  Label* if_false = NULL;
  Label* fall_through = NULL;
  context()->PrepareTest(&materialize_true, &materialize_false,
                         &if_true, &if_false, &fall_through);

  Token::Value op = expr->op();
  VisitForStackValue(expr->left());
  switch (op) {
    case Token::IN:
      VisitForStackValue(expr->right());
      CallRuntimeWithOperands(Runtime::kHasProperty);
      PrepareForBailoutBeforeSplit(expr, false, NULL, NULL);
      __ cmp(eax, isolate()->factory()->true_value());
      Split(equal, if_true, if_false, fall_through);
      break;

    case Token::INSTANCEOF: {
      VisitForAccumulatorValue(expr->right());
      PopOperand(edx);
      InstanceOfStub stub(isolate());
      __ CallStub(&stub);
      PrepareForBailoutBeforeSplit(expr, false, NULL, NULL);
      __ cmp(eax, isolate()->factory()->true_value());
      Split(equal, if_true, if_false, fall_through);
      break;
    }

    default: {
      VisitForAccumulatorValue(expr->right());
      Condition cc = CompareIC::ComputeCondition(op);
      PopOperand(edx);

      bool inline_smi_code = ShouldInlineSmiCase(op);
      JumpPatchSite patch_site(masm_);
      if (inline_smi_code) {
        Label slow_case;
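        // Or the operands together: the smi tag is the low bit, so the
        // result has a clear tag bit only if both operands are smis.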
        __ mov(ecx, edx);
        __ or_(ecx, eax);
        patch_site.EmitJumpIfNotSmi(ecx, &slow_case, Label::kNear);
        __ cmp(edx, eax);
        Split(cc, if_true, if_false, NULL);
        __ bind(&slow_case);
      }

      Handle<Code> ic = CodeFactory::CompareIC(isolate(), op).code();
      CallIC(ic, expr->CompareOperationFeedbackId());
      patch_site.EmitPatchInfo();

      PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
      __ test(eax, eax);
      Split(cc, if_true, if_false, fall_through);
    }
  }

  // Convert the result of the comparison into one expected for this
  // expression's context.
  context()->Plug(if_true, if_false);
}


void FullCodeGenerator::EmitLiteralCompareNil(CompareOperation* expr,
                                              Expression* sub_expr,
                                              NilValue nil) {
  Label materialize_true, materialize_false;
  Label* if_true = NULL;
  Label* if_false = NULL;
  Label* fall_through = NULL;
  context()->PrepareTest(&materialize_true, &materialize_false,
                         &if_true, &if_false, &fall_through);

  VisitForAccumulatorValue(sub_expr);
  PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);

  Handle<Object> nil_value = nil == kNullValue
      ? isolate()->factory()->null_value()
      : isolate()->factory()->undefined_value();
  if (expr->op() == Token::EQ_STRICT) {
    __ cmp(eax, nil_value);
    Split(equal, if_true, if_false, fall_through);
  } else {
    Handle<Code> ic = CompareNilICStub::GetUninitialized(isolate(), nil);
    CallIC(ic, expr->CompareOperationFeedbackId());
    __ cmp(eax, isolate()->factory()->true_value());
    Split(equal, if_true, if_false, fall_through);
  }
  context()->Plug(if_true, if_false);
}


void FullCodeGenerator::VisitThisFunction(ThisFunction* expr) {
  __ mov(eax, Operand(ebp, JavaScriptFrameConstants::kFunctionOffset));
  context()->Plug(eax);
}


Register FullCodeGenerator::result_register() {
  return eax;
}


Register FullCodeGenerator::context_register() {
  return esi;
}


void FullCodeGenerator::StoreToFrameField(int frame_offset, Register value) {
  DCHECK_EQ(POINTER_SIZE_ALIGN(frame_offset), frame_offset);
  __ mov(Operand(ebp, frame_offset), value);
}


void FullCodeGenerator::LoadContextField(Register dst, int context_index) {
  __ mov(dst, ContextOperand(esi, context_index));
}


void FullCodeGenerator::PushFunctionArgumentForContextAllocation() {
  Scope* closure_scope = scope()->ClosureScope();
  if (closure_scope->is_script_scope() ||
      closure_scope->is_module_scope()) {
    // Contexts nested in the native context have a canonical empty function
    // as their closure, not the anonymous closure containing the global
    // code.
    __ mov(eax, NativeContextOperand());
    PushOperand(ContextOperand(eax, Context::CLOSURE_INDEX));
  } else if (closure_scope->is_eval_scope()) {
    // Contexts nested inside eval code have the same closure as the context
    // calling eval, not the anonymous closure containing the eval code.
    // Fetch it from the context.
    PushOperand(ContextOperand(esi, Context::CLOSURE_INDEX));
  } else {
    DCHECK(closure_scope->is_function_scope());
    PushOperand(Operand(ebp, JavaScriptFrameConstants::kFunctionOffset));
  }
}


// ----------------------------------------------------------------------------
// Non-local control flow support.

void FullCodeGenerator::EnterFinallyBlock() {
  // Store pending message while executing finally block.
  ExternalReference pending_message_obj =
      ExternalReference::address_of_pending_message_obj(isolate());
  __ mov(edx, Operand::StaticVariable(pending_message_obj));
  PushOperand(edx);

  ClearPendingMessage();
}


void FullCodeGenerator::ExitFinallyBlock() {
  DCHECK(!result_register().is(edx));
  // Restore pending message from stack.
  PopOperand(edx);
  ExternalReference pending_message_obj =
      ExternalReference::address_of_pending_message_obj(isolate());
  __ mov(Operand::StaticVariable(pending_message_obj), edx);
}


void FullCodeGenerator::ClearPendingMessage() {
  DCHECK(!result_register().is(edx));
  ExternalReference pending_message_obj =
      ExternalReference::address_of_pending_message_obj(isolate());
  __ mov(edx, Immediate(isolate()->factory()->the_hole_value()));
  __ mov(Operand::StaticVariable(pending_message_obj), edx);
}


void FullCodeGenerator::EmitLoadStoreICSlot(FeedbackVectorSlot slot) {
  DCHECK(!slot.IsInvalid());
  __ mov(VectorStoreICTrampolineDescriptor::SlotRegister(),
         Immediate(SmiFromSlot(slot)));
}

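// Replays any control-flow commands (return/throw/break/continue) that were
// deferred while unwinding through a finally block: the smi token popped
// into edx selects the command to perform.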
void FullCodeGenerator::DeferredCommands::EmitCommands() {
  DCHECK(!result_register().is(edx));
  __ Pop(result_register());  // Restore the accumulator.
  __ Pop(edx);                // Get the token.
  for (DeferredCommand cmd : commands_) {
    Label skip;
    __ cmp(edx, Immediate(Smi::FromInt(cmd.token)));
    __ j(not_equal, &skip);
    switch (cmd.command) {
      case kReturn:
        codegen_->EmitUnwindAndReturn();
        break;
      case kThrow:
        __ Push(result_register());
        __ CallRuntime(Runtime::kReThrow);
        break;
      case kContinue:
        codegen_->EmitContinue(cmd.target);
        break;
      case kBreak:
        codegen_->EmitBreak(cmd.target);
        break;
    }
    __ bind(&skip);
  }
}

#undef __


static const byte kJnsInstruction = 0x79;
static const byte kJnsOffset = 0x11;
static const byte kNopByteOne = 0x66;
static const byte kNopByteTwo = 0x90;
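// 0x79 0x11 encodes `jns +0x11`, skipping the interrupt call; 0x66 0x90 is
// the two-byte nop (operand-size prefix + nop) that replaces the jump so the
// patched-in call is always taken.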
#ifdef DEBUG
static const byte kCallInstruction = 0xe8;
#endif


void BackEdgeTable::PatchAt(Code* unoptimized_code,
                            Address pc,
                            BackEdgeState target_state,
                            Code* replacement_code) {
  Address call_target_address = pc - kIntSize;
  Address jns_instr_address = call_target_address - 3;
  Address jns_offset_address = call_target_address - 2;

  switch (target_state) {
    case INTERRUPT:
      // sub <profiling_counter>, <delta> ;; Not changed
      // jns ok
      // call <interrupt stub>
      // ok:
      *jns_instr_address = kJnsInstruction;
      *jns_offset_address = kJnsOffset;
      break;
    case ON_STACK_REPLACEMENT:
    case OSR_AFTER_STACK_CHECK:
      // sub <profiling_counter>, <delta> ;; Not changed
      // nop
      // nop
      // call <on-stack replacement>
      // ok:
      *jns_instr_address = kNopByteOne;
      *jns_offset_address = kNopByteTwo;
      break;
  }

  Assembler::set_target_address_at(unoptimized_code->GetIsolate(),
                                   call_target_address, unoptimized_code,
                                   replacement_code->entry());
  unoptimized_code->GetHeap()->incremental_marking()->RecordCodeTargetPatch(
      unoptimized_code, call_target_address, replacement_code);
}


BackEdgeTable::BackEdgeState BackEdgeTable::GetBackEdgeState(
    Isolate* isolate,
    Code* unoptimized_code,
    Address pc) {
  Address call_target_address = pc - kIntSize;
  Address jns_instr_address = call_target_address - 3;
  DCHECK_EQ(kCallInstruction, *(call_target_address - 1));

  if (*jns_instr_address == kJnsInstruction) {
    DCHECK_EQ(kJnsOffset, *(call_target_address - 2));
    DCHECK_EQ(isolate->builtins()->InterruptCheck()->entry(),
              Assembler::target_address_at(call_target_address,
                                           unoptimized_code));
    return INTERRUPT;
  }

  DCHECK_EQ(kNopByteOne, *jns_instr_address);
  DCHECK_EQ(kNopByteTwo, *(call_target_address - 2));

  if (Assembler::target_address_at(call_target_address, unoptimized_code) ==
      isolate->builtins()->OnStackReplacement()->entry()) {
    return ON_STACK_REPLACEMENT;
  }

  DCHECK_EQ(isolate->builtins()->OsrAfterStackCheck()->entry(),
            Assembler::target_address_at(call_target_address,
                                         unoptimized_code));
  return OSR_AFTER_STACK_CHECK;
}


}  // namespace internal
}  // namespace v8

#endif  // V8_TARGET_ARCH_X87