blob: f14aaf69b02d971265db4608951e9c14fc2518a8 [file] [log] [blame]
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00001// Copyright 2012 the V8 project authors. All rights reserved.
2// Use of this source code is governed by a BSD-style license that can be
3// found in the LICENSE file.
4
5#if V8_TARGET_ARCH_X87
6
7#include "src/ast/scopes.h"
8#include "src/code-factory.h"
9#include "src/code-stubs.h"
10#include "src/codegen.h"
11#include "src/debug/debug.h"
12#include "src/full-codegen/full-codegen.h"
13#include "src/ic/ic.h"
14#include "src/parsing/parser.h"
15#include "src/x87/frames-x87.h"
16
17namespace v8 {
18namespace internal {
19
Ben Murdoch097c5b22016-05-18 11:27:45 +010020#define __ ACCESS_MASM(masm())
Ben Murdoch4a90d5f2016-03-22 12:00:34 +000021
22class JumpPatchSite BASE_EMBEDDED {
23 public:
24 explicit JumpPatchSite(MacroAssembler* masm) : masm_(masm) {
25#ifdef DEBUG
26 info_emitted_ = false;
27#endif
28 }
29
30 ~JumpPatchSite() {
31 DCHECK(patch_site_.is_bound() == info_emitted_);
32 }
33
34 void EmitJumpIfNotSmi(Register reg,
35 Label* target,
36 Label::Distance distance = Label::kFar) {
37 __ test(reg, Immediate(kSmiTagMask));
38 EmitJump(not_carry, target, distance); // Always taken before patched.
39 }
40
41 void EmitJumpIfSmi(Register reg,
42 Label* target,
43 Label::Distance distance = Label::kFar) {
44 __ test(reg, Immediate(kSmiTagMask));
45 EmitJump(carry, target, distance); // Never taken before patched.
46 }
47
48 void EmitPatchInfo() {
49 if (patch_site_.is_bound()) {
50 int delta_to_patch_site = masm_->SizeOfCodeGeneratedSince(&patch_site_);
51 DCHECK(is_uint8(delta_to_patch_site));
52 __ test(eax, Immediate(delta_to_patch_site));
53#ifdef DEBUG
54 info_emitted_ = true;
55#endif
56 } else {
57 __ nop(); // Signals no inlined code.
58 }
59 }
60
61 private:
62 // jc will be patched with jz, jnc will become jnz.
63 void EmitJump(Condition cc, Label* target, Label::Distance distance) {
64 DCHECK(!patch_site_.is_bound() && !info_emitted_);
65 DCHECK(cc == carry || cc == not_carry);
66 __ bind(&patch_site_);
67 __ j(cc, target, distance);
68 }
69
Ben Murdoch097c5b22016-05-18 11:27:45 +010070 MacroAssembler* masm() { return masm_; }
Ben Murdoch4a90d5f2016-03-22 12:00:34 +000071 MacroAssembler* masm_;
72 Label patch_site_;
73#ifdef DEBUG
74 bool info_emitted_;
75#endif
76};
77
78
79// Generate code for a JS function. On entry to the function the receiver
80// and arguments have been pushed on the stack left to right, with the
81// return address on top of them. The actual argument count matches the
82// formal parameter count expected by the function.
83//
84// The live registers are:
85// o edi: the JS function object being called (i.e. ourselves)
86// o edx: the new target value
87// o esi: our context
88// o ebp: our caller's frame pointer
89// o esp: stack pointer (pointing to return address)
90//
91// The function builds a JS frame. Please see JavaScriptFrameConstants in
92// frames-x87.h for its layout.
93void FullCodeGenerator::Generate() {
94 CompilationInfo* info = info_;
95 profiling_counter_ = isolate()->factory()->NewCell(
96 Handle<Smi>(Smi::FromInt(FLAG_interrupt_budget), isolate()));
97 SetFunctionPosition(literal());
98 Comment cmnt(masm_, "[ function compiled by full code generator");
99
100 ProfileEntryHookStub::MaybeCallEntryHook(masm_);
101
Ben Murdoch4a90d5f2016-03-22 12:00:34 +0000102 if (FLAG_debug_code && info->ExpectsJSReceiverAsReceiver()) {
103 int receiver_offset = (info->scope()->num_parameters() + 1) * kPointerSize;
104 __ mov(ecx, Operand(esp, receiver_offset));
105 __ AssertNotSmi(ecx);
106 __ CmpObjectType(ecx, FIRST_JS_RECEIVER_TYPE, ecx);
107 __ Assert(above_equal, kSloppyFunctionExpectsJSReceiverReceiver);
108 }
109
110 // Open a frame scope to indicate that there is a frame on the stack. The
111 // MANUAL indicates that the scope shouldn't actually generate code to set up
112 // the frame (that is done below).
113 FrameScope frame_scope(masm_, StackFrame::MANUAL);
114
115 info->set_prologue_offset(masm_->pc_offset());
116 __ Prologue(info->GeneratePreagedPrologue());
117
118 { Comment cmnt(masm_, "[ Allocate locals");
119 int locals_count = info->scope()->num_stack_slots();
120 // Generators allocate locals, if any, in context slots.
121 DCHECK(!IsGeneratorFunction(literal()->kind()) || locals_count == 0);
Ben Murdoch097c5b22016-05-18 11:27:45 +0100122 OperandStackDepthIncrement(locals_count);
Ben Murdoch4a90d5f2016-03-22 12:00:34 +0000123 if (locals_count == 1) {
124 __ push(Immediate(isolate()->factory()->undefined_value()));
125 } else if (locals_count > 1) {
126 if (locals_count >= 128) {
127 Label ok;
128 __ mov(ecx, esp);
129 __ sub(ecx, Immediate(locals_count * kPointerSize));
130 ExternalReference stack_limit =
131 ExternalReference::address_of_real_stack_limit(isolate());
132 __ cmp(ecx, Operand::StaticVariable(stack_limit));
133 __ j(above_equal, &ok, Label::kNear);
134 __ CallRuntime(Runtime::kThrowStackOverflow);
135 __ bind(&ok);
136 }
137 __ mov(eax, Immediate(isolate()->factory()->undefined_value()));
138 const int kMaxPushes = 32;
139 if (locals_count >= kMaxPushes) {
140 int loop_iterations = locals_count / kMaxPushes;
141 __ mov(ecx, loop_iterations);
142 Label loop_header;
143 __ bind(&loop_header);
144 // Do pushes.
145 for (int i = 0; i < kMaxPushes; i++) {
146 __ push(eax);
147 }
148 __ dec(ecx);
149 __ j(not_zero, &loop_header, Label::kNear);
150 }
151 int remaining = locals_count % kMaxPushes;
152 // Emit the remaining pushes.
153 for (int i = 0; i < remaining; i++) {
154 __ push(eax);
155 }
156 }
157 }
158
159 bool function_in_register = true;
160
161 // Possibly allocate a local context.
162 if (info->scope()->num_heap_slots() > 0) {
163 Comment cmnt(masm_, "[ Allocate context");
164 bool need_write_barrier = true;
165 int slots = info->scope()->num_heap_slots() - Context::MIN_CONTEXT_SLOTS;
166 // Argument to NewContext is the function, which is still in edi.
167 if (info->scope()->is_script_scope()) {
168 __ push(edi);
169 __ Push(info->scope()->GetScopeInfo(info->isolate()));
170 __ CallRuntime(Runtime::kNewScriptContext);
171 PrepareForBailoutForId(BailoutId::ScriptContext(), TOS_REG);
172 // The new target value is not used, clobbering is safe.
173 DCHECK_NULL(info->scope()->new_target_var());
174 } else {
175 if (info->scope()->new_target_var() != nullptr) {
176 __ push(edx); // Preserve new target.
177 }
178 if (slots <= FastNewContextStub::kMaximumSlots) {
179 FastNewContextStub stub(isolate(), slots);
180 __ CallStub(&stub);
181 // Result of FastNewContextStub is always in new space.
182 need_write_barrier = false;
183 } else {
184 __ push(edi);
185 __ CallRuntime(Runtime::kNewFunctionContext);
186 }
187 if (info->scope()->new_target_var() != nullptr) {
188 __ pop(edx); // Restore new target.
189 }
190 }
191 function_in_register = false;
192 // Context is returned in eax. It replaces the context passed to us.
193 // It's saved in the stack and kept live in esi.
194 __ mov(esi, eax);
195 __ mov(Operand(ebp, StandardFrameConstants::kContextOffset), eax);
196
197 // Copy parameters into context if necessary.
198 int num_parameters = info->scope()->num_parameters();
199 int first_parameter = info->scope()->has_this_declaration() ? -1 : 0;
200 for (int i = first_parameter; i < num_parameters; i++) {
201 Variable* var = (i == -1) ? scope()->receiver() : scope()->parameter(i);
202 if (var->IsContextSlot()) {
203 int parameter_offset = StandardFrameConstants::kCallerSPOffset +
204 (num_parameters - 1 - i) * kPointerSize;
205 // Load parameter from stack.
206 __ mov(eax, Operand(ebp, parameter_offset));
207 // Store it in the context.
208 int context_offset = Context::SlotOffset(var->index());
209 __ mov(Operand(esi, context_offset), eax);
210 // Update the write barrier. This clobbers eax and ebx.
211 if (need_write_barrier) {
212 __ RecordWriteContextSlot(esi, context_offset, eax, ebx,
213 kDontSaveFPRegs);
214 } else if (FLAG_debug_code) {
215 Label done;
216 __ JumpIfInNewSpace(esi, eax, &done, Label::kNear);
217 __ Abort(kExpectedNewSpaceObject);
218 __ bind(&done);
219 }
220 }
221 }
222 }
223
224 // Register holding this function and new target are both trashed in case we
225 // bailout here. But since that can happen only when new target is not used
226 // and we allocate a context, the value of |function_in_register| is correct.
227 PrepareForBailoutForId(BailoutId::FunctionContext(), NO_REGISTERS);
228
229 // Possibly set up a local binding to the this function which is used in
230 // derived constructors with super calls.
231 Variable* this_function_var = scope()->this_function_var();
232 if (this_function_var != nullptr) {
233 Comment cmnt(masm_, "[ This function");
234 if (!function_in_register) {
235 __ mov(edi, Operand(ebp, JavaScriptFrameConstants::kFunctionOffset));
236 // The write barrier clobbers register again, keep it marked as such.
237 }
238 SetVar(this_function_var, edi, ebx, ecx);
239 }
240
241 // Possibly set up a local binding to the new target value.
242 Variable* new_target_var = scope()->new_target_var();
243 if (new_target_var != nullptr) {
244 Comment cmnt(masm_, "[ new.target");
245 SetVar(new_target_var, edx, ebx, ecx);
246 }
247
248 // Possibly allocate RestParameters
249 int rest_index;
250 Variable* rest_param = scope()->rest_parameter(&rest_index);
251 if (rest_param) {
252 Comment cmnt(masm_, "[ Allocate rest parameter array");
Ben Murdoch097c5b22016-05-18 11:27:45 +0100253 if (!function_in_register) {
254 __ mov(edi, Operand(ebp, JavaScriptFrameConstants::kFunctionOffset));
255 }
256 FastNewRestParameterStub stub(isolate());
Ben Murdoch4a90d5f2016-03-22 12:00:34 +0000257 __ CallStub(&stub);
Ben Murdoch097c5b22016-05-18 11:27:45 +0100258 function_in_register = false;
Ben Murdoch4a90d5f2016-03-22 12:00:34 +0000259 SetVar(rest_param, eax, ebx, edx);
260 }
261
262 Variable* arguments = scope()->arguments();
263 if (arguments != NULL) {
Ben Murdoch097c5b22016-05-18 11:27:45 +0100264 // Arguments object must be allocated after the context object, in
265 // case the "arguments" or ".arguments" variables are in the context.
Ben Murdoch4a90d5f2016-03-22 12:00:34 +0000266 Comment cmnt(masm_, "[ Allocate arguments object");
Ben Murdoch4a90d5f2016-03-22 12:00:34 +0000267 if (!function_in_register) {
268 __ mov(edi, Operand(ebp, JavaScriptFrameConstants::kFunctionOffset));
269 }
Ben Murdoch097c5b22016-05-18 11:27:45 +0100270 if (is_strict(language_mode()) || !has_simple_parameters()) {
271 FastNewStrictArgumentsStub stub(isolate());
272 __ CallStub(&stub);
273 } else if (literal()->has_duplicate_parameters()) {
274 __ Push(edi);
275 __ CallRuntime(Runtime::kNewSloppyArguments_Generic);
276 } else {
277 FastNewSloppyArgumentsStub stub(isolate());
278 __ CallStub(&stub);
279 }
Ben Murdoch4a90d5f2016-03-22 12:00:34 +0000280
281 SetVar(arguments, eax, ebx, edx);
282 }
283
284 if (FLAG_trace) {
285 __ CallRuntime(Runtime::kTraceEnter);
286 }
287
Ben Murdochda12d292016-06-02 14:46:10 +0100288 // Visit the declarations and body.
289 PrepareForBailoutForId(BailoutId::FunctionEntry(), NO_REGISTERS);
290 {
Ben Murdoch4a90d5f2016-03-22 12:00:34 +0000291 Comment cmnt(masm_, "[ Declarations");
Ben Murdochda12d292016-06-02 14:46:10 +0100292 VisitDeclarations(scope()->declarations());
293 }
Ben Murdoch4a90d5f2016-03-22 12:00:34 +0000294
Ben Murdochda12d292016-06-02 14:46:10 +0100295 // Assert that the declarations do not use ICs. Otherwise the debugger
296 // won't be able to redirect a PC at an IC to the correct IC in newly
297 // recompiled code.
298 DCHECK_EQ(0, ic_total_count_);
Ben Murdoch4a90d5f2016-03-22 12:00:34 +0000299
Ben Murdochda12d292016-06-02 14:46:10 +0100300 {
301 Comment cmnt(masm_, "[ Stack check");
302 PrepareForBailoutForId(BailoutId::Declarations(), NO_REGISTERS);
303 Label ok;
304 ExternalReference stack_limit =
305 ExternalReference::address_of_stack_limit(isolate());
306 __ cmp(esp, Operand::StaticVariable(stack_limit));
307 __ j(above_equal, &ok, Label::kNear);
308 __ call(isolate()->builtins()->StackCheck(), RelocInfo::CODE_TARGET);
309 __ bind(&ok);
310 }
Ben Murdoch4a90d5f2016-03-22 12:00:34 +0000311
Ben Murdochda12d292016-06-02 14:46:10 +0100312 {
313 Comment cmnt(masm_, "[ Body");
314 DCHECK(loop_depth() == 0);
315 VisitStatements(literal()->body());
316 DCHECK(loop_depth() == 0);
Ben Murdoch4a90d5f2016-03-22 12:00:34 +0000317 }
318
319 // Always emit a 'return undefined' in case control fell off the end of
320 // the body.
321 { Comment cmnt(masm_, "[ return <undefined>;");
322 __ mov(eax, isolate()->factory()->undefined_value());
323 EmitReturnSequence();
324 }
325}
326
327
328void FullCodeGenerator::ClearAccumulator() {
329 __ Move(eax, Immediate(Smi::FromInt(0)));
330}
331
332
333void FullCodeGenerator::EmitProfilingCounterDecrement(int delta) {
334 __ mov(ebx, Immediate(profiling_counter_));
335 __ sub(FieldOperand(ebx, Cell::kValueOffset),
336 Immediate(Smi::FromInt(delta)));
337}
338
339
340void FullCodeGenerator::EmitProfilingCounterReset() {
341 int reset_value = FLAG_interrupt_budget;
342 __ mov(ebx, Immediate(profiling_counter_));
343 __ mov(FieldOperand(ebx, Cell::kValueOffset),
344 Immediate(Smi::FromInt(reset_value)));
345}
346
347
348void FullCodeGenerator::EmitBackEdgeBookkeeping(IterationStatement* stmt,
349 Label* back_edge_target) {
350 Comment cmnt(masm_, "[ Back edge bookkeeping");
351 Label ok;
352
353 DCHECK(back_edge_target->is_bound());
354 int distance = masm_->SizeOfCodeGeneratedSince(back_edge_target);
355 int weight = Min(kMaxBackEdgeWeight,
356 Max(1, distance / kCodeSizeMultiplier));
357 EmitProfilingCounterDecrement(weight);
358 __ j(positive, &ok, Label::kNear);
359 __ call(isolate()->builtins()->InterruptCheck(), RelocInfo::CODE_TARGET);
360
361 // Record a mapping of this PC offset to the OSR id. This is used to find
362 // the AST id from the unoptimized code in order to use it as a key into
363 // the deoptimization input data found in the optimized code.
364 RecordBackEdge(stmt->OsrEntryId());
365
366 EmitProfilingCounterReset();
367
368 __ bind(&ok);
369 PrepareForBailoutForId(stmt->EntryId(), NO_REGISTERS);
370 // Record a mapping of the OSR id to this PC. This is used if the OSR
371 // entry becomes the target of a bailout. We don't expect it to be, but
372 // we want it to work if it is.
373 PrepareForBailoutForId(stmt->OsrEntryId(), NO_REGISTERS);
374}
375
Ben Murdoch097c5b22016-05-18 11:27:45 +0100376void FullCodeGenerator::EmitProfilingCounterHandlingForReturnSequence(
377 bool is_tail_call) {
378 // Pretend that the exit is a backwards jump to the entry.
379 int weight = 1;
380 if (info_->ShouldSelfOptimize()) {
381 weight = FLAG_interrupt_budget / FLAG_self_opt_count;
382 } else {
383 int distance = masm_->pc_offset();
384 weight = Min(kMaxBackEdgeWeight, Max(1, distance / kCodeSizeMultiplier));
385 }
386 EmitProfilingCounterDecrement(weight);
387 Label ok;
388 __ j(positive, &ok, Label::kNear);
389 // Don't need to save result register if we are going to do a tail call.
390 if (!is_tail_call) {
391 __ push(eax);
392 }
393 __ call(isolate()->builtins()->InterruptCheck(), RelocInfo::CODE_TARGET);
394 if (!is_tail_call) {
395 __ pop(eax);
396 }
397 EmitProfilingCounterReset();
398 __ bind(&ok);
399}
Ben Murdoch4a90d5f2016-03-22 12:00:34 +0000400
401void FullCodeGenerator::EmitReturnSequence() {
402 Comment cmnt(masm_, "[ Return sequence");
403 if (return_label_.is_bound()) {
404 __ jmp(&return_label_);
405 } else {
406 // Common return label
407 __ bind(&return_label_);
408 if (FLAG_trace) {
409 __ push(eax);
410 __ CallRuntime(Runtime::kTraceExit);
411 }
Ben Murdoch097c5b22016-05-18 11:27:45 +0100412 EmitProfilingCounterHandlingForReturnSequence(false);
Ben Murdoch4a90d5f2016-03-22 12:00:34 +0000413
414 SetReturnPosition(literal());
415 __ leave();
416
417 int arg_count = info_->scope()->num_parameters() + 1;
418 int arguments_bytes = arg_count * kPointerSize;
419 __ Ret(arguments_bytes, ecx);
420 }
421}
422
423
424void FullCodeGenerator::StackValueContext::Plug(Variable* var) const {
425 DCHECK(var->IsStackAllocated() || var->IsContextSlot());
426 MemOperand operand = codegen()->VarOperand(var, result_register());
427 // Memory operands can be pushed directly.
Ben Murdoch097c5b22016-05-18 11:27:45 +0100428 codegen()->PushOperand(operand);
Ben Murdoch4a90d5f2016-03-22 12:00:34 +0000429}
430
431
432void FullCodeGenerator::EffectContext::Plug(Heap::RootListIndex index) const {
433 UNREACHABLE(); // Not used on X87.
434}
435
436
437void FullCodeGenerator::AccumulatorValueContext::Plug(
438 Heap::RootListIndex index) const {
439 UNREACHABLE(); // Not used on X87.
440}
441
442
443void FullCodeGenerator::StackValueContext::Plug(
444 Heap::RootListIndex index) const {
445 UNREACHABLE(); // Not used on X87.
446}
447
448
449void FullCodeGenerator::TestContext::Plug(Heap::RootListIndex index) const {
450 UNREACHABLE(); // Not used on X87.
451}
452
453
454void FullCodeGenerator::EffectContext::Plug(Handle<Object> lit) const {
455}
456
457
458void FullCodeGenerator::AccumulatorValueContext::Plug(
459 Handle<Object> lit) const {
460 if (lit->IsSmi()) {
461 __ SafeMove(result_register(), Immediate(lit));
462 } else {
463 __ Move(result_register(), Immediate(lit));
464 }
465}
466
467
468void FullCodeGenerator::StackValueContext::Plug(Handle<Object> lit) const {
Ben Murdoch097c5b22016-05-18 11:27:45 +0100469 codegen()->OperandStackDepthIncrement(1);
Ben Murdoch4a90d5f2016-03-22 12:00:34 +0000470 if (lit->IsSmi()) {
471 __ SafePush(Immediate(lit));
472 } else {
473 __ push(Immediate(lit));
474 }
475}
476
477
478void FullCodeGenerator::TestContext::Plug(Handle<Object> lit) const {
479 codegen()->PrepareForBailoutBeforeSplit(condition(),
480 true,
481 true_label_,
482 false_label_);
Ben Murdochda12d292016-06-02 14:46:10 +0100483 DCHECK(lit->IsNull() || lit->IsUndefined() || !lit->IsUndetectable());
Ben Murdoch4a90d5f2016-03-22 12:00:34 +0000484 if (lit->IsUndefined() || lit->IsNull() || lit->IsFalse()) {
485 if (false_label_ != fall_through_) __ jmp(false_label_);
486 } else if (lit->IsTrue() || lit->IsJSObject()) {
487 if (true_label_ != fall_through_) __ jmp(true_label_);
488 } else if (lit->IsString()) {
489 if (String::cast(*lit)->length() == 0) {
490 if (false_label_ != fall_through_) __ jmp(false_label_);
491 } else {
492 if (true_label_ != fall_through_) __ jmp(true_label_);
493 }
494 } else if (lit->IsSmi()) {
495 if (Smi::cast(*lit)->value() == 0) {
496 if (false_label_ != fall_through_) __ jmp(false_label_);
497 } else {
498 if (true_label_ != fall_through_) __ jmp(true_label_);
499 }
500 } else {
501 // For simplicity we always test the accumulator register.
502 __ mov(result_register(), lit);
503 codegen()->DoTest(this);
504 }
505}
506
507
Ben Murdoch4a90d5f2016-03-22 12:00:34 +0000508void FullCodeGenerator::StackValueContext::DropAndPlug(int count,
509 Register reg) const {
510 DCHECK(count > 0);
Ben Murdoch097c5b22016-05-18 11:27:45 +0100511 if (count > 1) codegen()->DropOperands(count - 1);
Ben Murdoch4a90d5f2016-03-22 12:00:34 +0000512 __ mov(Operand(esp, 0), reg);
513}
514
515
Ben Murdoch4a90d5f2016-03-22 12:00:34 +0000516void FullCodeGenerator::EffectContext::Plug(Label* materialize_true,
517 Label* materialize_false) const {
518 DCHECK(materialize_true == materialize_false);
519 __ bind(materialize_true);
520}
521
522
523void FullCodeGenerator::AccumulatorValueContext::Plug(
524 Label* materialize_true,
525 Label* materialize_false) const {
526 Label done;
527 __ bind(materialize_true);
528 __ mov(result_register(), isolate()->factory()->true_value());
529 __ jmp(&done, Label::kNear);
530 __ bind(materialize_false);
531 __ mov(result_register(), isolate()->factory()->false_value());
532 __ bind(&done);
533}
534
535
536void FullCodeGenerator::StackValueContext::Plug(
537 Label* materialize_true,
538 Label* materialize_false) const {
Ben Murdoch097c5b22016-05-18 11:27:45 +0100539 codegen()->OperandStackDepthIncrement(1);
Ben Murdoch4a90d5f2016-03-22 12:00:34 +0000540 Label done;
541 __ bind(materialize_true);
542 __ push(Immediate(isolate()->factory()->true_value()));
543 __ jmp(&done, Label::kNear);
544 __ bind(materialize_false);
545 __ push(Immediate(isolate()->factory()->false_value()));
546 __ bind(&done);
547}
548
549
550void FullCodeGenerator::TestContext::Plug(Label* materialize_true,
551 Label* materialize_false) const {
552 DCHECK(materialize_true == true_label_);
553 DCHECK(materialize_false == false_label_);
554}
555
556
557void FullCodeGenerator::AccumulatorValueContext::Plug(bool flag) const {
558 Handle<Object> value = flag
559 ? isolate()->factory()->true_value()
560 : isolate()->factory()->false_value();
561 __ mov(result_register(), value);
562}
563
564
565void FullCodeGenerator::StackValueContext::Plug(bool flag) const {
Ben Murdoch097c5b22016-05-18 11:27:45 +0100566 codegen()->OperandStackDepthIncrement(1);
Ben Murdoch4a90d5f2016-03-22 12:00:34 +0000567 Handle<Object> value = flag
568 ? isolate()->factory()->true_value()
569 : isolate()->factory()->false_value();
570 __ push(Immediate(value));
571}
572
573
574void FullCodeGenerator::TestContext::Plug(bool flag) const {
575 codegen()->PrepareForBailoutBeforeSplit(condition(),
576 true,
577 true_label_,
578 false_label_);
579 if (flag) {
580 if (true_label_ != fall_through_) __ jmp(true_label_);
581 } else {
582 if (false_label_ != fall_through_) __ jmp(false_label_);
583 }
584}
585
586
587void FullCodeGenerator::DoTest(Expression* condition,
588 Label* if_true,
589 Label* if_false,
590 Label* fall_through) {
Ben Murdochda12d292016-06-02 14:46:10 +0100591 Handle<Code> ic = ToBooleanICStub::GetUninitialized(isolate());
Ben Murdoch4a90d5f2016-03-22 12:00:34 +0000592 CallIC(ic, condition->test_id());
593 __ CompareRoot(result_register(), Heap::kTrueValueRootIndex);
594 Split(equal, if_true, if_false, fall_through);
595}
596
597
598void FullCodeGenerator::Split(Condition cc,
599 Label* if_true,
600 Label* if_false,
601 Label* fall_through) {
602 if (if_false == fall_through) {
603 __ j(cc, if_true);
604 } else if (if_true == fall_through) {
605 __ j(NegateCondition(cc), if_false);
606 } else {
607 __ j(cc, if_true);
608 __ jmp(if_false);
609 }
610}
611
612
613MemOperand FullCodeGenerator::StackOperand(Variable* var) {
614 DCHECK(var->IsStackAllocated());
615 // Offset is negative because higher indexes are at lower addresses.
616 int offset = -var->index() * kPointerSize;
617 // Adjust by a (parameter or local) base offset.
618 if (var->IsParameter()) {
619 offset += (info_->scope()->num_parameters() + 1) * kPointerSize;
620 } else {
621 offset += JavaScriptFrameConstants::kLocal0Offset;
622 }
623 return Operand(ebp, offset);
624}
625
626
627MemOperand FullCodeGenerator::VarOperand(Variable* var, Register scratch) {
628 DCHECK(var->IsContextSlot() || var->IsStackAllocated());
629 if (var->IsContextSlot()) {
630 int context_chain_length = scope()->ContextChainLength(var->scope());
631 __ LoadContext(scratch, context_chain_length);
632 return ContextOperand(scratch, var->index());
633 } else {
634 return StackOperand(var);
635 }
636}
637
638
639void FullCodeGenerator::GetVar(Register dest, Variable* var) {
640 DCHECK(var->IsContextSlot() || var->IsStackAllocated());
641 MemOperand location = VarOperand(var, dest);
642 __ mov(dest, location);
643}
644
645
646void FullCodeGenerator::SetVar(Variable* var,
647 Register src,
648 Register scratch0,
649 Register scratch1) {
650 DCHECK(var->IsContextSlot() || var->IsStackAllocated());
651 DCHECK(!scratch0.is(src));
652 DCHECK(!scratch0.is(scratch1));
653 DCHECK(!scratch1.is(src));
654 MemOperand location = VarOperand(var, scratch0);
655 __ mov(location, src);
656
657 // Emit the write barrier code if the location is in the heap.
658 if (var->IsContextSlot()) {
659 int offset = Context::SlotOffset(var->index());
660 DCHECK(!scratch0.is(esi) && !src.is(esi) && !scratch1.is(esi));
661 __ RecordWriteContextSlot(scratch0, offset, src, scratch1, kDontSaveFPRegs);
662 }
663}
664
665
666void FullCodeGenerator::PrepareForBailoutBeforeSplit(Expression* expr,
667 bool should_normalize,
668 Label* if_true,
669 Label* if_false) {
670 // Only prepare for bailouts before splits if we're in a test
671 // context. Otherwise, we let the Visit function deal with the
672 // preparation to avoid preparing with the same AST id twice.
673 if (!context()->IsTest()) return;
674
675 Label skip;
676 if (should_normalize) __ jmp(&skip, Label::kNear);
677 PrepareForBailout(expr, TOS_REG);
678 if (should_normalize) {
679 __ cmp(eax, isolate()->factory()->true_value());
680 Split(equal, if_true, if_false, NULL);
681 __ bind(&skip);
682 }
683}
684
685
686void FullCodeGenerator::EmitDebugCheckDeclarationContext(Variable* variable) {
687 // The variable in the declaration always resides in the current context.
688 DCHECK_EQ(0, scope()->ContextChainLength(variable->scope()));
Ben Murdoch097c5b22016-05-18 11:27:45 +0100689 if (FLAG_debug_code) {
Ben Murdoch4a90d5f2016-03-22 12:00:34 +0000690 // Check that we're not inside a with or catch context.
691 __ mov(ebx, FieldOperand(esi, HeapObject::kMapOffset));
692 __ cmp(ebx, isolate()->factory()->with_context_map());
693 __ Check(not_equal, kDeclarationInWithContext);
694 __ cmp(ebx, isolate()->factory()->catch_context_map());
695 __ Check(not_equal, kDeclarationInCatchContext);
696 }
697}
698
699
700void FullCodeGenerator::VisitVariableDeclaration(
701 VariableDeclaration* declaration) {
702 // If it was not possible to allocate the variable at compile time, we
703 // need to "declare" it at runtime to make sure it actually exists in the
704 // local context.
705 VariableProxy* proxy = declaration->proxy();
706 VariableMode mode = declaration->mode();
707 Variable* variable = proxy->var();
708 bool hole_init = mode == LET || mode == CONST || mode == CONST_LEGACY;
709 switch (variable->location()) {
710 case VariableLocation::GLOBAL:
711 case VariableLocation::UNALLOCATED:
712 globals_->Add(variable->name(), zone());
713 globals_->Add(variable->binding_needs_init()
714 ? isolate()->factory()->the_hole_value()
715 : isolate()->factory()->undefined_value(), zone());
716 break;
717
718 case VariableLocation::PARAMETER:
719 case VariableLocation::LOCAL:
720 if (hole_init) {
721 Comment cmnt(masm_, "[ VariableDeclaration");
722 __ mov(StackOperand(variable),
723 Immediate(isolate()->factory()->the_hole_value()));
724 }
725 break;
726
727 case VariableLocation::CONTEXT:
728 if (hole_init) {
729 Comment cmnt(masm_, "[ VariableDeclaration");
730 EmitDebugCheckDeclarationContext(variable);
731 __ mov(ContextOperand(esi, variable->index()),
732 Immediate(isolate()->factory()->the_hole_value()));
733 // No write barrier since the hole value is in old space.
734 PrepareForBailoutForId(proxy->id(), NO_REGISTERS);
735 }
736 break;
737
738 case VariableLocation::LOOKUP: {
739 Comment cmnt(masm_, "[ VariableDeclaration");
740 __ push(Immediate(variable->name()));
741 // VariableDeclaration nodes are always introduced in one of four modes.
742 DCHECK(IsDeclaredVariableMode(mode));
743 // Push initial value, if any.
744 // Note: For variables we must not push an initial value (such as
745 // 'undefined') because we may have a (legal) redeclaration and we
746 // must not destroy the current value.
747 if (hole_init) {
748 __ push(Immediate(isolate()->factory()->the_hole_value()));
749 } else {
750 __ push(Immediate(Smi::FromInt(0))); // Indicates no initial value.
751 }
752 __ push(
753 Immediate(Smi::FromInt(variable->DeclarationPropertyAttributes())));
754 __ CallRuntime(Runtime::kDeclareLookupSlot);
755 break;
756 }
757 }
758}
759
760void FullCodeGenerator::VisitFunctionDeclaration(
761 FunctionDeclaration* declaration) {
762 VariableProxy* proxy = declaration->proxy();
763 Variable* variable = proxy->var();
764 switch (variable->location()) {
765 case VariableLocation::GLOBAL:
766 case VariableLocation::UNALLOCATED: {
767 globals_->Add(variable->name(), zone());
768 Handle<SharedFunctionInfo> function =
769 Compiler::GetSharedFunctionInfo(declaration->fun(), script(), info_);
770 // Check for stack-overflow exception.
771 if (function.is_null()) return SetStackOverflow();
772 globals_->Add(function, zone());
773 break;
774 }
775
776 case VariableLocation::PARAMETER:
777 case VariableLocation::LOCAL: {
778 Comment cmnt(masm_, "[ FunctionDeclaration");
779 VisitForAccumulatorValue(declaration->fun());
780 __ mov(StackOperand(variable), result_register());
781 break;
782 }
783
784 case VariableLocation::CONTEXT: {
785 Comment cmnt(masm_, "[ FunctionDeclaration");
786 EmitDebugCheckDeclarationContext(variable);
787 VisitForAccumulatorValue(declaration->fun());
788 __ mov(ContextOperand(esi, variable->index()), result_register());
789 // We know that we have written a function, which is not a smi.
790 __ RecordWriteContextSlot(esi, Context::SlotOffset(variable->index()),
791 result_register(), ecx, kDontSaveFPRegs,
792 EMIT_REMEMBERED_SET, OMIT_SMI_CHECK);
793 PrepareForBailoutForId(proxy->id(), NO_REGISTERS);
794 break;
795 }
796
797 case VariableLocation::LOOKUP: {
798 Comment cmnt(masm_, "[ FunctionDeclaration");
Ben Murdoch097c5b22016-05-18 11:27:45 +0100799 PushOperand(variable->name());
Ben Murdoch4a90d5f2016-03-22 12:00:34 +0000800 VisitForStackValue(declaration->fun());
Ben Murdoch097c5b22016-05-18 11:27:45 +0100801 PushOperand(Smi::FromInt(variable->DeclarationPropertyAttributes()));
802 CallRuntimeWithOperands(Runtime::kDeclareLookupSlot);
Ben Murdoch4a90d5f2016-03-22 12:00:34 +0000803 break;
804 }
805 }
806}
807
808
809void FullCodeGenerator::DeclareGlobals(Handle<FixedArray> pairs) {
810 // Call the runtime to declare the globals.
811 __ Push(pairs);
812 __ Push(Smi::FromInt(DeclareGlobalsFlags()));
813 __ CallRuntime(Runtime::kDeclareGlobals);
814 // Return value is ignored.
815}
816
817
818void FullCodeGenerator::DeclareModules(Handle<FixedArray> descriptions) {
819 // Call the runtime to declare the modules.
820 __ Push(descriptions);
821 __ CallRuntime(Runtime::kDeclareModules);
822 // Return value is ignored.
823}
824
825
// Compiles a switch statement as a linear sequence of '===' comparisons of
// each case label against the tag value (kept on the operand stack), followed
// by all the case bodies. Each comparison gets a patchable smi fast path
// (see JumpPatchSite) and falls back to the generic CompareIC.
void FullCodeGenerator::VisitSwitchStatement(SwitchStatement* stmt) {
  Comment cmnt(masm_, "[ SwitchStatement");
  Breakable nested_statement(this, stmt);
  SetStatementPosition(stmt);

  // Keep the switch value on the stack until a case matches.
  VisitForStackValue(stmt->tag());
  PrepareForBailoutForId(stmt->EntryId(), NO_REGISTERS);

  ZoneList<CaseClause*>* clauses = stmt->cases();
  CaseClause* default_clause = NULL;  // Can occur anywhere in the list.

  Label next_test;  // Recycled for each test.
  // Compile all the tests with branches to their bodies.
  for (int i = 0; i < clauses->length(); i++) {
    CaseClause* clause = clauses->at(i);
    clause->body_target()->Unuse();

    // The default is not a test, but remember it as final fall through.
    if (clause->is_default()) {
      default_clause = clause;
      continue;
    }

    Comment cmnt(masm_, "[ Case comparison");
    __ bind(&next_test);
    next_test.Unuse();

    // Compile the label expression.
    VisitForAccumulatorValue(clause->label());

    // Perform the comparison as if via '==='.
    __ mov(edx, Operand(esp, 0));  // Switch value.
    bool inline_smi_code = ShouldInlineSmiCase(Token::EQ_STRICT);
    JumpPatchSite patch_site(masm_);
    if (inline_smi_code) {
      // Fast path: if both operands are smis, compare them directly and
      // skip the CompareIC entirely.
      Label slow_case;
      __ mov(ecx, edx);
      __ or_(ecx, eax);
      patch_site.EmitJumpIfNotSmi(ecx, &slow_case, Label::kNear);

      __ cmp(edx, eax);
      __ j(not_equal, &next_test);
      __ Drop(1);  // Switch value is no longer needed.
      __ jmp(clause->body_target());
      __ bind(&slow_case);
    }

    SetExpressionPosition(clause);
    Handle<Code> ic =
        CodeFactory::CompareIC(isolate(), Token::EQ_STRICT).code();
    CallIC(ic, clause->CompareId());
    patch_site.EmitPatchInfo();

    // Normal execution jumps straight to |skip|; the block below is entered
    // only when optimized code deoptimizes and resumes at this bailout point,
    // in which case the materialized boolean in eax must be re-tested.
    Label skip;
    __ jmp(&skip, Label::kNear);
    PrepareForBailout(clause, TOS_REG);
    __ cmp(eax, isolate()->factory()->true_value());
    __ j(not_equal, &next_test);
    __ Drop(1);
    __ jmp(clause->body_target());
    __ bind(&skip);

    // A zero result from the CompareIC signals that the values matched.
    __ test(eax, eax);
    __ j(not_equal, &next_test);
    __ Drop(1);  // Switch value is no longer needed.
    __ jmp(clause->body_target());
  }

  // Discard the test value and jump to the default if present, otherwise to
  // the end of the statement.
  __ bind(&next_test);
  DropOperands(1);  // Switch value is no longer needed.
  if (default_clause == NULL) {
    __ jmp(nested_statement.break_label());
  } else {
    __ jmp(default_clause->body_target());
  }

  // Compile all the case bodies.
  for (int i = 0; i < clauses->length(); i++) {
    Comment cmnt(masm_, "[ Case body");
    CaseClause* clause = clauses->at(i);
    __ bind(clause->body_target());
    PrepareForBailoutForId(clause->EntryId(), NO_REGISTERS);
    VisitStatements(clause->statements());
  }

  __ bind(nested_statement.break_label());
  PrepareForBailoutForId(stmt->ExitId(), NO_REGISTERS);
}
917
918
// Compiles a for-in loop. The loop state lives in five operand-stack slots
// (pushed below): enumerable object, cache kind (map or Smi(1)), enum cache /
// fixed array of keys, length, and the current index.
void FullCodeGenerator::VisitForInStatement(ForInStatement* stmt) {
  Comment cmnt(masm_, "[ ForInStatement");
  SetStatementPosition(stmt, SKIP_BREAK);

  FeedbackVectorSlot slot = stmt->ForInFeedbackSlot();

  // Get the object to enumerate over.
  SetExpressionAsStatementPosition(stmt->enumerable());
  VisitForAccumulatorValue(stmt->enumerable());
  OperandStackDepthIncrement(5);

  Label loop, exit;
  Iteration loop_statement(this, stmt);
  increment_loop_depth();

  // If the object is null or undefined, skip over the loop, otherwise convert
  // it to a JS receiver. See ECMA-262 version 5, section 12.6.4.
  Label convert, done_convert;
  __ JumpIfSmi(eax, &convert, Label::kNear);
  __ CmpObjectType(eax, FIRST_JS_RECEIVER_TYPE, ecx);
  __ j(above_equal, &done_convert, Label::kNear);
  __ cmp(eax, isolate()->factory()->undefined_value());
  __ j(equal, &exit);
  __ cmp(eax, isolate()->factory()->null_value());
  __ j(equal, &exit);
  __ bind(&convert);
  ToObjectStub stub(isolate());
  __ CallStub(&stub);
  __ bind(&done_convert);
  PrepareForBailoutForId(stmt->ToObjectId(), TOS_REG);
  __ push(eax);

  // Check cache validity in generated code. This is a fast case for
  // the JSObject::IsSimpleEnum cache validity checks. If we cannot
  // guarantee cache validity, call the runtime system to check cache
  // validity or get the property names in a fixed array.
  // Note: Proxies never have an enum cache, so will always take the
  // slow path.
  Label call_runtime, use_cache, fixed_array;
  __ CheckEnumCache(&call_runtime);

  __ mov(eax, FieldOperand(eax, HeapObject::kMapOffset));
  __ jmp(&use_cache, Label::kNear);

  // Get the set of properties to enumerate.
  __ bind(&call_runtime);
  __ push(eax);
  __ CallRuntime(Runtime::kForInEnumerate);
  PrepareForBailoutForId(stmt->EnumId(), TOS_REG);
  // The runtime returns either a map (enum cache usable) or a fixed array
  // of keys; distinguish the two by checking for the meta map.
  __ cmp(FieldOperand(eax, HeapObject::kMapOffset),
         isolate()->factory()->meta_map());
  __ j(not_equal, &fixed_array);


  // We got a map in register eax. Get the enumeration cache from it.
  Label no_descriptors;
  __ bind(&use_cache);

  __ EnumLength(edx, eax);
  __ cmp(edx, Immediate(Smi::FromInt(0)));
  __ j(equal, &no_descriptors);

  __ LoadInstanceDescriptors(eax, ecx);
  __ mov(ecx, FieldOperand(ecx, DescriptorArray::kEnumCacheOffset));
  __ mov(ecx, FieldOperand(ecx, DescriptorArray::kEnumCacheBridgeCacheOffset));

  // Set up the four remaining stack slots.
  __ push(eax);  // Map.
  __ push(ecx);  // Enumeration cache.
  __ push(edx);  // Number of valid entries for the map in the enum cache.
  __ push(Immediate(Smi::FromInt(0)));  // Initial index.
  __ jmp(&loop);

  // No enumerable properties: drop the pushed enumerable and exit.
  __ bind(&no_descriptors);
  __ add(esp, Immediate(kPointerSize));
  __ jmp(&exit);

  // We got a fixed array in register eax. Iterate through that.
  __ bind(&fixed_array);

  __ push(Immediate(Smi::FromInt(1)));  // Smi(1) indicates slow check
  __ push(eax);  // Array
  __ mov(eax, FieldOperand(eax, FixedArray::kLengthOffset));
  __ push(eax);  // Fixed array length (as smi).
  PrepareForBailoutForId(stmt->PrepareId(), NO_REGISTERS);
  __ push(Immediate(Smi::FromInt(0)));  // Initial index.

  // Generate code for doing the condition check.
  __ bind(&loop);
  SetExpressionAsStatementPosition(stmt->each());

  __ mov(eax, Operand(esp, 0 * kPointerSize));  // Get the current index.
  __ cmp(eax, Operand(esp, 1 * kPointerSize));  // Compare to the array length.
  __ j(above_equal, loop_statement.break_label());

  // Get the current entry of the array into register ebx.
  __ mov(ebx, Operand(esp, 2 * kPointerSize));
  __ mov(ebx, FieldOperand(ebx, eax, times_2, FixedArray::kHeaderSize));

  // Get the expected map from the stack or a smi in the
  // permanent slow case into register edx.
  __ mov(edx, Operand(esp, 3 * kPointerSize));

  // Check if the expected map still matches that of the enumerable.
  // If not, we may have to filter the key.
  Label update_each;
  __ mov(ecx, Operand(esp, 4 * kPointerSize));
  __ cmp(edx, FieldOperand(ecx, HeapObject::kMapOffset));
  __ j(equal, &update_each, Label::kNear);

  // We need to filter the key, record slow-path here.
  int const vector_index = SmiFromSlot(slot)->value();
  __ EmitLoadTypeFeedbackVector(edx);
  __ mov(FieldOperand(edx, FixedArray::OffsetOfElementAt(vector_index)),
         Immediate(TypeFeedbackVector::MegamorphicSentinel(isolate())));

  // Convert the entry to a string or null if it isn't a property
  // anymore. If the property has been removed while iterating, we
  // just skip it.
  __ push(ecx);  // Enumerable.
  __ push(ebx);  // Current entry.
  __ CallRuntime(Runtime::kForInFilter);
  PrepareForBailoutForId(stmt->FilterId(), TOS_REG);
  __ cmp(eax, isolate()->factory()->undefined_value());
  __ j(equal, loop_statement.continue_label());
  __ mov(ebx, eax);

  // Update the 'each' property or variable from the possibly filtered
  // entry in register ebx.
  __ bind(&update_each);
  __ mov(result_register(), ebx);
  // Perform the assignment as if via '='.
  { EffectContext context(this);
    EmitAssignment(stmt->each(), stmt->EachFeedbackSlot());
    PrepareForBailoutForId(stmt->AssignmentId(), NO_REGISTERS);
  }

  // Both Crankshaft and Turbofan expect BodyId to be right before stmt->body().
  PrepareForBailoutForId(stmt->BodyId(), NO_REGISTERS);
  // Generate code for the body of the loop.
  Visit(stmt->body());

  // Generate code for going to the next element by incrementing the
  // index (smi) stored on top of the stack.
  __ bind(loop_statement.continue_label());
  __ add(Operand(esp, 0 * kPointerSize), Immediate(Smi::FromInt(1)));

  EmitBackEdgeBookkeeping(stmt, &loop);
  __ jmp(&loop);

  // Remove the pointers stored on the stack.
  __ bind(loop_statement.break_label());
  DropOperands(5);

  // Exit and decrement the loop depth.
  PrepareForBailoutForId(stmt->ExitId(), NO_REGISTERS);
  __ bind(&exit);
  decrement_loop_depth();
}
1078
1079
// Installs the [[HomeObject]] on the initializer value sitting on top of the
// operand stack: emits a named StoreIC that writes the home object (taken
// from the stack slot at |offset|) into the initializer's
// home_object_symbol property, using feedback slot |slot|.
void FullCodeGenerator::EmitSetHomeObject(Expression* initializer, int offset,
                                          FeedbackVectorSlot slot) {
  DCHECK(NeedsHomeObject(initializer));
  __ mov(StoreDescriptor::ReceiverRegister(), Operand(esp, 0));
  __ mov(StoreDescriptor::NameRegister(),
         Immediate(isolate()->factory()->home_object_symbol()));
  __ mov(StoreDescriptor::ValueRegister(), Operand(esp, offset * kPointerSize));
  EmitLoadStoreICSlot(slot);
  CallStoreIC();
}
1090
1091
// Same as EmitSetHomeObject, but the initializer value is in the accumulator
// (eax) rather than on top of the operand stack; the home object is still
// read from the stack slot at |offset|.
void FullCodeGenerator::EmitSetHomeObjectAccumulator(Expression* initializer,
                                                     int offset,
                                                     FeedbackVectorSlot slot) {
  DCHECK(NeedsHomeObject(initializer));
  __ mov(StoreDescriptor::ReceiverRegister(), eax);
  __ mov(StoreDescriptor::NameRegister(),
         Immediate(isolate()->factory()->home_object_symbol()));
  __ mov(StoreDescriptor::ValueRegister(), Operand(esp, offset * kPointerSize));
  EmitLoadStoreICSlot(slot);
  CallStoreIC();
}
1103
1104
// Emits the fast path for loading a global variable that might be shadowed by
// eval-introduced bindings: walks the context chain and verifies that every
// relevant context extension slot is "the hole" (i.e. no extension object was
// installed by a sloppy-mode eval). Jumps to |slow| if any extension is
// present; otherwise falls through to a normal global load.
void FullCodeGenerator::EmitLoadGlobalCheckExtensions(VariableProxy* proxy,
                                                      TypeofMode typeof_mode,
                                                      Label* slow) {
  Register context = esi;
  Register temp = edx;

  // First, statically walk the scopes visible at compile time.
  Scope* s = scope();
  while (s != NULL) {
    if (s->num_heap_slots() > 0) {
      if (s->calls_sloppy_eval()) {
        // Check that extension is "the hole".
        __ JumpIfNotRoot(ContextOperand(context, Context::EXTENSION_INDEX),
                         Heap::kTheHoleValueRootIndex, slow);
      }
      // Load next context in chain.
      __ mov(temp, ContextOperand(context, Context::PREVIOUS_INDEX));
      // Walk the rest of the chain without clobbering esi.
      context = temp;
    }
    // If no outer scope calls eval, we do not need to check more
    // context extensions. If we have reached an eval scope, we check
    // all extensions from this point.
    if (!s->outer_scope_calls_sloppy_eval() || s->is_eval_scope()) break;
    s = s->outer_scope();
  }

  if (s != NULL && s->is_eval_scope()) {
    // Inside an eval scope the remaining chain is only known at runtime, so
    // loop up the context chain in generated code. There is no frame effect
    // so it is safe to use raw labels here.
    Label next, fast;
    if (!context.is(temp)) {
      __ mov(temp, context);
    }
    __ bind(&next);
    // Terminate at native context.
    __ cmp(FieldOperand(temp, HeapObject::kMapOffset),
           Immediate(isolate()->factory()->native_context_map()));
    __ j(equal, &fast, Label::kNear);
    // Check that extension is "the hole".
    __ JumpIfNotRoot(ContextOperand(temp, Context::EXTENSION_INDEX),
                     Heap::kTheHoleValueRootIndex, slow);
    // Load next context in chain.
    __ mov(temp, ContextOperand(temp, Context::PREVIOUS_INDEX));
    __ jmp(&next);
    __ bind(&fast);
  }

  // All extension objects were empty and it is safe to use a normal global
  // load machinery.
  EmitGlobalVariableLoad(proxy, typeof_mode);
}
1156
1157
// Returns a memory operand for context slot |var|, emitting checks along the
// way that no context between the current scope and the variable's scope has
// acquired an extension object (which could shadow the slot). Jumps to |slow|
// if any extension is present.
MemOperand FullCodeGenerator::ContextSlotOperandCheckExtensions(Variable* var,
                                                                Label* slow) {
  DCHECK(var->IsContextSlot());
  Register context = esi;
  Register temp = ebx;

  for (Scope* s = scope(); s != var->scope(); s = s->outer_scope()) {
    if (s->num_heap_slots() > 0) {
      if (s->calls_sloppy_eval()) {
        // Check that extension is "the hole".
        __ JumpIfNotRoot(ContextOperand(context, Context::EXTENSION_INDEX),
                         Heap::kTheHoleValueRootIndex, slow);
      }
      __ mov(temp, ContextOperand(context, Context::PREVIOUS_INDEX));
      // Walk the rest of the chain without clobbering esi.
      context = temp;
    }
  }
  // Check that last extension is "the hole".
  __ JumpIfNotRoot(ContextOperand(context, Context::EXTENSION_INDEX),
                   Heap::kTheHoleValueRootIndex, slow);

  // This function is used only for loads, not stores, so it's safe to
  // return an esi-based operand (the write barrier cannot be allowed to
  // destroy the esi register).
  return ContextOperand(context, var->index());
}
1185
1186
// Emits the fast path for loading a dynamically-looked-up variable, jumping
// to |done| on success and to |slow| when the full runtime lookup is needed.
void FullCodeGenerator::EmitDynamicLookupFastCase(VariableProxy* proxy,
                                                  TypeofMode typeof_mode,
                                                  Label* slow, Label* done) {
  // Generate fast-case code for variables that might be shadowed by
  // eval-introduced variables. Eval is used a lot without
  // introducing variables. In those cases, we do not want to
  // perform a runtime call for all variables in the scope
  // containing the eval.
  Variable* var = proxy->var();
  if (var->mode() == DYNAMIC_GLOBAL) {
    EmitLoadGlobalCheckExtensions(proxy, typeof_mode, slow);
    __ jmp(done);
  } else if (var->mode() == DYNAMIC_LOCAL) {
    Variable* local = var->local_if_not_shadowed();
    __ mov(eax, ContextSlotOperandCheckExtensions(local, slow));
    if (local->mode() == LET || local->mode() == CONST ||
        local->mode() == CONST_LEGACY) {
      // Hole check: the binding may not have been initialized yet.
      __ cmp(eax, isolate()->factory()->the_hole_value());
      __ j(not_equal, done);
      if (local->mode() == CONST_LEGACY) {
        // Uninitialized legacy const reads as undefined.
        __ mov(eax, isolate()->factory()->undefined_value());
      } else {  // LET || CONST
        // Reading an uninitialized let/const binding is a TDZ violation.
        __ push(Immediate(var->name()));
        __ CallRuntime(Runtime::kThrowReferenceError);
      }
    }
    __ jmp(done);
  }
}
1216
1217
// Loads a global variable through a LoadIC: the receiver is the global object
// (the native context's extension), the name is the variable's name, and the
// IC's feedback slot comes from the proxy. The result ends up in the IC's
// result register.
void FullCodeGenerator::EmitGlobalVariableLoad(VariableProxy* proxy,
                                               TypeofMode typeof_mode) {
  Variable* var = proxy->var();
  DCHECK(var->IsUnallocatedOrGlobalSlot() ||
         (var->IsLookupSlot() && var->mode() == DYNAMIC_GLOBAL));
  __ mov(LoadDescriptor::ReceiverRegister(), NativeContextOperand());
  __ mov(LoadDescriptor::ReceiverRegister(),
         ContextOperand(LoadDescriptor::ReceiverRegister(),
                        Context::EXTENSION_INDEX));
  __ mov(LoadDescriptor::NameRegister(), var->name());
  __ mov(LoadDescriptor::SlotRegister(),
         Immediate(SmiFromSlot(proxy->VariableFeedbackSlot())));
  CallLoadIC(typeof_mode);
}
1232
1233
// Loads the value of a variable reference and plugs it into the current
// expression context. Dispatches on the variable's allocation location:
// global (LoadIC), stack/context slot (direct access, with hole check for
// let/const), or runtime lookup (dynamic scoping via eval).
void FullCodeGenerator::EmitVariableLoad(VariableProxy* proxy,
                                         TypeofMode typeof_mode) {
  SetExpressionPosition(proxy);
  PrepareForBailoutForId(proxy->BeforeId(), NO_REGISTERS);
  Variable* var = proxy->var();

  // Three cases: global variables, lookup variables, and all other types of
  // variables.
  switch (var->location()) {
    case VariableLocation::GLOBAL:
    case VariableLocation::UNALLOCATED: {
      Comment cmnt(masm_, "[ Global variable");
      EmitGlobalVariableLoad(proxy, typeof_mode);
      context()->Plug(eax);
      break;
    }

    case VariableLocation::PARAMETER:
    case VariableLocation::LOCAL:
    case VariableLocation::CONTEXT: {
      DCHECK_EQ(NOT_INSIDE_TYPEOF, typeof_mode);
      Comment cmnt(masm_, var->IsContextSlot() ? "[ Context variable"
                                               : "[ Stack variable");

      if (NeedsHoleCheckForLoad(proxy)) {
        // Let and const need a read barrier.
        Label done;
        GetVar(eax, var);
        __ cmp(eax, isolate()->factory()->the_hole_value());
        __ j(not_equal, &done, Label::kNear);
        if (var->mode() == LET || var->mode() == CONST) {
          // Throw a reference error when using an uninitialized let/const
          // binding in harmony mode.
          __ push(Immediate(var->name()));
          __ CallRuntime(Runtime::kThrowReferenceError);
        } else {
          // Uninitialized legacy const bindings are unholed.
          DCHECK(var->mode() == CONST_LEGACY);
          __ mov(eax, isolate()->factory()->undefined_value());
        }
        __ bind(&done);
        context()->Plug(eax);
        break;
      }
      context()->Plug(var);
      break;
    }

    case VariableLocation::LOOKUP: {
      Comment cmnt(masm_, "[ Lookup variable");
      Label done, slow;
      // Generate code for loading from variables potentially shadowed
      // by eval-introduced variables.
      EmitDynamicLookupFastCase(proxy, typeof_mode, &slow, &done);
      __ bind(&slow);
      __ push(Immediate(var->name()));
      Runtime::FunctionId function_id =
          typeof_mode == NOT_INSIDE_TYPEOF
              ? Runtime::kLoadLookupSlot
              : Runtime::kLoadLookupSlotInsideTypeof;
      __ CallRuntime(function_id);
      __ bind(&done);
      context()->Plug(eax);
      break;
    }
  }
}
1301
1302
// Materializes a regexp literal via FastCloneRegExpStub. The stub arguments
// are passed in registers: closure (edi), literal index (eax), pattern (ecx)
// and flags (edx); the resulting regexp object is plugged from eax.
void FullCodeGenerator::VisitRegExpLiteral(RegExpLiteral* expr) {
  Comment cmnt(masm_, "[ RegExpLiteral");
  __ mov(edi, Operand(ebp, JavaScriptFrameConstants::kFunctionOffset));
  __ Move(eax, Immediate(Smi::FromInt(expr->literal_index())));
  __ Move(ecx, Immediate(expr->pattern()));
  __ Move(edx, Immediate(Smi::FromInt(expr->flags())));
  FastCloneRegExpStub stub(isolate());
  __ CallStub(&stub);
  context()->Plug(eax);
}
1313
1314
1315void FullCodeGenerator::EmitAccessor(ObjectLiteralProperty* property) {
1316 Expression* expression = (property == NULL) ? NULL : property->value();
1317 if (expression == NULL) {
Ben Murdoch097c5b22016-05-18 11:27:45 +01001318 PushOperand(isolate()->factory()->null_value());
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00001319 } else {
1320 VisitForStackValue(expression);
1321 if (NeedsHomeObject(expression)) {
1322 DCHECK(property->kind() == ObjectLiteral::Property::GETTER ||
1323 property->kind() == ObjectLiteral::Property::SETTER);
1324 int offset = property->kind() == ObjectLiteral::Property::GETTER ? 2 : 3;
1325 EmitSetHomeObject(expression, offset, property->GetSlot());
1326 }
1327 }
1328}
1329
1330
// Compiles an object literal. The boilerplate object is cloned (via stub or
// runtime), then each property is initialized: the "static" prefix of
// non-computed names via StoreICs / runtime property stores, accessors via a
// single runtime call per getter/setter pair, and the "dynamic" suffix
// (starting at the first computed name) via define-property runtime calls
// that preserve insertion order.
void FullCodeGenerator::VisitObjectLiteral(ObjectLiteral* expr) {
  Comment cmnt(masm_, "[ ObjectLiteral");

  Handle<FixedArray> constant_properties = expr->constant_properties();
  int flags = expr->ComputeFlags();
  // If any of the keys would store to the elements array, then we shouldn't
  // allow it.
  if (MustCreateObjectLiteralWithRuntime(expr)) {
    __ push(Operand(ebp, JavaScriptFrameConstants::kFunctionOffset));
    __ push(Immediate(Smi::FromInt(expr->literal_index())));
    __ push(Immediate(constant_properties));
    __ push(Immediate(Smi::FromInt(flags)));
    __ CallRuntime(Runtime::kCreateObjectLiteral);
  } else {
    __ mov(eax, Operand(ebp, JavaScriptFrameConstants::kFunctionOffset));
    __ mov(ebx, Immediate(Smi::FromInt(expr->literal_index())));
    __ mov(ecx, Immediate(constant_properties));
    __ mov(edx, Immediate(Smi::FromInt(flags)));
    FastCloneShallowObjectStub stub(isolate(), expr->properties_count());
    __ CallStub(&stub);
  }
  PrepareForBailoutForId(expr->CreateLiteralId(), TOS_REG);

  // If result_saved is true the result is on top of the stack. If
  // result_saved is false the result is in eax.
  bool result_saved = false;

  AccessorTable accessor_table(zone());
  int property_index = 0;
  // First pass: the static part — stops at the first computed name.
  for (; property_index < expr->properties()->length(); property_index++) {
    ObjectLiteral::Property* property = expr->properties()->at(property_index);
    if (property->is_computed_name()) break;
    if (property->IsCompileTimeValue()) continue;

    Literal* key = property->key()->AsLiteral();
    Expression* value = property->value();
    if (!result_saved) {
      PushOperand(eax);  // Save result on the stack
      result_saved = true;
    }
    switch (property->kind()) {
      case ObjectLiteral::Property::CONSTANT:
        UNREACHABLE();
      case ObjectLiteral::Property::MATERIALIZED_LITERAL:
        DCHECK(!CompileTimeValue::IsCompileTimeValue(value));
        // Fall through.
      case ObjectLiteral::Property::COMPUTED:
        // It is safe to use [[Put]] here because the boilerplate already
        // contains computed properties with an uninitialized value.
        if (key->value()->IsInternalizedString()) {
          if (property->emit_store()) {
            // Named store through a StoreIC; receiver stays on the stack.
            VisitForAccumulatorValue(value);
            DCHECK(StoreDescriptor::ValueRegister().is(eax));
            __ mov(StoreDescriptor::NameRegister(), Immediate(key->value()));
            __ mov(StoreDescriptor::ReceiverRegister(), Operand(esp, 0));
            EmitLoadStoreICSlot(property->GetSlot(0));
            CallStoreIC();
            PrepareForBailoutForId(key->id(), NO_REGISTERS);
            if (NeedsHomeObject(value)) {
              EmitSetHomeObjectAccumulator(value, 0, property->GetSlot(1));
            }
          } else {
            VisitForEffect(value);
          }
          break;
        }
        PushOperand(Operand(esp, 0));  // Duplicate receiver.
        VisitForStackValue(key);
        VisitForStackValue(value);
        if (property->emit_store()) {
          if (NeedsHomeObject(value)) {
            EmitSetHomeObject(value, 2, property->GetSlot());
          }
          PushOperand(Smi::FromInt(SLOPPY));  // Language mode
          CallRuntimeWithOperands(Runtime::kSetProperty);
        } else {
          DropOperands(3);
        }
        break;
      case ObjectLiteral::Property::PROTOTYPE:
        PushOperand(Operand(esp, 0));  // Duplicate receiver.
        VisitForStackValue(value);
        DCHECK(property->emit_store());
        CallRuntimeWithOperands(Runtime::kInternalSetPrototype);
        PrepareForBailoutForId(expr->GetIdForPropertySet(property_index),
                               NO_REGISTERS);
        break;
      case ObjectLiteral::Property::GETTER:
        // Accessors are deferred so a getter/setter pair for the same key
        // can be defined with one runtime call below.
        if (property->emit_store()) {
          accessor_table.lookup(key)->second->getter = property;
        }
        break;
      case ObjectLiteral::Property::SETTER:
        if (property->emit_store()) {
          accessor_table.lookup(key)->second->setter = property;
        }
        break;
    }
  }

  // Emit code to define accessors, using only a single call to the runtime for
  // each pair of corresponding getters and setters.
  for (AccessorTable::Iterator it = accessor_table.begin();
       it != accessor_table.end();
       ++it) {
    PushOperand(Operand(esp, 0));  // Duplicate receiver.
    VisitForStackValue(it->first);

    EmitAccessor(it->second->getter);
    EmitAccessor(it->second->setter);

    PushOperand(Smi::FromInt(NONE));
    CallRuntimeWithOperands(Runtime::kDefineAccessorPropertyUnchecked);
  }

  // Object literals have two parts. The "static" part on the left contains no
  // computed property names, and so we can compute its map ahead of time; see
  // runtime.cc::CreateObjectLiteralBoilerplate. The second "dynamic" part
  // starts with the first computed property name, and continues with all
  // properties to its right. All the code from above initializes the static
  // component of the object literal, and arranges for the map of the result to
  // reflect the static order in which the keys appear. For the dynamic
  // properties, we compile them into a series of "SetOwnProperty" runtime
  // calls. This will preserve insertion order.
  for (; property_index < expr->properties()->length(); property_index++) {
    ObjectLiteral::Property* property = expr->properties()->at(property_index);

    Expression* value = property->value();
    if (!result_saved) {
      PushOperand(eax);  // Save result on the stack
      result_saved = true;
    }

    PushOperand(Operand(esp, 0));  // Duplicate receiver.

    if (property->kind() == ObjectLiteral::Property::PROTOTYPE) {
      DCHECK(!property->is_computed_name());
      VisitForStackValue(value);
      DCHECK(property->emit_store());
      CallRuntimeWithOperands(Runtime::kInternalSetPrototype);
      PrepareForBailoutForId(expr->GetIdForPropertySet(property_index),
                             NO_REGISTERS);
    } else {
      EmitPropertyKey(property, expr->GetIdForPropertyName(property_index));
      VisitForStackValue(value);
      if (NeedsHomeObject(value)) {
        EmitSetHomeObject(value, 2, property->GetSlot());
      }

      switch (property->kind()) {
        case ObjectLiteral::Property::CONSTANT:
        case ObjectLiteral::Property::MATERIALIZED_LITERAL:
        case ObjectLiteral::Property::COMPUTED:
          if (property->emit_store()) {
            PushOperand(Smi::FromInt(NONE));
            PushOperand(Smi::FromInt(property->NeedsSetFunctionName()));
            CallRuntimeWithOperands(Runtime::kDefineDataPropertyInLiteral);
          } else {
            DropOperands(3);
          }
          break;

        case ObjectLiteral::Property::PROTOTYPE:
          UNREACHABLE();
          break;

        case ObjectLiteral::Property::GETTER:
          PushOperand(Smi::FromInt(NONE));
          CallRuntimeWithOperands(Runtime::kDefineGetterPropertyUnchecked);
          break;

        case ObjectLiteral::Property::SETTER:
          PushOperand(Smi::FromInt(NONE));
          CallRuntimeWithOperands(Runtime::kDefineSetterPropertyUnchecked);
          break;
      }
    }
  }

  if (result_saved) {
    context()->PlugTOS();
  } else {
    context()->Plug(eax);
  }
}
1516
1517
// Compiles an array literal: clones the boilerplate array (via stub or
// runtime), then evaluates and stores the non-constant subexpressions. A
// first pass stores indexed elements through KeyedStoreICs; a second pass
// (after the first spread, if any) appends elements via the runtime.
void FullCodeGenerator::VisitArrayLiteral(ArrayLiteral* expr) {
  Comment cmnt(masm_, "[ ArrayLiteral");

  Handle<FixedArray> constant_elements = expr->constant_elements();
  bool has_constant_fast_elements =
      IsFastObjectElementsKind(expr->constant_elements_kind());

  AllocationSiteMode allocation_site_mode = TRACK_ALLOCATION_SITE;
  if (has_constant_fast_elements && !FLAG_allocation_site_pretenuring) {
    // If the only customer of allocation sites is transitioning, then
    // we can turn it off if we don't have anywhere else to transition to.
    allocation_site_mode = DONT_TRACK_ALLOCATION_SITE;
  }

  if (MustCreateArrayLiteralWithRuntime(expr)) {
    __ push(Operand(ebp, JavaScriptFrameConstants::kFunctionOffset));
    __ push(Immediate(Smi::FromInt(expr->literal_index())));
    __ push(Immediate(constant_elements));
    __ push(Immediate(Smi::FromInt(expr->ComputeFlags())));
    __ CallRuntime(Runtime::kCreateArrayLiteral);
  } else {
    __ mov(eax, Operand(ebp, JavaScriptFrameConstants::kFunctionOffset));
    __ mov(ebx, Immediate(Smi::FromInt(expr->literal_index())));
    __ mov(ecx, Immediate(constant_elements));
    FastCloneShallowArrayStub stub(isolate(), allocation_site_mode);
    __ CallStub(&stub);
  }
  PrepareForBailoutForId(expr->CreateLiteralId(), TOS_REG);

  bool result_saved = false;  // Is the result saved to the stack?
  ZoneList<Expression*>* subexprs = expr->values();
  int length = subexprs->length();

  // Emit code to evaluate all the non-constant subexpressions and to store
  // them into the newly cloned array.
  int array_index = 0;
  for (; array_index < length; array_index++) {
    Expression* subexpr = subexprs->at(array_index);
    DCHECK(!subexpr->IsSpread());

    // If the subexpression is a literal or a simple materialized literal it
    // is already set in the cloned array.
    if (CompileTimeValue::IsCompileTimeValue(subexpr)) continue;

    if (!result_saved) {
      PushOperand(eax);  // array literal.
      result_saved = true;
    }
    VisitForAccumulatorValue(subexpr);

    // Store the element via a KeyedStoreIC: key is the index, receiver is
    // the array literal on top of the stack, value is in the accumulator.
    __ mov(StoreDescriptor::NameRegister(),
           Immediate(Smi::FromInt(array_index)));
    __ mov(StoreDescriptor::ReceiverRegister(), Operand(esp, 0));
    EmitLoadStoreICSlot(expr->LiteralFeedbackSlot());
    Handle<Code> ic =
        CodeFactory::KeyedStoreIC(isolate(), language_mode()).code();
    CallIC(ic);
    PrepareForBailoutForId(expr->GetIdForElement(array_index), NO_REGISTERS);
  }

  // In case the array literal contains spread expressions it has two parts. The
  // first part is the "static" array which has a literal index is handled
  // above. The second part is the part after the first spread expression
  // (inclusive) and these elements gets appended to the array. Note that the
  // number elements an iterable produces is unknown ahead of time.
  if (array_index < length && result_saved) {
    PopOperand(eax);
    result_saved = false;
  }
  for (; array_index < length; array_index++) {
    Expression* subexpr = subexprs->at(array_index);

    PushOperand(eax);
    DCHECK(!subexpr->IsSpread());
    VisitForStackValue(subexpr);
    CallRuntimeWithOperands(Runtime::kAppendElement);

    PrepareForBailoutForId(expr->GetIdForElement(array_index), NO_REGISTERS);
  }

  if (result_saved) {
    context()->PlugTOS();
  } else {
    context()->Plug(eax);
  }
}
1604
1605
// Generates code for an assignment expression. Handles all five LHS shapes
// (variable, named/keyed property, named/keyed super property) and both
// simple and compound (e.g. 'a += b') assignments. The assigned value is
// plugged into the current expression context.
void FullCodeGenerator::VisitAssignment(Assignment* expr) {
  DCHECK(expr->target()->IsValidReferenceExpressionOrThis());

  Comment cmnt(masm_, "[ Assignment");
  SetExpressionPosition(expr, INSERT_BREAK);

  Property* property = expr->target()->AsProperty();
  LhsKind assign_type = Property::GetAssignType(property);

  // Evaluate LHS expression (receiver/key subexpressions only; the actual
  // load happens below, and only for compound assignments).
  switch (assign_type) {
    case VARIABLE:
      // Nothing to do here.
      break;
    case NAMED_SUPER_PROPERTY:
      VisitForStackValue(
          property->obj()->AsSuperPropertyReference()->this_var());
      VisitForAccumulatorValue(
          property->obj()->AsSuperPropertyReference()->home_object());
      PushOperand(result_register());
      if (expr->is_compound()) {
        // Duplicate [this, home_object] so the load consumes one copy and
        // the store below still finds its operands.
        PushOperand(MemOperand(esp, kPointerSize));
        PushOperand(result_register());
      }
      break;
    case NAMED_PROPERTY:
      if (expr->is_compound()) {
        // We need the receiver both on the stack and in the register.
        VisitForStackValue(property->obj());
        __ mov(LoadDescriptor::ReceiverRegister(), Operand(esp, 0));
      } else {
        VisitForStackValue(property->obj());
      }
      break;
    case KEYED_SUPER_PROPERTY:
      VisitForStackValue(
          property->obj()->AsSuperPropertyReference()->this_var());
      VisitForStackValue(
          property->obj()->AsSuperPropertyReference()->home_object());
      VisitForAccumulatorValue(property->key());
      PushOperand(result_register());
      if (expr->is_compound()) {
        // Duplicate [this, home_object, key] for the load.
        PushOperand(MemOperand(esp, 2 * kPointerSize));
        PushOperand(MemOperand(esp, 2 * kPointerSize));
        PushOperand(result_register());
      }
      break;
    case KEYED_PROPERTY: {
      if (expr->is_compound()) {
        VisitForStackValue(property->obj());
        VisitForStackValue(property->key());
        // Receiver and key stay on the stack; also materialize them in the
        // load IC's registers for the keyed load below.
        __ mov(LoadDescriptor::ReceiverRegister(), Operand(esp, kPointerSize));
        __ mov(LoadDescriptor::NameRegister(), Operand(esp, 0));
      } else {
        VisitForStackValue(property->obj());
        VisitForStackValue(property->key());
      }
      break;
    }
  }

  // For compound assignments we need another deoptimization point after the
  // variable/property load.
  if (expr->is_compound()) {
    AccumulatorValueContext result_context(this);
    { AccumulatorValueContext left_operand_context(this);
      switch (assign_type) {
        case VARIABLE:
          EmitVariableLoad(expr->target()->AsVariableProxy());
          PrepareForBailout(expr->target(), TOS_REG);
          break;
        case NAMED_SUPER_PROPERTY:
          EmitNamedSuperPropertyLoad(property);
          PrepareForBailoutForId(property->LoadId(), TOS_REG);
          break;
        case NAMED_PROPERTY:
          EmitNamedPropertyLoad(property);
          PrepareForBailoutForId(property->LoadId(), TOS_REG);
          break;
        case KEYED_SUPER_PROPERTY:
          EmitKeyedSuperPropertyLoad(property);
          PrepareForBailoutForId(property->LoadId(), TOS_REG);
          break;
        case KEYED_PROPERTY:
          EmitKeyedPropertyLoad(property);
          PrepareForBailoutForId(property->LoadId(), TOS_REG);
          break;
      }
    }

    Token::Value op = expr->binary_op();
    PushOperand(eax);  // Left operand goes on the stack.
    VisitForAccumulatorValue(expr->value());

    if (ShouldInlineSmiCase(op)) {
      EmitInlineSmiBinaryOp(expr->binary_operation(),
                            op,
                            expr->target(),
                            expr->value());
    } else {
      EmitBinaryOp(expr->binary_operation(), op);
    }

    // Deoptimization point in case the binary operation may have side effects.
    PrepareForBailout(expr->binary_operation(), TOS_REG);
  } else {
    VisitForAccumulatorValue(expr->value());
  }

  SetExpressionPosition(expr);

  // Store the value.
  switch (assign_type) {
    case VARIABLE:
      EmitVariableAssignment(expr->target()->AsVariableProxy()->var(),
                             expr->op(), expr->AssignmentSlot());
      PrepareForBailoutForId(expr->AssignmentId(), TOS_REG);
      context()->Plug(eax);
      break;
    case NAMED_PROPERTY:
      EmitNamedPropertyAssignment(expr);
      break;
    case NAMED_SUPER_PROPERTY:
      EmitNamedSuperPropertyStore(property);
      context()->Plug(result_register());
      break;
    case KEYED_SUPER_PROPERTY:
      EmitKeyedSuperPropertyStore(property);
      context()->Plug(result_register());
      break;
    case KEYED_PROPERTY:
      EmitKeyedPropertyAssignment(expr);
      break;
  }
}
1741
1742
// Generates code for a 'yield' expression: records the continuation position
// and context in the generator object, suspends via the runtime (unless the
// operand stack is empty), and on re-entry dispatches on the resume mode
// (RETURN boxes the input in a done iterator result; otherwise the input
// value becomes the value of the yield expression).
void FullCodeGenerator::VisitYield(Yield* expr) {
  Comment cmnt(masm_, "[ Yield");
  SetExpressionPosition(expr);

  // Evaluate yielded value first; the initial iterator definition depends on
  // this. It stays on the stack while we update the iterator.
  VisitForStackValue(expr->expression());

  Label suspend, continuation, post_runtime, resume;

  __ jmp(&suspend);
  __ bind(&continuation);
  // When we arrive here, the stack top is the resume mode and
  // result_register() holds the input value (the argument given to the
  // respective resume operation).
  __ RecordGeneratorContinuation();
  __ pop(ebx);
  __ cmp(ebx, Immediate(Smi::FromInt(JSGeneratorObject::RETURN)));
  __ j(not_equal, &resume);
  // RETURN mode: wrap the input value in a {value, done: true} result and
  // unwind out of the generator.
  __ push(result_register());
  EmitCreateIteratorResult(true);
  EmitUnwindAndReturn();

  __ bind(&suspend);
  OperandStackDepthIncrement(1);  // Not popped on this path.
  VisitForAccumulatorValue(expr->generator_object());
  DCHECK(continuation.pos() > 0 && Smi::IsValid(continuation.pos()));
  // Record the bytecode offset to resume at and the current context in the
  // generator object (with a write barrier for the context store).
  __ mov(FieldOperand(eax, JSGeneratorObject::kContinuationOffset),
         Immediate(Smi::FromInt(continuation.pos())));
  __ mov(FieldOperand(eax, JSGeneratorObject::kContextOffset), esi);
  __ mov(ecx, esi);
  __ RecordWriteField(eax, JSGeneratorObject::kContextOffset, ecx, edx,
                      kDontSaveFPRegs);
  // If esp is already at the expression-stack base there are no operands to
  // save, so skip the runtime call.
  __ lea(ebx, Operand(ebp, StandardFrameConstants::kExpressionsOffset));
  __ cmp(esp, ebx);
  __ j(equal, &post_runtime);
  __ push(eax);  // generator object
  __ CallRuntime(Runtime::kSuspendJSGeneratorObject, 1);
  __ mov(context_register(),
         Operand(ebp, StandardFrameConstants::kContextOffset));
  __ bind(&post_runtime);
  PopOperand(result_register());
  EmitReturnSequence();

  __ bind(&resume);
  context()->Plug(result_register());
}
1790
1791
// Resumes a suspended generator. Rebuilds the generator's JS frame (receiver,
// argument holes, saved context/function), then either jumps straight back to
// the recorded continuation (fast path: NEXT mode with an empty operand
// stack) or calls the runtime to restore operands and handlers.
void FullCodeGenerator::EmitGeneratorResume(Expression *generator,
    Expression *value,
    JSGeneratorObject::ResumeMode resume_mode) {
  // The value stays in eax, and is ultimately read by the resumed generator, as
  // if CallRuntime(Runtime::kSuspendJSGeneratorObject) returned it. Or it
  // is read to throw the value when the resumed generator is already closed.
  // ebx will hold the generator object until the activation has been resumed.
  VisitForStackValue(generator);
  VisitForAccumulatorValue(value);
  PopOperand(ebx);

  // Store input value into generator object (with write barrier).
  __ mov(FieldOperand(ebx, JSGeneratorObject::kInputOffset), result_register());
  __ mov(ecx, result_register());
  __ RecordWriteField(ebx, JSGeneratorObject::kInputOffset, ecx, edx,
                      kDontSaveFPRegs);

  // Load suspended function and context.
  __ mov(esi, FieldOperand(ebx, JSGeneratorObject::kContextOffset));
  __ mov(edi, FieldOperand(ebx, JSGeneratorObject::kFunctionOffset));

  // Push receiver.
  __ push(FieldOperand(ebx, JSGeneratorObject::kReceiverOffset));

  // Push holes for arguments to generator function. Since the parser forced
  // context allocation for any variables in generators, the actual argument
  // values have already been copied into the context and these dummy values
  // will never be used.
  __ mov(edx, FieldOperand(edi, JSFunction::kSharedFunctionInfoOffset));
  __ mov(edx,
         FieldOperand(edx, SharedFunctionInfo::kFormalParameterCountOffset));
  __ mov(ecx, isolate()->factory()->the_hole_value());
  // edx holds the smi-tagged formal parameter count; decrement by Smi 1 until
  // the subtraction borrows (i.e. count exhausted).
  Label push_argument_holes, push_frame;
  __ bind(&push_argument_holes);
  __ sub(edx, Immediate(Smi::FromInt(1)));
  __ j(carry, &push_frame);
  __ push(ecx);
  __ jmp(&push_argument_holes);

  // Enter a new JavaScript frame, and initialize its slots as they were when
  // the generator was suspended. The call pushes a return address so the
  // resumed activation eventually returns to &done.
  Label resume_frame, done;
  __ bind(&push_frame);
  __ call(&resume_frame);
  __ jmp(&done);
  __ bind(&resume_frame);
  __ push(ebp);  // Caller's frame pointer.
  __ mov(ebp, esp);
  __ push(esi);  // Callee's context.
  __ push(edi);  // Callee's JS Function.

  // Load the operand stack size (untagged element count).
  __ mov(edx, FieldOperand(ebx, JSGeneratorObject::kOperandStackOffset));
  __ mov(edx, FieldOperand(edx, FixedArray::kLengthOffset));
  __ SmiUntag(edx);

  // If we are sending a value and there is no operand stack, we can jump back
  // in directly.
  if (resume_mode == JSGeneratorObject::NEXT) {
    Label slow_resume;
    __ cmp(edx, Immediate(0));
    __ j(not_zero, &slow_resume);
    // Compute the resume address: code entry + recorded continuation offset.
    __ mov(edx, FieldOperand(edi, JSFunction::kCodeEntryOffset));
    __ mov(ecx, FieldOperand(ebx, JSGeneratorObject::kContinuationOffset));
    __ SmiUntag(ecx);
    __ add(edx, ecx);
    __ mov(FieldOperand(ebx, JSGeneratorObject::kContinuationOffset),
           Immediate(Smi::FromInt(JSGeneratorObject::kGeneratorExecuting)));
    __ Push(Smi::FromInt(resume_mode));  // Consumed in continuation.
    __ jmp(edx);
    __ bind(&slow_resume);
  }

  // Otherwise, we push holes for the operand stack and call the runtime to fix
  // up the stack and the handlers.
  Label push_operand_holes, call_resume;
  __ bind(&push_operand_holes);
  __ sub(edx, Immediate(1));
  __ j(carry, &call_resume);
  __ push(ecx);
  __ jmp(&push_operand_holes);
  __ bind(&call_resume);
  __ Push(Smi::FromInt(resume_mode));  // Consumed in continuation.
  __ push(ebx);
  __ push(result_register());
  __ Push(Smi::FromInt(resume_mode));
  __ CallRuntime(Runtime::kResumeJSGeneratorObject);
  // Not reached: the runtime call returns elsewhere.
  __ Abort(kGeneratorFailedToResume);

  __ bind(&done);
  context()->Plug(result_register());
}
1885
// Pushes a memory operand onto the machine stack while keeping the
// generator's tracked operand stack depth in sync with the emitted push.
void FullCodeGenerator::PushOperand(MemOperand operand) {
  OperandStackDepthIncrement(1);
  __ Push(operand);
}
1890
1891void FullCodeGenerator::EmitOperandStackDepthCheck() {
1892 if (FLAG_debug_code) {
1893 int expected_diff = StandardFrameConstants::kFixedFrameSizeFromFp +
1894 operand_stack_depth_ * kPointerSize;
1895 __ mov(eax, ebp);
1896 __ sub(eax, esp);
1897 __ cmp(eax, Immediate(expected_diff));
1898 __ Assert(equal, kUnexpectedStackDepth);
1899 }
1900}
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00001901
// Allocates a JSIteratorResult in eax, with 'value' popped from the operand
// stack and 'done' set from the argument. Falls back to a runtime allocation
// when inline new-space allocation fails.
void FullCodeGenerator::EmitCreateIteratorResult(bool done) {
  Label allocate, done_allocate;

  // Fast path: inline allocation in new space.
  __ Allocate(JSIteratorResult::kSize, eax, ecx, edx, &allocate, TAG_OBJECT);
  __ jmp(&done_allocate, Label::kNear);

  // Slow path: allocate through the runtime.
  __ bind(&allocate);
  __ Push(Smi::FromInt(JSIteratorResult::kSize));
  __ CallRuntime(Runtime::kAllocateInNewSpace);

  __ bind(&done_allocate);
  // Initialize map, properties, and elements.
  __ mov(ebx, NativeContextOperand());
  __ mov(ebx, ContextOperand(ebx, Context::ITERATOR_RESULT_MAP_INDEX));
  __ mov(FieldOperand(eax, HeapObject::kMapOffset), ebx);
  __ mov(FieldOperand(eax, JSObject::kPropertiesOffset),
         isolate()->factory()->empty_fixed_array());
  __ mov(FieldOperand(eax, JSObject::kElementsOffset),
         isolate()->factory()->empty_fixed_array());
  // The result's value field comes straight off the operand stack.
  __ pop(FieldOperand(eax, JSIteratorResult::kValueOffset));
  __ mov(FieldOperand(eax, JSIteratorResult::kDoneOffset),
         isolate()->factory()->ToBoolean(done));
  STATIC_ASSERT(JSIteratorResult::kSize == 5 * kPointerSize);
  // Account for the pop above in the tracked operand stack depth.
  OperandStackDepthDecrement(1);
}
1926
1927
// Emits an inlined smi fast path for a binary operation, with a patchable
// jump (JumpPatchSite) and a BinaryOpIC stub call as the fallback for
// non-smi operands or results that do not fit in a smi.
void FullCodeGenerator::EmitInlineSmiBinaryOp(BinaryOperation* expr,
                                              Token::Value op,
                                              Expression* left,
                                              Expression* right) {
  // Do combined smi check of the operands. Left operand is on the
  // stack. Right operand is in eax.
  Label smi_case, done, stub_call;
  PopOperand(edx);
  __ mov(ecx, eax);
  // OR the operands so a single tag test covers both.
  __ or_(eax, edx);
  JumpPatchSite patch_site(masm_);
  patch_site.EmitJumpIfSmi(eax, &smi_case, Label::kNear);

  // Stub fallback: restore the right operand to eax and call the IC.
  __ bind(&stub_call);
  __ mov(eax, ecx);
  Handle<Code> code = CodeFactory::BinaryOpIC(isolate(), op).code();
  CallIC(code, expr->BinaryOperationFeedbackId());
  patch_site.EmitPatchInfo();
  __ jmp(&done, Label::kNear);

  // Smi case. On entry: left in edx, right in ecx (right also in eax).
  __ bind(&smi_case);
  __ mov(eax, edx);  // Copy left operand in case of a stub call.

  switch (op) {
    case Token::SAR:
      __ SmiUntag(ecx);
      __ sar_cl(eax);  // No checks of result necessary
      // Re-tag by clearing the bits the shift may have dragged into the tag.
      __ and_(eax, Immediate(~kSmiTagMask));
      break;
    case Token::SHL: {
      Label result_ok;
      __ SmiUntag(eax);
      __ SmiUntag(ecx);
      __ shl_cl(eax);
      // Check that the *signed* result fits in a smi.
      __ cmp(eax, 0xc0000000);
      __ j(positive, &result_ok);
      // Restore the (tagged) shift count and go to the stub.
      __ SmiTag(ecx);
      __ jmp(&stub_call);
      __ bind(&result_ok);
      __ SmiTag(eax);
      break;
    }
    case Token::SHR: {
      Label result_ok;
      __ SmiUntag(eax);
      __ SmiUntag(ecx);
      __ shr_cl(eax);
      // Unsigned result must fit in a (positive) smi.
      __ test(eax, Immediate(0xc0000000));
      __ j(zero, &result_ok);
      __ SmiTag(ecx);
      __ jmp(&stub_call);
      __ bind(&result_ok);
      __ SmiTag(eax);
      break;
    }
    case Token::ADD:
      __ add(eax, ecx);
      __ j(overflow, &stub_call);
      break;
    case Token::SUB:
      __ sub(eax, ecx);
      __ j(overflow, &stub_call);
      break;
    case Token::MUL: {
      __ SmiUntag(eax);
      __ imul(eax, ecx);
      __ j(overflow, &stub_call);
      __ test(eax, eax);
      __ j(not_zero, &done, Label::kNear);
      // Result is zero: if either operand is negative, the exact result is
      // -0, which a smi cannot represent — defer to the stub.
      __ mov(ebx, edx);
      __ or_(ebx, ecx);
      __ j(negative, &stub_call);
      break;
    }
    case Token::BIT_OR:
      // Bitwise ops on smis stay smis; no overflow checks needed.
      __ or_(eax, ecx);
      break;
    case Token::BIT_AND:
      __ and_(eax, ecx);
      break;
    case Token::BIT_XOR:
      __ xor_(eax, ecx);
      break;
    default:
      UNREACHABLE();
  }

  __ bind(&done);
  context()->Plug(eax);
}
2020
2021
// Defines the properties of a class literal on the constructor (static
// members, at esp[kPointerSize]) or the prototype (at esp[0]), dispatching
// to the appropriate runtime function per property kind.
void FullCodeGenerator::EmitClassDefineProperties(ClassLiteral* lit) {
  for (int i = 0; i < lit->properties()->length(); i++) {
    ObjectLiteral::Property* property = lit->properties()->at(i);
    Expression* value = property->value();

    // Push the target object the property will be defined on.
    if (property->is_static()) {
      PushOperand(Operand(esp, kPointerSize));  // constructor
    } else {
      PushOperand(Operand(esp, 0));  // prototype
    }
    EmitPropertyKey(property, lit->GetIdForProperty(i));

    // The static prototype property is read only. We handle the non computed
    // property name case in the parser. Since this is the only case where we
    // need to check for an own read only property we special case this so we do
    // not need to do this for every property.
    if (property->is_static() && property->is_computed_name()) {
      __ CallRuntime(Runtime::kThrowIfStaticPrototype);
      __ push(eax);
    }

    VisitForStackValue(value);
    if (NeedsHomeObject(value)) {
      EmitSetHomeObject(value, 2, property->GetSlot());
    }

    switch (property->kind()) {
      case ObjectLiteral::Property::CONSTANT:
      case ObjectLiteral::Property::MATERIALIZED_LITERAL:
      case ObjectLiteral::Property::PROTOTYPE:
        UNREACHABLE();
      case ObjectLiteral::Property::COMPUTED:
        PushOperand(Smi::FromInt(DONT_ENUM));
        PushOperand(Smi::FromInt(property->NeedsSetFunctionName()));
        CallRuntimeWithOperands(Runtime::kDefineDataPropertyInLiteral);
        break;

      case ObjectLiteral::Property::GETTER:
        PushOperand(Smi::FromInt(DONT_ENUM));
        CallRuntimeWithOperands(Runtime::kDefineGetterPropertyUnchecked);
        break;

      case ObjectLiteral::Property::SETTER:
        PushOperand(Smi::FromInt(DONT_ENUM));
        CallRuntimeWithOperands(Runtime::kDefineSetterPropertyUnchecked);
        break;
    }
  }
}
2071
2072
2073void FullCodeGenerator::EmitBinaryOp(BinaryOperation* expr, Token::Value op) {
Ben Murdoch097c5b22016-05-18 11:27:45 +01002074 PopOperand(edx);
2075 Handle<Code> code = CodeFactory::BinaryOpIC(isolate(), op).code();
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00002076 JumpPatchSite patch_site(masm_); // unbound, signals no inlined smi code.
2077 CallIC(code, expr->BinaryOperationFeedbackId());
2078 patch_site.EmitPatchInfo();
2079 context()->Plug(eax);
2080}
2081
2082
// Assigns the value currently in eax to the reference expression 'expr'
// (used for plain Token::ASSIGN stores, e.g. from destructuring). The value
// is preserved across the receiver/key evaluation and left in eax.
void FullCodeGenerator::EmitAssignment(Expression* expr,
                                       FeedbackVectorSlot slot) {
  DCHECK(expr->IsValidReferenceExpressionOrThis());

  Property* prop = expr->AsProperty();
  LhsKind assign_type = Property::GetAssignType(prop);

  switch (assign_type) {
    case VARIABLE: {
      Variable* var = expr->AsVariableProxy()->var();
      EffectContext context(this);
      EmitVariableAssignment(var, Token::ASSIGN, slot);
      break;
    }
    case NAMED_PROPERTY: {
      PushOperand(eax);  // Preserve value.
      VisitForAccumulatorValue(prop->obj());
      __ Move(StoreDescriptor::ReceiverRegister(), eax);
      PopOperand(StoreDescriptor::ValueRegister());  // Restore value.
      __ mov(StoreDescriptor::NameRegister(),
             prop->key()->AsLiteral()->value());
      EmitLoadStoreICSlot(slot);
      CallStoreIC();
      break;
    }
    case NAMED_SUPER_PROPERTY: {
      PushOperand(eax);
      VisitForStackValue(prop->obj()->AsSuperPropertyReference()->this_var());
      VisitForAccumulatorValue(
          prop->obj()->AsSuperPropertyReference()->home_object());
      // Shuffle value out of the stack and home_object into it, so the
      // layout matches what EmitNamedSuperPropertyStore expects.
      // stack: value, this; eax: home_object
      Register scratch = ecx;
      Register scratch2 = edx;
      __ mov(scratch, result_register());               // home_object
      __ mov(eax, MemOperand(esp, kPointerSize));       // value
      __ mov(scratch2, MemOperand(esp, 0));             // this
      __ mov(MemOperand(esp, kPointerSize), scratch2);  // this
      __ mov(MemOperand(esp, 0), scratch);              // home_object
      // stack: this, home_object. eax: value
      EmitNamedSuperPropertyStore(prop);
      break;
    }
    case KEYED_SUPER_PROPERTY: {
      PushOperand(eax);
      VisitForStackValue(prop->obj()->AsSuperPropertyReference()->this_var());
      VisitForStackValue(
          prop->obj()->AsSuperPropertyReference()->home_object());
      VisitForAccumulatorValue(prop->key());
      // Rotate [value, this, home_object] + key into the layout expected by
      // EmitKeyedSuperPropertyStore, with the value back in eax.
      Register scratch = ecx;
      Register scratch2 = edx;
      __ mov(scratch2, MemOperand(esp, 2 * kPointerSize));  // value
      // stack: value, this, home_object; eax: key, edx: value
      __ mov(scratch, MemOperand(esp, kPointerSize));  // this
      __ mov(MemOperand(esp, 2 * kPointerSize), scratch);
      __ mov(scratch, MemOperand(esp, 0));  // home_object
      __ mov(MemOperand(esp, kPointerSize), scratch);
      __ mov(MemOperand(esp, 0), eax);
      __ mov(eax, scratch2);
      // stack: this, home_object, key; eax: value.
      EmitKeyedSuperPropertyStore(prop);
      break;
    }
    case KEYED_PROPERTY: {
      PushOperand(eax);  // Preserve value.
      VisitForStackValue(prop->obj());
      VisitForAccumulatorValue(prop->key());
      __ Move(StoreDescriptor::NameRegister(), eax);
      PopOperand(StoreDescriptor::ReceiverRegister());  // Receiver.
      PopOperand(StoreDescriptor::ValueRegister());     // Restore value.
      EmitLoadStoreICSlot(slot);
      Handle<Code> ic =
          CodeFactory::KeyedStoreIC(isolate(), language_mode()).code();
      CallIC(ic);
      break;
    }
  }
  context()->Plug(eax);
}
2161
2162
2163void FullCodeGenerator::EmitStoreToStackLocalOrContextSlot(
2164 Variable* var, MemOperand location) {
2165 __ mov(location, eax);
2166 if (var->IsContextSlot()) {
2167 __ mov(edx, eax);
2168 int offset = Context::SlotOffset(var->index());
2169 __ RecordWriteContextSlot(ecx, offset, edx, ebx, kDontSaveFPRegs);
2170 }
2171}
2172
2173
// Stores the value in eax into the given variable, honoring the variable's
// mode (VAR/LET/CONST/CONST_LEGACY), its allocation (global, stack, context,
// or lookup slot), and whether this is an initializing store (Token::INIT).
// Illegal assignments raise ReferenceError / ConstAssignError as required.
void FullCodeGenerator::EmitVariableAssignment(Variable* var, Token::Value op,
                                               FeedbackVectorSlot slot) {
  if (var->IsUnallocated()) {
    // Global var, const, or let: store through the StoreIC on the global
    // object (the native context's extension).
    __ mov(StoreDescriptor::NameRegister(), var->name());
    __ mov(StoreDescriptor::ReceiverRegister(), NativeContextOperand());
    __ mov(StoreDescriptor::ReceiverRegister(),
           ContextOperand(StoreDescriptor::ReceiverRegister(),
                          Context::EXTENSION_INDEX));
    EmitLoadStoreICSlot(slot);
    CallStoreIC();

  } else if (var->mode() == LET && op != Token::INIT) {
    // Non-initializing assignment to let variable needs a write barrier.
    // Throw a ReferenceError if the binding still holds the hole (TDZ).
    DCHECK(!var->IsLookupSlot());
    DCHECK(var->IsStackAllocated() || var->IsContextSlot());
    Label assign;
    MemOperand location = VarOperand(var, ecx);
    __ mov(edx, location);
    __ cmp(edx, isolate()->factory()->the_hole_value());
    __ j(not_equal, &assign, Label::kNear);
    __ push(Immediate(var->name()));
    __ CallRuntime(Runtime::kThrowReferenceError);
    __ bind(&assign);
    EmitStoreToStackLocalOrContextSlot(var, location);

  } else if (var->mode() == CONST && op != Token::INIT) {
    // Assignment to const variable: ReferenceError if uninitialized (TDZ),
    // otherwise always a ConstAssignError.
    DCHECK(!var->IsLookupSlot());
    DCHECK(var->IsStackAllocated() || var->IsContextSlot());
    Label const_error;
    MemOperand location = VarOperand(var, ecx);
    __ mov(edx, location);
    __ cmp(edx, isolate()->factory()->the_hole_value());
    __ j(not_equal, &const_error, Label::kNear);
    __ push(Immediate(var->name()));
    __ CallRuntime(Runtime::kThrowReferenceError);
    __ bind(&const_error);
    __ CallRuntime(Runtime::kThrowConstAssignError);

  } else if (var->is_this() && var->mode() == CONST && op == Token::INIT) {
    // Initializing assignment to const {this} needs a write barrier.
    // The binding must still be the hole, else 'this' was already bound.
    DCHECK(var->IsStackAllocated() || var->IsContextSlot());
    Label uninitialized_this;
    MemOperand location = VarOperand(var, ecx);
    __ mov(edx, location);
    __ cmp(edx, isolate()->factory()->the_hole_value());
    __ j(equal, &uninitialized_this);
    __ push(Immediate(var->name()));
    __ CallRuntime(Runtime::kThrowReferenceError);
    __ bind(&uninitialized_this);
    EmitStoreToStackLocalOrContextSlot(var, location);

  } else if (!var->is_const_mode() ||
             (var->mode() == CONST && op == Token::INIT)) {
    if (var->IsLookupSlot()) {
      // Assignment to var through the runtime's dynamic slot lookup.
      __ Push(Immediate(var->name()));
      __ Push(eax);
      __ CallRuntime(is_strict(language_mode())
                         ? Runtime::kStoreLookupSlot_Strict
                         : Runtime::kStoreLookupSlot_Sloppy);
    } else {
      // Assignment to var or initializing assignment to let/const in harmony
      // mode.
      DCHECK(var->IsStackAllocated() || var->IsContextSlot());
      MemOperand location = VarOperand(var, ecx);
      if (FLAG_debug_code && var->mode() == LET && op == Token::INIT) {
        // Check for an uninitialized let binding.
        __ mov(edx, location);
        __ cmp(edx, isolate()->factory()->the_hole_value());
        __ Check(equal, kLetBindingReInitialization);
      }
      EmitStoreToStackLocalOrContextSlot(var, location);
    }

  } else if (var->mode() == CONST_LEGACY && op == Token::INIT) {
    // Const initializers need a write barrier.
    DCHECK(!var->IsParameter());  // No const parameters.
    if (var->IsLookupSlot()) {
      __ push(eax);
      __ push(esi);
      __ push(Immediate(var->name()));
      __ CallRuntime(Runtime::kInitializeLegacyConstLookupSlot);
    } else {
      DCHECK(var->IsStackLocal() || var->IsContextSlot());
      // Only store if the slot is still the hole, i.e. not yet initialized.
      Label skip;
      MemOperand location = VarOperand(var, ecx);
      __ mov(edx, location);
      __ cmp(edx, isolate()->factory()->the_hole_value());
      __ j(not_equal, &skip, Label::kNear);
      EmitStoreToStackLocalOrContextSlot(var, location);
      __ bind(&skip);
    }

  } else {
    DCHECK(var->mode() == CONST_LEGACY && op != Token::INIT);
    if (is_strict(language_mode())) {
      __ CallRuntime(Runtime::kThrowConstAssignError);
    }
    // Silently ignore store in sloppy mode.
  }
}
2277
2278
// Completes a named-property assignment via the StoreIC, records the
// post-store bailout point, and plugs the stored value (eax).
void FullCodeGenerator::EmitNamedPropertyAssignment(Assignment* expr) {
  // Assignment to a property, using a named store IC.
  // eax : value
  // esp[0] : receiver
  Property* prop = expr->target()->AsProperty();
  DCHECK(prop != NULL);
  DCHECK(prop->key()->IsLiteral());

  __ mov(StoreDescriptor::NameRegister(), prop->key()->AsLiteral()->value());
  PopOperand(StoreDescriptor::ReceiverRegister());
  EmitLoadStoreICSlot(expr->AssignmentSlot());
  CallStoreIC();
  PrepareForBailoutForId(expr->AssignmentId(), TOS_REG);
  context()->Plug(eax);
}
2294
2295
// Stores eax into a named property of super via the StoreToSuper runtime
// function (strict or sloppy variant per the current language mode).
void FullCodeGenerator::EmitNamedSuperPropertyStore(Property* prop) {
  // Assignment to named property of super.
  // eax : value
  // stack : receiver ('this'), home_object
  DCHECK(prop != NULL);
  Literal* key = prop->key()->AsLiteral();
  DCHECK(key != NULL);

  PushOperand(key->value());
  PushOperand(eax);
  CallRuntimeWithOperands(is_strict(language_mode())
                              ? Runtime::kStoreToSuper_Strict
                              : Runtime::kStoreToSuper_Sloppy);
}
2310
2311
// Stores eax into a keyed property of super via the StoreKeyedToSuper
// runtime function (strict or sloppy variant per the current language mode).
void FullCodeGenerator::EmitKeyedSuperPropertyStore(Property* prop) {
  // Assignment to named property of super.
  // eax : value
  // stack : receiver ('this'), home_object, key

  PushOperand(eax);
  CallRuntimeWithOperands(is_strict(language_mode())
                              ? Runtime::kStoreKeyedToSuper_Strict
                              : Runtime::kStoreKeyedToSuper_Sloppy);
}
2322
2323
// Completes a keyed-property assignment via the KeyedStoreIC, records the
// post-store bailout point, and plugs the stored value (eax).
void FullCodeGenerator::EmitKeyedPropertyAssignment(Assignment* expr) {
  // Assignment to a property, using a keyed store IC.
  // eax : value
  // esp[0] : key
  // esp[kPointerSize] : receiver

  PopOperand(StoreDescriptor::NameRegister());  // Key.
  PopOperand(StoreDescriptor::ReceiverRegister());
  DCHECK(StoreDescriptor::ValueRegister().is(eax));
  Handle<Code> ic =
      CodeFactory::KeyedStoreIC(isolate(), language_mode()).code();
  EmitLoadStoreICSlot(expr->AssignmentSlot());
  CallIC(ic);
  PrepareForBailoutForId(expr->AssignmentId(), TOS_REG);
  context()->Plug(eax);
}
2340
2341
// Generates code for a property load, dispatching on named vs. keyed access
// and normal vs. super access. The loaded value is plugged from eax after
// the post-load bailout point.
void FullCodeGenerator::VisitProperty(Property* expr) {
  Comment cmnt(masm_, "[ Property");
  SetExpressionPosition(expr);

  Expression* key = expr->key();

  if (key->IsPropertyName()) {
    if (!expr->IsSuperAccess()) {
      // Named load: receiver in the IC's receiver register.
      VisitForAccumulatorValue(expr->obj());
      __ Move(LoadDescriptor::ReceiverRegister(), result_register());
      EmitNamedPropertyLoad(expr);
    } else {
      // Named super load: [this, home_object] on the stack.
      VisitForStackValue(expr->obj()->AsSuperPropertyReference()->this_var());
      VisitForStackValue(
          expr->obj()->AsSuperPropertyReference()->home_object());
      EmitNamedSuperPropertyLoad(expr);
    }
  } else {
    if (!expr->IsSuperAccess()) {
      // Keyed load: receiver popped from the stack, key in the accumulator.
      VisitForStackValue(expr->obj());
      VisitForAccumulatorValue(expr->key());
      PopOperand(LoadDescriptor::ReceiverRegister());              // Object.
      __ Move(LoadDescriptor::NameRegister(), result_register());  // Key.
      EmitKeyedPropertyLoad(expr);
    } else {
      // Keyed super load: [this, home_object, key] on the stack.
      VisitForStackValue(expr->obj()->AsSuperPropertyReference()->this_var());
      VisitForStackValue(
          expr->obj()->AsSuperPropertyReference()->home_object());
      VisitForStackValue(expr->key());
      EmitKeyedSuperPropertyLoad(expr);
    }
  }
  PrepareForBailoutForId(expr->LoadId(), TOS_REG);
  context()->Plug(eax);
}
2377
2378
2379void FullCodeGenerator::CallIC(Handle<Code> code,
2380 TypeFeedbackId ast_id) {
2381 ic_total_count_++;
2382 __ call(code, RelocInfo::CODE_TARGET, ast_id);
2383}
2384
2385
// Code common for calls using the IC. Loads the call target (from a
// variable or a named property of the receiver), arranges the stack as
// [target, receiver], and delegates argument evaluation to EmitCall.
void FullCodeGenerator::EmitCallWithLoadIC(Call* expr) {
  Expression* callee = expr->expression();

  // Get the target function.
  ConvertReceiverMode convert_mode;
  if (callee->IsVariableProxy()) {
    { StackValueContext context(this);
      EmitVariableLoad(callee->AsVariableProxy());
      PrepareForBailout(callee, NO_REGISTERS);
    }
    // Push undefined as receiver. This is patched in the method prologue if it
    // is a sloppy mode method.
    PushOperand(isolate()->factory()->undefined_value());
    convert_mode = ConvertReceiverMode::kNullOrUndefined;
  } else {
    // Load the function from the receiver.
    DCHECK(callee->IsProperty());
    DCHECK(!callee->AsProperty()->IsSuperAccess());
    __ mov(LoadDescriptor::ReceiverRegister(), Operand(esp, 0));
    EmitNamedPropertyLoad(callee->AsProperty());
    PrepareForBailoutForId(callee->AsProperty()->LoadId(), TOS_REG);
    // Push the target function under the receiver.
    PushOperand(Operand(esp, 0));
    __ mov(Operand(esp, kPointerSize), eax);
    convert_mode = ConvertReceiverMode::kNotNullOrUndefined;
  }

  EmitCall(expr, convert_mode);
}
2416
2417
// Emits a call whose target is a named property of super: loads the target
// through Runtime::kLoadFromSuper, leaves [target, receiver] on the stack,
// and delegates to EmitCall.
void FullCodeGenerator::EmitSuperCallWithLoadIC(Call* expr) {
  SetExpressionPosition(expr);
  Expression* callee = expr->expression();
  DCHECK(callee->IsProperty());
  Property* prop = callee->AsProperty();
  DCHECK(prop->IsSuperAccess());

  Literal* key = prop->key()->AsLiteral();
  DCHECK(!key->value()->IsSmi());
  // Load the function from the receiver.
  SuperPropertyReference* super_ref = prop->obj()->AsSuperPropertyReference();
  VisitForStackValue(super_ref->home_object());
  VisitForAccumulatorValue(super_ref->this_var());
  // Duplicate the receiver and re-push home_object/key as the runtime
  // call's arguments.
  PushOperand(eax);
  PushOperand(eax);
  PushOperand(Operand(esp, kPointerSize * 2));
  PushOperand(key->value());
  // Stack here:
  //  - home_object
  //  - this (receiver)
  //  - this (receiver) <-- LoadFromSuper will pop here and below.
  //  - home_object
  //  - key
  CallRuntimeWithOperands(Runtime::kLoadFromSuper);

  // Replace home_object with target function.
  __ mov(Operand(esp, kPointerSize), eax);

  // Stack here:
  // - target function
  // - this (receiver)
  EmitCall(expr);
}
2451
2452
// Code common for calls using the IC. Keyed variant: the target is loaded
// from the receiver via the keyed load IC using the given key expression,
// then the stack is arranged as [target, receiver] for EmitCall.
void FullCodeGenerator::EmitKeyedCallWithLoadIC(Call* expr,
                                                Expression* key) {
  // Load the key.
  VisitForAccumulatorValue(key);

  Expression* callee = expr->expression();

  // Load the function from the receiver.
  DCHECK(callee->IsProperty());
  __ mov(LoadDescriptor::ReceiverRegister(), Operand(esp, 0));
  __ mov(LoadDescriptor::NameRegister(), eax);
  EmitKeyedPropertyLoad(callee->AsProperty());
  PrepareForBailoutForId(callee->AsProperty()->LoadId(), TOS_REG);

  // Push the target function under the receiver.
  PushOperand(Operand(esp, 0));
  __ mov(Operand(esp, kPointerSize), eax);

  EmitCall(expr, ConvertReceiverMode::kNotNullOrUndefined);
}
2474
2475
// Emits a super-property call with a computed key, e.g. `super[expr](...)`.
// Mirrors EmitSuperCallWithLoadIC, but the key is evaluated at runtime and
// the load goes through Runtime::kLoadKeyedFromSuper.
void FullCodeGenerator::EmitKeyedSuperCallWithLoadIC(Call* expr) {
  Expression* callee = expr->expression();
  DCHECK(callee->IsProperty());
  Property* prop = callee->AsProperty();
  DCHECK(prop->IsSuperAccess());

  SetExpressionPosition(prop);
  // Load the function from the receiver.
  SuperPropertyReference* super_ref = prop->obj()->AsSuperPropertyReference();
  VisitForStackValue(super_ref->home_object());
  VisitForAccumulatorValue(super_ref->this_var());
  // 'this' is pushed twice: the lower copy survives as the call receiver,
  // the upper copy (plus home_object and key) is consumed by the runtime.
  PushOperand(eax);
  PushOperand(eax);
  PushOperand(Operand(esp, kPointerSize * 2));
  VisitForStackValue(prop->key());
  // Stack here:
  //  - home_object
  //  - this (receiver)
  //  - this (receiver) <-- LoadKeyedFromSuper will pop here and below.
  //  - home_object
  //  - key
  CallRuntimeWithOperands(Runtime::kLoadKeyedFromSuper);

  // Replace home_object with target function (runtime result is in eax).
  __ mov(Operand(esp, kPointerSize), eax);

  // Stack here:
  //  - target function
  //  - this (receiver)
  EmitCall(expr);
}
2507
2508
// Common tail for all JS call forms: pushes the arguments, then invokes the
// CallIC. Expects [function, receiver] to already be on the operand stack.
// 'mode' tells the IC how the receiver must be converted.
void FullCodeGenerator::EmitCall(Call* expr, ConvertReceiverMode mode) {
  // Load the arguments.
  ZoneList<Expression*>* args = expr->arguments();
  int arg_count = args->length();
  for (int i = 0; i < arg_count; i++) {
    VisitForStackValue(args->at(i));
  }

  PrepareForBailoutForId(expr->CallId(), NO_REGISTERS);
  SetCallPosition(expr, expr->tail_call_mode());
  if (expr->tail_call_mode() == TailCallMode::kAllow) {
    if (FLAG_trace) {
      __ CallRuntime(Runtime::kTraceTailCall);
    }
    // Update profiling counters before the tail call since we will
    // not return to this function.
    EmitProfilingCounterHandlingForReturnSequence(true);
  }
  Handle<Code> ic =
      CodeFactory::CallIC(isolate(), arg_count, mode, expr->tail_call_mode())
          .code();
  // CallIC calling convention: edx = feedback slot (as Smi), edi = function.
  __ Move(edx, Immediate(SmiFromSlot(expr->CallFeedbackICSlot())));
  __ mov(edi, Operand(esp, (arg_count + 1) * kPointerSize));
  // Don't assign a type feedback id to the IC, since type feedback is provided
  // by the vector above.
  CallIC(ic);
  // The IC consumed the arguments plus the receiver.
  OperandStackDepthDecrement(arg_count + 1);

  RecordJSReturnSite(expr);

  // Restore context register.
  __ mov(esi, Operand(ebp, StandardFrameConstants::kContextOffset));

  // Drop the function still on the stack and leave the result (eax) plugged.
  context()->DropAndPlug(1, eax);
}
2544
2545
// Emits the runtime call that resolves a possibly-direct `eval`. Pushes the
// four arguments kResolvePossiblyDirectEval expects (first eval argument or
// undefined, enclosing function, language mode, scope start position); the
// resolved callable is returned in eax by the runtime call.
void FullCodeGenerator::EmitResolvePossiblyDirectEval(int arg_count) {
  // Push copy of the first argument or undefined if it doesn't exist.
  if (arg_count > 0) {
    __ push(Operand(esp, arg_count * kPointerSize));
  } else {
    __ push(Immediate(isolate()->factory()->undefined_value()));
  }

  // Push the enclosing function.
  __ push(Operand(ebp, JavaScriptFrameConstants::kFunctionOffset));

  // Push the language mode.
  __ push(Immediate(Smi::FromInt(language_mode())));

  // Push the start position of the scope the calls resides in.
  __ push(Immediate(Smi::FromInt(scope()->start_position())));

  // Do the runtime call.
  __ CallRuntime(Runtime::kResolvePossiblyDirectEval);
}
2566
2567
// See http://www.ecma-international.org/ecma-262/6.0/#sec-function-calls.
// Pushes [callee, receiver] for a call through a variable. For lookup slots
// (potentially shadowed by eval/with) a fast path may be generated by
// EmitDynamicLookupFastCase; the slow path asks the runtime for both the
// function (eax) and its holder/receiver (edx).
void FullCodeGenerator::PushCalleeAndWithBaseObject(Call* expr) {
  VariableProxy* callee = expr->expression()->AsVariableProxy();
  if (callee->var()->IsLookupSlot()) {
    Label slow, done;
    SetExpressionPosition(callee);
    // Generate code for loading from variables potentially shadowed by
    // eval-introduced variables.
    EmitDynamicLookupFastCase(callee, NOT_INSIDE_TYPEOF, &slow, &done);

    __ bind(&slow);
    // Call the runtime to find the function to call (returned in eax) and
    // the object holding it (returned in edx).
    __ Push(callee->name());
    __ CallRuntime(Runtime::kLoadLookupSlotForCall);
    PushOperand(eax);  // Function.
    PushOperand(edx);  // Receiver.
    PrepareForBailoutForId(expr->LookupId(), NO_REGISTERS);

    // If fast case code has been generated, emit code to push the function
    // and receiver and have the slow path jump around this code.
    if (done.is_linked()) {
      Label call;
      __ jmp(&call, Label::kNear);
      __ bind(&done);
      // Push function.
      __ push(eax);
      // The receiver is implicitly the global receiver. Indicate this by
      // passing the hole to the call function stub.
      __ push(Immediate(isolate()->factory()->undefined_value()));
      __ bind(&call);
    }
  } else {
    VisitForStackValue(callee);
    // refEnv.WithBaseObject()
    PushOperand(isolate()->factory()->undefined_value());
  }
}
2606
2607
// Emits a call that may be a direct eval, e.g. `eval(...)`. Resolves the
// actual callable at runtime, patches it into the callee stack slot, then
// performs a generic Call builtin invocation.
void FullCodeGenerator::EmitPossiblyEvalCall(Call* expr) {
  // In a call to eval, we first call RuntimeHidden_ResolvePossiblyDirectEval
  // to resolve the function we need to call.  Then we call the resolved
  // function using the given arguments.
  ZoneList<Expression*>* args = expr->arguments();
  int arg_count = args->length();

  PushCalleeAndWithBaseObject(expr);

  // Push the arguments.
  for (int i = 0; i < arg_count; i++) {
    VisitForStackValue(args->at(i));
  }

  // Push a copy of the function (found below the arguments) and
  // resolve eval.
  __ push(Operand(esp, (arg_count + 1) * kPointerSize));
  EmitResolvePossiblyDirectEval(arg_count);

  // Touch up the stack with the resolved function (in eax).
  __ mov(Operand(esp, (arg_count + 1) * kPointerSize), eax);

  PrepareForBailoutForId(expr->EvalId(), NO_REGISTERS);

  SetCallPosition(expr);
  // Call builtin convention: edi = target, eax = argument count.
  __ mov(edi, Operand(esp, (arg_count + 1) * kPointerSize));
  __ Set(eax, arg_count);
  // kAny: the builtin performs any needed receiver conversion itself.
  __ Call(isolate()->builtins()->Call(ConvertReceiverMode::kAny,
                                      expr->tail_call_mode()),
          RelocInfo::CODE_TARGET);
  OperandStackDepthDecrement(arg_count + 1);
  RecordJSReturnSite(expr);
  // Restore context register.
  __ mov(esi, Operand(ebp, StandardFrameConstants::kContextOffset));
  context()->DropAndPlug(1, eax);
}
2644
2645
// Emits a `new` expression: evaluates constructor and arguments onto the
// stack, loads the type-feedback vector/slot, and invokes CallConstructStub.
void FullCodeGenerator::VisitCallNew(CallNew* expr) {
  Comment cmnt(masm_, "[ CallNew");
  // According to ECMA-262, section 11.2.2, page 44, the function
  // expression in new calls must be evaluated before the
  // arguments.

  // Push constructor on the stack.  If it's not a function it's used as
  // receiver for CALL_NON_FUNCTION, otherwise the value on the stack is
  // ignored.
  DCHECK(!expr->expression()->IsSuperPropertyReference());
  VisitForStackValue(expr->expression());

  // Push the arguments ("left-to-right") on the stack.
  ZoneList<Expression*>* args = expr->arguments();
  int arg_count = args->length();
  for (int i = 0; i < arg_count; i++) {
    VisitForStackValue(args->at(i));
  }

  // Call the construct call builtin that handles allocation and
  // constructor invocation.
  SetConstructCallPosition(expr);

  // Load function and argument count into edi and eax.
  __ Move(eax, Immediate(arg_count));
  __ mov(edi, Operand(esp, arg_count * kPointerSize));

  // Record call targets in unoptimized code: ebx = feedback vector,
  // edx = feedback slot (as Smi).
  __ EmitLoadTypeFeedbackVector(ebx);
  __ mov(edx, Immediate(SmiFromSlot(expr->CallNewFeedbackSlot())));

  CallConstructStub stub(isolate());
  __ call(stub.GetCode(), RelocInfo::CODE_TARGET);
  // The stub consumed the constructor and the arguments.
  OperandStackDepthDecrement(arg_count + 1);
  PrepareForBailoutForId(expr->ReturnId(), TOS_REG);
  // Restore context register.
  __ mov(esi, Operand(ebp, StandardFrameConstants::kContextOffset));
  context()->Plug(eax);
}
2685
2686
// Emits `super(...)`: pushes the super constructor (the prototype of the
// current function's map), the arguments, loads new.target into edx, and
// invokes the generic Construct builtin.
void FullCodeGenerator::EmitSuperConstructorCall(Call* expr) {
  SuperCallReference* super_call_ref =
      expr->expression()->AsSuperCallReference();
  DCHECK_NOT_NULL(super_call_ref);

  // Push the super constructor target on the stack (may be null,
  // but the Construct builtin can deal with that properly).
  VisitForAccumulatorValue(super_call_ref->this_function_var());
  __ AssertFunction(result_register());
  // The super constructor is map(this_function).prototype.
  __ mov(result_register(),
         FieldOperand(result_register(), HeapObject::kMapOffset));
  PushOperand(FieldOperand(result_register(), Map::kPrototypeOffset));

  // Push the arguments ("left-to-right") on the stack.
  ZoneList<Expression*>* args = expr->arguments();
  int arg_count = args->length();
  for (int i = 0; i < arg_count; i++) {
    VisitForStackValue(args->at(i));
  }

  // Call the construct call builtin that handles allocation and
  // constructor invocation.
  SetConstructCallPosition(expr);

  // Load new target into edx.
  VisitForAccumulatorValue(super_call_ref->new_target_var());
  __ mov(edx, result_register());

  // Load function and argument count into edi and eax.
  __ Move(eax, Immediate(arg_count));
  __ mov(edi, Operand(esp, arg_count * kPointerSize));

  __ Call(isolate()->builtins()->Construct(), RelocInfo::CODE_TARGET);
  // The builtin consumed the constructor and the arguments.
  OperandStackDepthDecrement(arg_count + 1);

  RecordJSReturnSite(expr);

  // Restore context register.
  __ mov(esi, Operand(ebp, StandardFrameConstants::kContextOffset));
  context()->Plug(eax);
}
2728
2729
// Inlined intrinsic %_IsSmi(value): tests the Smi tag bit of the single
// argument and plugs a boolean control-flow split into the context.
void FullCodeGenerator::EmitIsSmi(CallRuntime* expr) {
  ZoneList<Expression*>* args = expr->arguments();
  DCHECK(args->length() == 1);

  VisitForAccumulatorValue(args->at(0));

  Label materialize_true, materialize_false;
  Label* if_true = NULL;
  Label* if_false = NULL;
  Label* fall_through = NULL;
  context()->PrepareTest(&materialize_true, &materialize_false,
                         &if_true, &if_false, &fall_through);

  PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
  // A cleared tag bit (zero) means the value is a Smi.
  __ test(eax, Immediate(kSmiTagMask));
  Split(zero, if_true, if_false, fall_through);

  context()->Plug(if_true, if_false);
}
2749
2750
// Inlined intrinsic %_IsJSReceiver(value): true iff the argument is a heap
// object whose instance type is at or above FIRST_JS_RECEIVER_TYPE.
void FullCodeGenerator::EmitIsJSReceiver(CallRuntime* expr) {
  ZoneList<Expression*>* args = expr->arguments();
  DCHECK(args->length() == 1);

  VisitForAccumulatorValue(args->at(0));

  Label materialize_true, materialize_false;
  Label* if_true = NULL;
  Label* if_false = NULL;
  Label* fall_through = NULL;
  context()->PrepareTest(&materialize_true, &materialize_false,
                         &if_true, &if_false, &fall_through);

  __ JumpIfSmi(eax, if_false);
  __ CmpObjectType(eax, FIRST_JS_RECEIVER_TYPE, ebx);
  PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
  Split(above_equal, if_true, if_false, fall_through);

  context()->Plug(if_true, if_false);
}
2771
2772
// Inlined intrinsic %_IsArray(value): true iff the argument is a heap object
// with instance type JS_ARRAY_TYPE.
void FullCodeGenerator::EmitIsArray(CallRuntime* expr) {
  ZoneList<Expression*>* args = expr->arguments();
  DCHECK(args->length() == 1);

  VisitForAccumulatorValue(args->at(0));

  Label materialize_true, materialize_false;
  Label* if_true = NULL;
  Label* if_false = NULL;
  Label* fall_through = NULL;
  context()->PrepareTest(&materialize_true, &materialize_false,
                         &if_true, &if_false, &fall_through);

  __ JumpIfSmi(eax, if_false);
  __ CmpObjectType(eax, JS_ARRAY_TYPE, ebx);
  PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
  Split(equal, if_true, if_false, fall_through);

  context()->Plug(if_true, if_false);
}
2793
2794
// Inlined intrinsic %_IsTypedArray(value): true iff the argument is a heap
// object with instance type JS_TYPED_ARRAY_TYPE.
void FullCodeGenerator::EmitIsTypedArray(CallRuntime* expr) {
  ZoneList<Expression*>* args = expr->arguments();
  DCHECK(args->length() == 1);

  VisitForAccumulatorValue(args->at(0));

  Label materialize_true, materialize_false;
  Label* if_true = NULL;
  Label* if_false = NULL;
  Label* fall_through = NULL;
  context()->PrepareTest(&materialize_true, &materialize_false, &if_true,
                         &if_false, &fall_through);

  __ JumpIfSmi(eax, if_false);
  __ CmpObjectType(eax, JS_TYPED_ARRAY_TYPE, ebx);
  PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
  Split(equal, if_true, if_false, fall_through);

  context()->Plug(if_true, if_false);
}
2815
2816
// Inlined intrinsic %_IsRegExp(value): true iff the argument is a heap object
// with instance type JS_REGEXP_TYPE.
void FullCodeGenerator::EmitIsRegExp(CallRuntime* expr) {
  ZoneList<Expression*>* args = expr->arguments();
  DCHECK(args->length() == 1);

  VisitForAccumulatorValue(args->at(0));

  Label materialize_true, materialize_false;
  Label* if_true = NULL;
  Label* if_false = NULL;
  Label* fall_through = NULL;
  context()->PrepareTest(&materialize_true, &materialize_false,
                         &if_true, &if_false, &fall_through);

  __ JumpIfSmi(eax, if_false);
  __ CmpObjectType(eax, JS_REGEXP_TYPE, ebx);
  PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
  Split(equal, if_true, if_false, fall_through);

  context()->Plug(if_true, if_false);
}
2837
2838
// Inlined intrinsic %_IsJSProxy(value): true iff the argument is a heap
// object with instance type JS_PROXY_TYPE.
void FullCodeGenerator::EmitIsJSProxy(CallRuntime* expr) {
  ZoneList<Expression*>* args = expr->arguments();
  DCHECK(args->length() == 1);

  VisitForAccumulatorValue(args->at(0));

  Label materialize_true, materialize_false;
  Label* if_true = NULL;
  Label* if_false = NULL;
  Label* fall_through = NULL;
  context()->PrepareTest(&materialize_true, &materialize_false, &if_true,
                         &if_false, &fall_through);

  __ JumpIfSmi(eax, if_false);
  __ CmpObjectType(eax, JS_PROXY_TYPE, ebx);
  PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
  Split(equal, if_true, if_false, fall_through);

  context()->Plug(if_true, if_false);
}
2859
2860
// Inlined intrinsic %_ClassOf(value): computes the class name string of the
// argument. Non-receivers yield null; functions yield "Function"; objects
// whose map constructor is not a JSFunction yield "Object"; otherwise the
// constructor's instance class name is returned.
void FullCodeGenerator::EmitClassOf(CallRuntime* expr) {
  ZoneList<Expression*>* args = expr->arguments();
  DCHECK(args->length() == 1);
  Label done, null, function, non_function_constructor;

  VisitForAccumulatorValue(args->at(0));

  // If the object is not a JSReceiver, we return null.
  __ JumpIfSmi(eax, &null, Label::kNear);
  STATIC_ASSERT(LAST_JS_RECEIVER_TYPE == LAST_TYPE);
  __ CmpObjectType(eax, FIRST_JS_RECEIVER_TYPE, eax);
  // Note: eax now holds the map (CmpObjectType loaded it there).
  __ j(below, &null, Label::kNear);

  // Return 'Function' for JSFunction and JSBoundFunction objects.
  __ CmpInstanceType(eax, FIRST_FUNCTION_TYPE);
  STATIC_ASSERT(LAST_FUNCTION_TYPE == LAST_TYPE);
  __ j(above_equal, &function, Label::kNear);

  // Check if the constructor in the map is a JS function.
  __ GetMapConstructor(eax, eax, ebx);
  __ CmpInstanceType(ebx, JS_FUNCTION_TYPE);
  __ j(not_equal, &non_function_constructor, Label::kNear);

  // eax now contains the constructor function. Grab the
  // instance class name from there.
  __ mov(eax, FieldOperand(eax, JSFunction::kSharedFunctionInfoOffset));
  __ mov(eax, FieldOperand(eax, SharedFunctionInfo::kInstanceClassNameOffset));
  __ jmp(&done, Label::kNear);

  // Non-JS objects have class null.
  __ bind(&null);
  __ mov(eax, isolate()->factory()->null_value());
  __ jmp(&done, Label::kNear);

  // Functions have class 'Function'.
  __ bind(&function);
  __ mov(eax, isolate()->factory()->Function_string());
  __ jmp(&done, Label::kNear);

  // Objects with a non-function constructor have class 'Object'.
  __ bind(&non_function_constructor);
  __ mov(eax, isolate()->factory()->Object_string());

  // All done.
  __ bind(&done);

  context()->Plug(eax);
}
2909
2910
// Inlined intrinsic %_ValueOf(obj): unwraps a JSValue wrapper to its
// primitive value; any other input (including Smis) is returned unchanged.
void FullCodeGenerator::EmitValueOf(CallRuntime* expr) {
  ZoneList<Expression*>* args = expr->arguments();
  DCHECK(args->length() == 1);

  VisitForAccumulatorValue(args->at(0));  // Load the object.

  Label done;
  // If the object is a smi return the object.
  __ JumpIfSmi(eax, &done, Label::kNear);
  // If the object is not a value type, return the object.
  __ CmpObjectType(eax, JS_VALUE_TYPE, ebx);
  __ j(not_equal, &done, Label::kNear);
  __ mov(eax, FieldOperand(eax, JSValue::kValueOffset));

  __ bind(&done);
  context()->Plug(eax);
}
2928
2929
// Inlined intrinsic %_OneByteSeqStringSetChar(index, value, string): writes a
// single byte into a sequential one-byte string and returns the string.
// Index and value arrive as Smis; both are untagged before the store.
void FullCodeGenerator::EmitOneByteSeqStringSetChar(CallRuntime* expr) {
  ZoneList<Expression*>* args = expr->arguments();
  DCHECK_EQ(3, args->length());

  Register string = eax;
  Register index = ebx;
  Register value = ecx;

  VisitForStackValue(args->at(0));        // index
  VisitForStackValue(args->at(1));        // value
  VisitForAccumulatorValue(args->at(2));  // string

  PopOperand(value);
  PopOperand(index);

  if (FLAG_debug_code) {
    // Debug-only sanity checks: index and value must be Smis.
    __ test(value, Immediate(kSmiTagMask));
    __ Check(zero, kNonSmiValue);
    __ test(index, Immediate(kSmiTagMask));
    __ Check(zero, kNonSmiValue);
  }

  __ SmiUntag(value);
  __ SmiUntag(index);

  if (FLAG_debug_code) {
    static const uint32_t one_byte_seq_type = kSeqStringTag | kOneByteStringTag;
    __ EmitSeqStringSetCharCheck(string, index, value, one_byte_seq_type);
  }

  // One byte per character for a one-byte string (times_1 scaling).
  __ mov_b(FieldOperand(string, index, times_1, SeqOneByteString::kHeaderSize),
           value);
  context()->Plug(string);
}
2964
2965
// Inlined intrinsic %_TwoByteSeqStringSetChar(index, value, string): writes a
// single 16-bit code unit into a sequential two-byte string and returns the
// string. The index stays Smi-tagged for addressing: a Smi is value << 1,
// which matches the two-bytes-per-character stride.
void FullCodeGenerator::EmitTwoByteSeqStringSetChar(CallRuntime* expr) {
  ZoneList<Expression*>* args = expr->arguments();
  DCHECK_EQ(3, args->length());

  Register string = eax;
  Register index = ebx;
  Register value = ecx;

  VisitForStackValue(args->at(0));        // index
  VisitForStackValue(args->at(1));        // value
  VisitForAccumulatorValue(args->at(2));  // string
  PopOperand(value);
  PopOperand(index);

  if (FLAG_debug_code) {
    // Debug-only sanity checks; index is temporarily untagged for the
    // bounds/type check, then re-tagged for the addressing trick below.
    __ test(value, Immediate(kSmiTagMask));
    __ Check(zero, kNonSmiValue);
    __ test(index, Immediate(kSmiTagMask));
    __ Check(zero, kNonSmiValue);
    __ SmiUntag(index);
    static const uint32_t two_byte_seq_type = kSeqStringTag | kTwoByteStringTag;
    __ EmitSeqStringSetCharCheck(string, index, value, two_byte_seq_type);
    __ SmiTag(index);
  }

  __ SmiUntag(value);
  // No need to untag a smi for two-byte addressing.
  __ mov_w(FieldOperand(string, index, times_1, SeqTwoByteString::kHeaderSize),
           value);
  context()->Plug(string);
}
2997
2998
// Inlined intrinsic %_StringCharFromCode(code): converts a character code in
// eax to a one-character string in ebx via StringCharFromCodeGenerator (fast
// path inline, slow path through the generator's deferred code).
void FullCodeGenerator::EmitStringCharFromCode(CallRuntime* expr) {
  ZoneList<Expression*>* args = expr->arguments();
  DCHECK(args->length() == 1);

  VisitForAccumulatorValue(args->at(0));

  Label done;
  StringCharFromCodeGenerator generator(eax, ebx);
  generator.GenerateFast(masm_);
  __ jmp(&done);

  NopRuntimeCallHelper call_helper;
  generator.GenerateSlow(masm_, call_helper);

  __ bind(&done);
  context()->Plug(ebx);
}
3016
3017
// Inlined intrinsic %_StringCharCodeAt(string, index): loads the character
// code at 'index' into edx. Out-of-range indices yield NaN; a non-smi index
// yields undefined in the result register to trigger conversion in the
// generator's slow path.
void FullCodeGenerator::EmitStringCharCodeAt(CallRuntime* expr) {
  ZoneList<Expression*>* args = expr->arguments();
  DCHECK(args->length() == 2);

  VisitForStackValue(args->at(0));
  VisitForAccumulatorValue(args->at(1));

  Register object = ebx;
  Register index = eax;
  Register result = edx;

  PopOperand(object);

  Label need_conversion;
  Label index_out_of_range;
  Label done;
  StringCharCodeAtGenerator generator(object,
                                      index,
                                      result,
                                      &need_conversion,
                                      &need_conversion,
                                      &index_out_of_range,
                                      STRING_INDEX_IS_NUMBER);
  generator.GenerateFast(masm_);
  __ jmp(&done);

  __ bind(&index_out_of_range);
  // When the index is out of range, the spec requires us to return
  // NaN.
  __ Move(result, Immediate(isolate()->factory()->nan_value()));
  __ jmp(&done);

  __ bind(&need_conversion);
  // Move the undefined value into the result register, which will
  // trigger conversion.
  __ Move(result, Immediate(isolate()->factory()->undefined_value()));
  __ jmp(&done);

  NopRuntimeCallHelper call_helper;
  generator.GenerateSlow(masm_, NOT_PART_OF_IC_HANDLER, call_helper);

  __ bind(&done);
  context()->Plug(result);
}
3062
3063
// Inlined intrinsic %_StringCharAt(string, index): loads the one-character
// substring at 'index' into eax. Out-of-range indices yield the empty
// string; a non-smi index yields Smi 0 to trigger conversion in the
// generator's slow path.
void FullCodeGenerator::EmitStringCharAt(CallRuntime* expr) {
  ZoneList<Expression*>* args = expr->arguments();
  DCHECK(args->length() == 2);

  VisitForStackValue(args->at(0));
  VisitForAccumulatorValue(args->at(1));

  Register object = ebx;
  Register index = eax;
  Register scratch = edx;
  Register result = eax;

  PopOperand(object);

  Label need_conversion;
  Label index_out_of_range;
  Label done;
  StringCharAtGenerator generator(object,
                                  index,
                                  scratch,
                                  result,
                                  &need_conversion,
                                  &need_conversion,
                                  &index_out_of_range,
                                  STRING_INDEX_IS_NUMBER);
  generator.GenerateFast(masm_);
  __ jmp(&done);

  __ bind(&index_out_of_range);
  // When the index is out of range, the spec requires us to return
  // the empty string.
  __ Move(result, Immediate(isolate()->factory()->empty_string()));
  __ jmp(&done);

  __ bind(&need_conversion);
  // Move smi zero into the result register, which will trigger
  // conversion.
  __ Move(result, Immediate(Smi::FromInt(0)));
  __ jmp(&done);

  NopRuntimeCallHelper call_helper;
  generator.GenerateSlow(masm_, NOT_PART_OF_IC_HANDLER, call_helper);

  __ bind(&done);
  context()->Plug(result);
}
3110
3111
// Inlined intrinsic %_Call(target, receiver, ...args): pushes everything and
// invokes the generic Call builtin directly (no IC, no feedback).
void FullCodeGenerator::EmitCall(CallRuntime* expr) {
  ZoneList<Expression*>* args = expr->arguments();
  DCHECK_LE(2, args->length());
  // Push target, receiver and arguments onto the stack.
  for (Expression* const arg : *args) {
    VisitForStackValue(arg);
  }
  PrepareForBailoutForId(expr->CallId(), NO_REGISTERS);
  // Move target to edi.
  int const argc = args->length() - 2;  // Exclude target and receiver.
  __ mov(edi, Operand(esp, (argc + 1) * kPointerSize));
  // Call the target.
  __ mov(eax, Immediate(argc));
  __ Call(isolate()->builtins()->Call(), RelocInfo::CODE_TARGET);
  // The builtin consumed the arguments plus the receiver.
  OperandStackDepthDecrement(argc + 1);
  // Restore context register.
  __ mov(esi, Operand(ebp, StandardFrameConstants::kContextOffset));
  // Discard the function left on TOS.
  context()->DropAndPlug(1, eax);
}
3132
3133
// Inlined intrinsic %_HasCachedArrayIndex(string): true iff the string's hash
// field contains a cached array index (the "contains" mask bits are clear).
void FullCodeGenerator::EmitHasCachedArrayIndex(CallRuntime* expr) {
  ZoneList<Expression*>* args = expr->arguments();
  DCHECK(args->length() == 1);

  VisitForAccumulatorValue(args->at(0));

  __ AssertString(eax);

  Label materialize_true, materialize_false;
  Label* if_true = NULL;
  Label* if_false = NULL;
  Label* fall_through = NULL;
  context()->PrepareTest(&materialize_true, &materialize_false,
                         &if_true, &if_false, &fall_through);

  __ test(FieldOperand(eax, String::kHashFieldOffset),
          Immediate(String::kContainsCachedArrayIndexMask));
  PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
  // Zero flag set => mask bits clear => an array index is cached.
  Split(zero, if_true, if_false, fall_through);

  context()->Plug(if_true, if_false);
}
3156
3157
// Inlined intrinsic %_GetCachedArrayIndex(string): extracts the array index
// cached in the string's hash field (caller must have checked it exists).
void FullCodeGenerator::EmitGetCachedArrayIndex(CallRuntime* expr) {
  ZoneList<Expression*>* args = expr->arguments();
  DCHECK(args->length() == 1);
  VisitForAccumulatorValue(args->at(0));

  __ AssertString(eax);

  __ mov(eax, FieldOperand(eax, String::kHashFieldOffset));
  __ IndexFromHash(eax, eax);

  context()->Plug(eax);
}
3170
3171
// Inlined intrinsic %_GetSuperConstructor(fn): returns the prototype of the
// function's map, i.e. the [[Prototype]] of the function object itself.
void FullCodeGenerator::EmitGetSuperConstructor(CallRuntime* expr) {
  ZoneList<Expression*>* args = expr->arguments();
  DCHECK_EQ(1, args->length());
  VisitForAccumulatorValue(args->at(0));
  __ AssertFunction(eax);
  __ mov(eax, FieldOperand(eax, HeapObject::kMapOffset));
  __ mov(eax, FieldOperand(eax, Map::kPrototypeOffset));
  context()->Plug(eax);
}
3181
// Inlined intrinsic %_GetOrdinaryHasInstance(): loads the canonical
// Function.prototype[@@hasInstance] implementation from the native context.
void FullCodeGenerator::EmitGetOrdinaryHasInstance(CallRuntime* expr) {
  DCHECK_EQ(0, expr->arguments()->length());
  __ mov(eax, NativeContextOperand());
  __ mov(eax, ContextOperand(eax, Context::ORDINARY_HAS_INSTANCE_INDEX));
  context()->Plug(eax);
}
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00003188
// Inlined intrinsic %_DebugIsActive(): reads the isolate's debug-is-active
// byte and returns it as a Smi (0 or Smi-tagged nonzero).
void FullCodeGenerator::EmitDebugIsActive(CallRuntime* expr) {
  DCHECK(expr->arguments()->length() == 0);
  ExternalReference debug_is_active =
      ExternalReference::debug_is_active_address(isolate());
  __ movzx_b(eax, Operand::StaticVariable(debug_is_active));
  __ SmiTag(eax);
  context()->Plug(eax);
}
3197
3198
// Inlined intrinsic %_CreateIterResultObject(value, done): fast-path
// allocates a JSIteratorResult and initializes its map, properties, elements,
// done and value fields; falls back to the runtime on allocation failure.
void FullCodeGenerator::EmitCreateIterResultObject(CallRuntime* expr) {
  ZoneList<Expression*>* args = expr->arguments();
  DCHECK_EQ(2, args->length());
  VisitForStackValue(args->at(0));
  VisitForStackValue(args->at(1));

  Label runtime, done;

  __ Allocate(JSIteratorResult::kSize, eax, ecx, edx, &runtime, TAG_OBJECT);
  __ mov(ebx, NativeContextOperand());
  __ mov(ebx, ContextOperand(ebx, Context::ITERATOR_RESULT_MAP_INDEX));
  __ mov(FieldOperand(eax, HeapObject::kMapOffset), ebx);
  __ mov(FieldOperand(eax, JSObject::kPropertiesOffset),
         isolate()->factory()->empty_fixed_array());
  __ mov(FieldOperand(eax, JSObject::kElementsOffset),
         isolate()->factory()->empty_fixed_array());
  // Pop 'done' then 'value' (pushed in that reverse order above) straight
  // into the freshly allocated object's fields.
  __ pop(FieldOperand(eax, JSIteratorResult::kDoneOffset));
  __ pop(FieldOperand(eax, JSIteratorResult::kValueOffset));
  STATIC_ASSERT(JSIteratorResult::kSize == 5 * kPointerSize);
  __ jmp(&done, Label::kNear);

  __ bind(&runtime);
  // Slow path: the runtime call consumes the two stacked operands.
  CallRuntimeWithOperands(Runtime::kCreateIterResultObject);

  __ bind(&done);
  context()->Plug(eax);
}
3226
3227
// Pushes [function, undefined-receiver] for a JS-implemented runtime
// function, identified by its slot in the native context.
void FullCodeGenerator::EmitLoadJSRuntimeFunction(CallRuntime* expr) {
  // Push function.
  __ LoadGlobalFunction(expr->context_index(), eax);
  PushOperand(eax);

  // Push undefined as receiver.
  PushOperand(isolate()->factory()->undefined_value());
}
3236
3237
// Calls a JS-implemented runtime function whose [function, receiver] pair
// was pushed by EmitLoadJSRuntimeFunction; the arguments are already on the
// stack. Leaves the result in eax (context plugging is done by the caller).
void FullCodeGenerator::EmitCallJSRuntimeFunction(CallRuntime* expr) {
  ZoneList<Expression*>* args = expr->arguments();
  int arg_count = args->length();

  SetCallPosition(expr);
  // Call builtin convention: edi = target, eax = argument count.
  __ mov(edi, Operand(esp, (arg_count + 1) * kPointerSize));
  __ Set(eax, arg_count);
  __ Call(isolate()->builtins()->Call(ConvertReceiverMode::kNullOrUndefined),
          RelocInfo::CODE_TARGET);
  OperandStackDepthDecrement(arg_count + 1);

  // Restore context register.
  __ mov(esi, Operand(ebp, StandardFrameConstants::kContextOffset));
}
3252
3253
// Emits code for the unary operators delete/void/!/typeof. Each case plugs
// its result into the current expression context; unsupported tokens are
// unreachable here (other unary ops are desugared elsewhere).
void FullCodeGenerator::VisitUnaryOperation(UnaryOperation* expr) {
  switch (expr->op()) {
    case Token::DELETE: {
      Comment cmnt(masm_, "[ UnaryOperation (DELETE)");
      Property* property = expr->expression()->AsProperty();
      VariableProxy* proxy = expr->expression()->AsVariableProxy();

      if (property != NULL) {
        // delete obj[key] / delete obj.key: defer to the runtime, picking
        // the strict or sloppy variant from the current language mode.
        VisitForStackValue(property->obj());
        VisitForStackValue(property->key());
        CallRuntimeWithOperands(is_strict(language_mode())
                                    ? Runtime::kDeleteProperty_Strict
                                    : Runtime::kDeleteProperty_Sloppy);
        context()->Plug(eax);
      } else if (proxy != NULL) {
        Variable* var = proxy->var();
        // Delete of an unqualified identifier is disallowed in strict mode but
        // "delete this" is allowed.
        bool is_this = var->HasThisName(isolate());
        DCHECK(is_sloppy(language_mode()) || is_this);
        if (var->IsUnallocatedOrGlobalSlot()) {
          // Global variable: delete from the global object (native context
          // extension).
          __ mov(eax, NativeContextOperand());
          __ push(ContextOperand(eax, Context::EXTENSION_INDEX));
          __ push(Immediate(var->name()));
          __ CallRuntime(Runtime::kDeleteProperty_Sloppy);
          context()->Plug(eax);
        } else if (var->IsStackAllocated() || var->IsContextSlot()) {
          // Result of deleting non-global variables is false.  'this' is
          // not really a variable, though we implement it as one.  The
          // subexpression does not have side effects.
          context()->Plug(is_this);
        } else {
          // Non-global variable.  Call the runtime to try to delete from the
          // context where the variable was introduced.
          __ Push(var->name());
          __ CallRuntime(Runtime::kDeleteLookupSlot);
          context()->Plug(eax);
        }
      } else {
        // Result of deleting non-property, non-variable reference is true.
        // The subexpression may have side effects.
        VisitForEffect(expr->expression());
        context()->Plug(true);
      }
      break;
    }

    case Token::VOID: {
      // void expr: evaluate for side effects, result is always undefined.
      Comment cmnt(masm_, "[ UnaryOperation (VOID)");
      VisitForEffect(expr->expression());
      context()->Plug(isolate()->factory()->undefined_value());
      break;
    }

    case Token::NOT: {
      Comment cmnt(masm_, "[ UnaryOperation (NOT)");
      if (context()->IsEffect()) {
        // Unary NOT has no side effects so it's only necessary to visit the
        // subexpression.  Match the optimizing compiler by not branching.
        VisitForEffect(expr->expression());
      } else if (context()->IsTest()) {
        const TestContext* test = TestContext::cast(context());
        // The labels are swapped for the recursive call.
        VisitForControl(expr->expression(),
                        test->false_label(),
                        test->true_label(),
                        test->fall_through());
        context()->Plug(test->true_label(), test->false_label());
      } else {
        // We handle value contexts explicitly rather than simply visiting
        // for control and plugging the control flow into the context,
        // because we need to prepare a pair of extra administrative AST ids
        // for the optimizing compiler.
        DCHECK(context()->IsAccumulatorValue() || context()->IsStackValue());
        Label materialize_true, materialize_false, done;
        // Labels swapped: the subexpression's "false" materializes our true.
        VisitForControl(expr->expression(),
                        &materialize_false,
                        &materialize_true,
                        &materialize_true);
        if (!context()->IsAccumulatorValue()) OperandStackDepthIncrement(1);
        __ bind(&materialize_true);
        PrepareForBailoutForId(expr->MaterializeTrueId(), NO_REGISTERS);
        if (context()->IsAccumulatorValue()) {
          __ mov(eax, isolate()->factory()->true_value());
        } else {
          __ Push(isolate()->factory()->true_value());
        }
        __ jmp(&done, Label::kNear);
        __ bind(&materialize_false);
        PrepareForBailoutForId(expr->MaterializeFalseId(), NO_REGISTERS);
        if (context()->IsAccumulatorValue()) {
          __ mov(eax, isolate()->factory()->false_value());
        } else {
          __ Push(isolate()->factory()->false_value());
        }
        __ bind(&done);
      }
      break;
    }

    case Token::TYPEOF: {
      Comment cmnt(masm_, "[ UnaryOperation (TYPEOF)");
      {
        AccumulatorValueContext context(this);
        VisitForTypeofValue(expr->expression());
      }
      // TypeofStub takes its operand in ebx and returns the type string.
      __ mov(ebx, eax);
      TypeofStub typeof_stub(isolate());
      __ CallStub(&typeof_stub);
      context()->Plug(eax);
      break;
    }

    default:
      UNREACHABLE();
  }
}
3371
3372
// Emits full-codegen code for count operations: prefix and postfix ++/--
// on variables, named/keyed properties, and named/keyed super properties.
// Strategy: load the old value, optionally save it for postfix results,
// try an inlined smi add/sub (patchable via JumpPatchSite), fall back to
// ToNumberStub + BinaryOpIC, then store the new value back via the
// appropriate assignment path.
void FullCodeGenerator::VisitCountOperation(CountOperation* expr) {
  DCHECK(expr->expression()->IsValidReferenceExpressionOrThis());

  Comment cmnt(masm_, "[ CountOperation");

  Property* prop = expr->expression()->AsProperty();
  LhsKind assign_type = Property::GetAssignType(prop);

  // Evaluate expression and get value.
  if (assign_type == VARIABLE) {
    DCHECK(expr->expression()->AsVariableProxy()->var() != NULL);
    AccumulatorValueContext context(this);
    EmitVariableLoad(expr->expression()->AsVariableProxy());
  } else {
    // Reserve space for result of postfix operation. This slot (a dummy
    // smi 0) sits below the receiver/key and is overwritten with the old
    // value later.
    if (expr->is_postfix() && !context()->IsEffect()) {
      PushOperand(Smi::FromInt(0));
    }
    switch (assign_type) {
      case NAMED_PROPERTY: {
        // Put the object both on the stack and in the register.
        VisitForStackValue(prop->obj());
        __ mov(LoadDescriptor::ReceiverRegister(), Operand(esp, 0));
        EmitNamedPropertyLoad(prop);
        break;
      }

      case NAMED_SUPER_PROPERTY: {
        // Stack after these pushes: home_object, this, home_object —
        // the duplicated pair is consumed by the super-property load.
        VisitForStackValue(prop->obj()->AsSuperPropertyReference()->this_var());
        VisitForAccumulatorValue(
            prop->obj()->AsSuperPropertyReference()->home_object());
        PushOperand(result_register());
        PushOperand(MemOperand(esp, kPointerSize));
        PushOperand(result_register());
        EmitNamedSuperPropertyLoad(prop);
        break;
      }

      case KEYED_SUPER_PROPERTY: {
        // Duplicate this/home_object/key on top of the stack for the load;
        // the originals remain for the store after the count operation.
        VisitForStackValue(prop->obj()->AsSuperPropertyReference()->this_var());
        VisitForStackValue(
            prop->obj()->AsSuperPropertyReference()->home_object());
        VisitForAccumulatorValue(prop->key());
        PushOperand(result_register());
        PushOperand(MemOperand(esp, 2 * kPointerSize));
        PushOperand(MemOperand(esp, 2 * kPointerSize));
        PushOperand(result_register());
        EmitKeyedSuperPropertyLoad(prop);
        break;
      }

      case KEYED_PROPERTY: {
        VisitForStackValue(prop->obj());
        VisitForStackValue(prop->key());
        __ mov(LoadDescriptor::ReceiverRegister(),
               Operand(esp, kPointerSize));  // Object.
        __ mov(LoadDescriptor::NameRegister(), Operand(esp, 0));  // Key.
        EmitKeyedPropertyLoad(prop);
        break;
      }

      case VARIABLE:
        UNREACHABLE();
    }
  }

  // We need a second deoptimization point after loading the value
  // in case evaluating the property load may have a side effect.
  if (assign_type == VARIABLE) {
    PrepareForBailout(expr->expression(), TOS_REG);
  } else {
    PrepareForBailoutForId(prop->LoadId(), TOS_REG);
  }

  // Inline smi case if we are in a loop.
  Label done, stub_call;
  JumpPatchSite patch_site(masm_);
  if (ShouldInlineSmiCase(expr->op())) {
    Label slow;
    patch_site.EmitJumpIfNotSmi(eax, &slow, Label::kNear);

    // Save result for postfix expressions.
    if (expr->is_postfix()) {
      if (!context()->IsEffect()) {
        // Save the result on the stack. If we have a named or keyed property
        // we store the result under the receiver that is currently on top
        // of the stack. The esp offsets skip the receiver (and key /
        // home object) pushed above.
        switch (assign_type) {
          case VARIABLE:
            __ push(eax);
            break;
          case NAMED_PROPERTY:
            __ mov(Operand(esp, kPointerSize), eax);
            break;
          case NAMED_SUPER_PROPERTY:
            __ mov(Operand(esp, 2 * kPointerSize), eax);
            break;
          case KEYED_PROPERTY:
            __ mov(Operand(esp, 2 * kPointerSize), eax);
            break;
          case KEYED_SUPER_PROPERTY:
            __ mov(Operand(esp, 3 * kPointerSize), eax);
            break;
        }
      }
    }

    // Smi arithmetic: a tagged add/sub of Smi(1) increments/decrements the
    // untagged value; overflow means the result left smi range.
    if (expr->op() == Token::INC) {
      __ add(eax, Immediate(Smi::FromInt(1)));
    } else {
      __ sub(eax, Immediate(Smi::FromInt(1)));
    }
    __ j(no_overflow, &done, Label::kNear);
    // Call stub. Undo operation first.
    if (expr->op() == Token::INC) {
      __ sub(eax, Immediate(Smi::FromInt(1)));
    } else {
      __ add(eax, Immediate(Smi::FromInt(1)));
    }
    __ jmp(&stub_call, Label::kNear);
    __ bind(&slow);
  }

  // Convert old value into a number.
  ToNumberStub convert_stub(isolate());
  __ CallStub(&convert_stub);
  PrepareForBailoutForId(expr->ToNumberId(), TOS_REG);

  // Save result for postfix expressions.
  if (expr->is_postfix()) {
    if (!context()->IsEffect()) {
      // Save the result on the stack. If we have a named or keyed property
      // we store the result under the receiver that is currently on top
      // of the stack.
      switch (assign_type) {
        case VARIABLE:
          PushOperand(eax);
          break;
        case NAMED_PROPERTY:
          __ mov(Operand(esp, kPointerSize), eax);
          break;
        case NAMED_SUPER_PROPERTY:
          __ mov(Operand(esp, 2 * kPointerSize), eax);
          break;
        case KEYED_PROPERTY:
          __ mov(Operand(esp, 2 * kPointerSize), eax);
          break;
        case KEYED_SUPER_PROPERTY:
          __ mov(Operand(esp, 3 * kPointerSize), eax);
          break;
      }
    }
  }

  SetExpressionPosition(expr);

  // Call stub for +1/-1. The BinaryOpIC takes the old value in edx and
  // Smi(1) in eax; the patch site lets the IC flip the smi fast-path jump.
  __ bind(&stub_call);
  __ mov(edx, eax);
  __ mov(eax, Immediate(Smi::FromInt(1)));
  Handle<Code> code =
      CodeFactory::BinaryOpIC(isolate(), expr->binary_op()).code();
  CallIC(code, expr->CountBinOpFeedbackId());
  patch_site.EmitPatchInfo();
  __ bind(&done);

  // Store the value returned in eax.
  switch (assign_type) {
    case VARIABLE:
      if (expr->is_postfix()) {
        // Perform the assignment as if via '='.
        { EffectContext context(this);
          EmitVariableAssignment(expr->expression()->AsVariableProxy()->var(),
                                 Token::ASSIGN, expr->CountSlot());
          PrepareForBailoutForId(expr->AssignmentId(), TOS_REG);
          context.Plug(eax);
        }
        // For all contexts except EffectContext we have the result on
        // top of the stack.
        if (!context()->IsEffect()) {
          context()->PlugTOS();
        }
      } else {
        // Perform the assignment as if via '='.
        EmitVariableAssignment(expr->expression()->AsVariableProxy()->var(),
                               Token::ASSIGN, expr->CountSlot());
        PrepareForBailoutForId(expr->AssignmentId(), TOS_REG);
        context()->Plug(eax);
      }
      break;
    case NAMED_PROPERTY: {
      __ mov(StoreDescriptor::NameRegister(),
             prop->key()->AsLiteral()->value());
      PopOperand(StoreDescriptor::ReceiverRegister());
      EmitLoadStoreICSlot(expr->CountSlot());
      CallStoreIC();
      PrepareForBailoutForId(expr->AssignmentId(), TOS_REG);
      if (expr->is_postfix()) {
        if (!context()->IsEffect()) {
          context()->PlugTOS();
        }
      } else {
        context()->Plug(eax);
      }
      break;
    }
    case NAMED_SUPER_PROPERTY: {
      EmitNamedSuperPropertyStore(prop);
      if (expr->is_postfix()) {
        if (!context()->IsEffect()) {
          context()->PlugTOS();
        }
      } else {
        context()->Plug(eax);
      }
      break;
    }
    case KEYED_SUPER_PROPERTY: {
      EmitKeyedSuperPropertyStore(prop);
      if (expr->is_postfix()) {
        if (!context()->IsEffect()) {
          context()->PlugTOS();
        }
      } else {
        context()->Plug(eax);
      }
      break;
    }
    case KEYED_PROPERTY: {
      PopOperand(StoreDescriptor::NameRegister());
      PopOperand(StoreDescriptor::ReceiverRegister());
      Handle<Code> ic =
          CodeFactory::KeyedStoreIC(isolate(), language_mode()).code();
      EmitLoadStoreICSlot(expr->CountSlot());
      CallIC(ic);
      PrepareForBailoutForId(expr->AssignmentId(), TOS_REG);
      if (expr->is_postfix()) {
        // Result is on the stack
        if (!context()->IsEffect()) {
          context()->PlugTOS();
        }
      } else {
        context()->Plug(eax);
      }
      break;
    }
  }
}
3621
3622
// Emits an inlined comparison of `typeof sub_expr` against the string
// literal `check`, splitting control flow directly to if_true/if_false
// without materializing the typeof string. An unknown `check` string can
// never match, so it falls straight through to if_false.
void FullCodeGenerator::EmitLiteralCompareTypeof(Expression* expr,
                                                 Expression* sub_expr,
                                                 Handle<String> check) {
  Label materialize_true, materialize_false;
  Label* if_true = NULL;
  Label* if_false = NULL;
  Label* fall_through = NULL;
  context()->PrepareTest(&materialize_true, &materialize_false,
                         &if_true, &if_false, &fall_through);

  { AccumulatorValueContext context(this);
    VisitForTypeofValue(sub_expr);
  }
  PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);

  Factory* factory = isolate()->factory();
  if (String::Equals(check, factory->number_string())) {
    // Smis and heap numbers are "number".
    __ JumpIfSmi(eax, if_true);
    __ cmp(FieldOperand(eax, HeapObject::kMapOffset),
           isolate()->factory()->heap_number_map());
    Split(equal, if_true, if_false, fall_through);
  } else if (String::Equals(check, factory->string_string())) {
    __ JumpIfSmi(eax, if_false);
    __ CmpObjectType(eax, FIRST_NONSTRING_TYPE, edx);
    Split(below, if_true, if_false, fall_through);
  } else if (String::Equals(check, factory->symbol_string())) {
    __ JumpIfSmi(eax, if_false);
    __ CmpObjectType(eax, SYMBOL_TYPE, edx);
    Split(equal, if_true, if_false, fall_through);
  } else if (String::Equals(check, factory->boolean_string())) {
    __ cmp(eax, isolate()->factory()->true_value());
    __ j(equal, if_true);
    __ cmp(eax, isolate()->factory()->false_value());
    Split(equal, if_true, if_false, fall_through);
  } else if (String::Equals(check, factory->undefined_string())) {
    // null is explicitly excluded first: typeof null is "object".
    __ cmp(eax, isolate()->factory()->null_value());
    __ j(equal, if_false);
    __ JumpIfSmi(eax, if_false);
    // Check for undetectable objects => true.
    __ mov(edx, FieldOperand(eax, HeapObject::kMapOffset));
    __ test_b(FieldOperand(edx, Map::kBitFieldOffset),
              Immediate(1 << Map::kIsUndetectable));
    Split(not_zero, if_true, if_false, fall_through);
  } else if (String::Equals(check, factory->function_string())) {
    __ JumpIfSmi(eax, if_false);
    // Check for callable and not undetectable objects => true.
    __ mov(edx, FieldOperand(eax, HeapObject::kMapOffset));
    __ movzx_b(ecx, FieldOperand(edx, Map::kBitFieldOffset));
    __ and_(ecx, (1 << Map::kIsCallable) | (1 << Map::kIsUndetectable));
    __ cmp(ecx, 1 << Map::kIsCallable);
    Split(equal, if_true, if_false, fall_through);
  } else if (String::Equals(check, factory->object_string())) {
    __ JumpIfSmi(eax, if_false);
    // null is "object".
    __ cmp(eax, isolate()->factory()->null_value());
    __ j(equal, if_true);
    STATIC_ASSERT(LAST_JS_RECEIVER_TYPE == LAST_TYPE);
    __ CmpObjectType(eax, FIRST_JS_RECEIVER_TYPE, edx);
    __ j(below, if_false);
    // Check for callable or undetectable objects => false.
    __ test_b(FieldOperand(edx, Map::kBitFieldOffset),
              Immediate((1 << Map::kIsCallable) | (1 << Map::kIsUndetectable)));
    Split(zero, if_true, if_false, fall_through);
// clang-format off
  // Each SIMD128 type gets a branch comparing against its dedicated map.
#define SIMD128_TYPE(TYPE, Type, type, lane_count, lane_type) \
  } else if (String::Equals(check, factory->type##_string())) { \
    __ JumpIfSmi(eax, if_false); \
    __ cmp(FieldOperand(eax, HeapObject::kMapOffset), \
           isolate()->factory()->type##_map()); \
    Split(equal, if_true, if_false, fall_through);
  SIMD128_TYPES(SIMD128_TYPE)
#undef SIMD128_TYPE
  // clang-format on
  } else {
    // Unknown typeof string: always false.
    if (if_false != fall_through) __ jmp(if_false);
  }
  context()->Plug(if_true, if_false);
}
3700
3701
// Emits code for comparison expressions: `in` via Runtime::kHasProperty,
// `instanceof` via InstanceOfStub, and all other operators via the
// CompareIC with an optional patchable inlined smi fast path.
void FullCodeGenerator::VisitCompareOperation(CompareOperation* expr) {
  Comment cmnt(masm_, "[ CompareOperation");
  SetExpressionPosition(expr);

  // First we try a fast inlined version of the compare when one of
  // the operands is a literal.
  if (TryLiteralCompare(expr)) return;

  // Always perform the comparison for its control flow. Pack the result
  // into the expression's context after the comparison is performed.
  Label materialize_true, materialize_false;
  Label* if_true = NULL;
  Label* if_false = NULL;
  Label* fall_through = NULL;
  context()->PrepareTest(&materialize_true, &materialize_false,
                         &if_true, &if_false, &fall_through);

  Token::Value op = expr->op();
  VisitForStackValue(expr->left());
  switch (op) {
    case Token::IN:
      VisitForStackValue(expr->right());
      CallRuntimeWithOperands(Runtime::kHasProperty);
      PrepareForBailoutBeforeSplit(expr, false, NULL, NULL);
      // The runtime returns a boolean; branch on it being `true`.
      __ cmp(eax, isolate()->factory()->true_value());
      Split(equal, if_true, if_false, fall_through);
      break;

    case Token::INSTANCEOF: {
      VisitForAccumulatorValue(expr->right());
      PopOperand(edx);  // Left operand.
      InstanceOfStub stub(isolate());
      __ CallStub(&stub);
      PrepareForBailoutBeforeSplit(expr, false, NULL, NULL);
      __ cmp(eax, isolate()->factory()->true_value());
      Split(equal, if_true, if_false, fall_through);
      break;
    }

    default: {
      VisitForAccumulatorValue(expr->right());
      Condition cc = CompareIC::ComputeCondition(op);
      PopOperand(edx);  // Left operand; right is already in eax.

      bool inline_smi_code = ShouldInlineSmiCase(op);
      JumpPatchSite patch_site(masm_);
      if (inline_smi_code) {
        // Fast path: if both operands are smis, compare them directly.
        Label slow_case;
        __ mov(ecx, edx);
        __ or_(ecx, eax);
        patch_site.EmitJumpIfNotSmi(ecx, &slow_case, Label::kNear);
        __ cmp(edx, eax);
        Split(cc, if_true, if_false, NULL);
        __ bind(&slow_case);
      }

      Handle<Code> ic = CodeFactory::CompareIC(isolate(), op).code();
      CallIC(ic, expr->CompareOperationFeedbackId());
      patch_site.EmitPatchInfo();

      PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
      // The CompareIC returns its result in eax relative to zero.
      __ test(eax, eax);
      Split(cc, if_true, if_false, fall_through);
    }
  }

  // Convert the result of the comparison into one expected for this
  // expression's context.
  context()->Plug(if_true, if_false);
}
3772
3773
// Emits an inlined comparison of sub_expr against null or undefined.
// For === the value is compared directly against the requested oddball;
// for == (where null and undefined compare equal to each other and to
// undetectable objects) the undetectable bit in the map is tested.
void FullCodeGenerator::EmitLiteralCompareNil(CompareOperation* expr,
                                              Expression* sub_expr,
                                              NilValue nil) {
  Label materialize_true, materialize_false;
  Label* if_true = NULL;
  Label* if_false = NULL;
  Label* fall_through = NULL;
  context()->PrepareTest(&materialize_true, &materialize_false,
                         &if_true, &if_false, &fall_through);

  VisitForAccumulatorValue(sub_expr);
  PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);

  Handle<Object> nil_value = nil == kNullValue
      ? isolate()->factory()->null_value()
      : isolate()->factory()->undefined_value();
  if (expr->op() == Token::EQ_STRICT) {
    // Strict equality: identity comparison against the oddball suffices.
    __ cmp(eax, nil_value);
    Split(equal, if_true, if_false, fall_through);
  } else {
    // Loose equality: smis are never ==-equal to null/undefined; heap
    // objects match iff their map has the undetectable bit set.
    __ JumpIfSmi(eax, if_false);
    __ mov(eax, FieldOperand(eax, HeapObject::kMapOffset));
    __ test_b(FieldOperand(eax, Map::kBitFieldOffset),
              Immediate(1 << Map::kIsUndetectable));
    Split(not_zero, if_true, if_false, fall_through);
  }
  context()->Plug(if_true, if_false);
}
3802
3803
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00003804Register FullCodeGenerator::result_register() {
3805 return eax;
3806}
3807
3808
3809Register FullCodeGenerator::context_register() {
3810 return esi;
3811}
3812
// Loads the pointer-aligned frame slot at ebp + frame_offset into `value`.
void FullCodeGenerator::LoadFromFrameField(int frame_offset, Register value) {
  DCHECK_EQ(POINTER_SIZE_ALIGN(frame_offset), frame_offset);
  __ mov(value, Operand(ebp, frame_offset));
}
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00003817
// Stores `value` into the pointer-aligned frame slot at ebp + frame_offset.
void FullCodeGenerator::StoreToFrameField(int frame_offset, Register value) {
  DCHECK_EQ(POINTER_SIZE_ALIGN(frame_offset), frame_offset);
  __ mov(Operand(ebp, frame_offset), value);
}
3822
3823
// Loads slot `context_index` of the current context (esi) into `dst`.
void FullCodeGenerator::LoadContextField(Register dst, int context_index) {
  __ mov(dst, ContextOperand(esi, context_index));
}
3827
3828
// Pushes the closure to use as the function argument when allocating a
// new context. Which closure that is depends on the kind of scope the
// current code closes over.
void FullCodeGenerator::PushFunctionArgumentForContextAllocation() {
  Scope* closure_scope = scope()->ClosureScope();
  if (closure_scope->is_script_scope() ||
      closure_scope->is_module_scope()) {
    // Contexts nested in the native context have a canonical empty function
    // as their closure, not the anonymous closure containing the global
    // code.
    __ mov(eax, NativeContextOperand());
    PushOperand(ContextOperand(eax, Context::CLOSURE_INDEX));
  } else if (closure_scope->is_eval_scope()) {
    // Contexts nested inside eval code have the same closure as the context
    // calling eval, not the anonymous closure containing the eval code.
    // Fetch it from the context.
    PushOperand(ContextOperand(esi, Context::CLOSURE_INDEX));
  } else {
    // Function scope: the closure is the function in the current frame.
    DCHECK(closure_scope->is_function_scope());
    PushOperand(Operand(ebp, JavaScriptFrameConstants::kFunctionOffset));
  }
}
3848
3849
3850// ----------------------------------------------------------------------------
3851// Non-local control flow support.
3852
// Saves the isolate's pending message on the operand stack and clears it,
// so the finally body runs with a clean message slot; ExitFinallyBlock
// restores it.
void FullCodeGenerator::EnterFinallyBlock() {
  // Store pending message while executing finally block.
  ExternalReference pending_message_obj =
      ExternalReference::address_of_pending_message_obj(isolate());
  __ mov(edx, Operand::StaticVariable(pending_message_obj));
  PushOperand(edx);

  ClearPendingMessage();
}
3862
3863
// Restores the pending message saved by EnterFinallyBlock from the
// operand stack back into the isolate's pending-message slot.
void FullCodeGenerator::ExitFinallyBlock() {
  // edx is the scratch register here; it must not alias the accumulator.
  DCHECK(!result_register().is(edx));
  // Restore pending message from stack.
  PopOperand(edx);
  ExternalReference pending_message_obj =
      ExternalReference::address_of_pending_message_obj(isolate());
  __ mov(Operand::StaticVariable(pending_message_obj), edx);
}
3872
3873
// Resets the isolate's pending-message slot to the hole value.
void FullCodeGenerator::ClearPendingMessage() {
  // edx is the scratch register here; it must not alias the accumulator.
  DCHECK(!result_register().is(edx));
  ExternalReference pending_message_obj =
      ExternalReference::address_of_pending_message_obj(isolate());
  __ mov(edx, Immediate(isolate()->factory()->the_hole_value()));
  __ mov(Operand::StaticVariable(pending_message_obj), edx);
}
3881
3882
// Emits the dispatch code run after a finally block: pops the saved
// accumulator and a smi token from the stack, then compares the token
// against each deferred command (return/throw/continue/break) and
// executes the matching one.
void FullCodeGenerator::DeferredCommands::EmitCommands() {
  DCHECK(!result_register().is(edx));
  __ Pop(result_register());  // Restore the accumulator.
  __ Pop(edx);                // Get the token.
  for (DeferredCommand cmd : commands_) {
    Label skip;
    // Tokens are smis; fall through to the next command on mismatch.
    __ cmp(edx, Immediate(Smi::FromInt(cmd.token)));
    __ j(not_equal, &skip);
    switch (cmd.command) {
      case kReturn:
        codegen_->EmitUnwindAndReturn();
        break;
      case kThrow:
        // Re-throw the exception held in the accumulator.
        __ Push(result_register());
        __ CallRuntime(Runtime::kReThrow);
        break;
      case kContinue:
        codegen_->EmitContinue(cmd.target);
        break;
      case kBreak:
        codegen_->EmitBreak(cmd.target);
        break;
    }
    __ bind(&skip);
  }
}
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00003909
3910#undef __
3911
3912
// Byte patterns used by BackEdgeTable::PatchAt/GetBackEdgeState to toggle
// the back-edge check between "jns ok" (interrupt check active) and a
// two-byte nop (on-stack replacement active).
static const byte kJnsInstruction = 0x79;  // Opcode of the short jns jump.
static const byte kJnsOffset = 0x11;       // Fixed displacement to "ok".
static const byte kNopByteOne = 0x66;      // Operand-size prefix; together
static const byte kNopByteTwo = 0x90;      // with 0x90 forms a 2-byte nop.
#ifdef DEBUG
static const byte kCallInstruction = 0xe8;  // Near-call opcode, for checks.
#endif
3920
3921
// Patches the back-edge check at `pc` in unoptimized code to either keep
// the interrupt check (jns over the call) or disable it (two-byte nop, so
// the call to the OSR builtin is always taken), and redirects the call
// target to `replacement_code`.
void BackEdgeTable::PatchAt(Code* unoptimized_code,
                            Address pc,
                            BackEdgeState target_state,
                            Code* replacement_code) {
  // Layout relative to pc: the call's 4-byte operand precedes pc, and the
  // two jns bytes precede the call opcode.
  Address call_target_address = pc - kIntSize;
  Address jns_instr_address = call_target_address - 3;
  Address jns_offset_address = call_target_address - 2;

  switch (target_state) {
    case INTERRUPT:
      // sub <profiling_counter>, <delta> ;; Not changed
      // jns ok
      // call <interrupt stub>
      // ok:
      *jns_instr_address = kJnsInstruction;
      *jns_offset_address = kJnsOffset;
      break;
    case ON_STACK_REPLACEMENT:
      // sub <profiling_counter>, <delta> ;; Not changed
      // nop
      // nop
      // call <on-stack replacement>
      // ok:
      *jns_instr_address = kNopByteOne;
      *jns_offset_address = kNopByteTwo;
      break;
  }

  Assembler::set_target_address_at(unoptimized_code->GetIsolate(),
                                   call_target_address, unoptimized_code,
                                   replacement_code->entry());
  // Tell the incremental marker about the patched-in code reference.
  unoptimized_code->GetHeap()->incremental_marking()->RecordCodeTargetPatch(
      unoptimized_code, call_target_address, replacement_code);
}
3956
3957
// Inspects the bytes at the back-edge site `pc` and reports whether it is
// currently patched for the interrupt check (jns present) or for
// on-stack replacement (two-byte nop present).
BackEdgeTable::BackEdgeState BackEdgeTable::GetBackEdgeState(
    Isolate* isolate,
    Code* unoptimized_code,
    Address pc) {
  Address call_target_address = pc - kIntSize;
  Address jns_instr_address = call_target_address - 3;
  // A near call (0xe8) must immediately precede the call operand.
  DCHECK_EQ(kCallInstruction, *(call_target_address - 1));

  if (*jns_instr_address == kJnsInstruction) {
    // Interrupt-check form: verify the jns displacement and that the call
    // targets the InterruptCheck builtin.
    DCHECK_EQ(kJnsOffset, *(call_target_address - 2));
    DCHECK_EQ(isolate->builtins()->InterruptCheck()->entry(),
              Assembler::target_address_at(call_target_address,
                                           unoptimized_code));
    return INTERRUPT;
  }

  // Otherwise the site must carry the two-byte nop and target the
  // OnStackReplacement builtin.
  DCHECK_EQ(kNopByteOne, *jns_instr_address);
  DCHECK_EQ(kNopByteTwo, *(call_target_address - 2));

  DCHECK_EQ(
      isolate->builtins()->OnStackReplacement()->entry(),
      Assembler::target_address_at(call_target_address, unoptimized_code));
  return ON_STACK_REPLACEMENT;
}
3982
3983
3984} // namespace internal
3985} // namespace v8
3986
3987#endif // V8_TARGET_ARCH_X87