blob: 760a818552473422b979d392f1bd0252daa4a519 [file] [log] [blame]
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00001// Copyright 2012 the V8 project authors. All rights reserved.
2// Use of this source code is governed by a BSD-style license that can be
3// found in the LICENSE file.
4
5#if V8_TARGET_ARCH_IA32
6
7#include "src/ast/scopes.h"
8#include "src/code-factory.h"
9#include "src/code-stubs.h"
10#include "src/codegen.h"
11#include "src/debug/debug.h"
12#include "src/full-codegen/full-codegen.h"
13#include "src/ia32/frames-ia32.h"
14#include "src/ic/ic.h"
15#include "src/parsing/parser.h"
16
17namespace v8 {
18namespace internal {
19
Ben Murdoch097c5b22016-05-18 11:27:45 +010020#define __ ACCESS_MASM(masm())
Ben Murdoch4a90d5f2016-03-22 12:00:34 +000021
22class JumpPatchSite BASE_EMBEDDED {
23 public:
24 explicit JumpPatchSite(MacroAssembler* masm) : masm_(masm) {
25#ifdef DEBUG
26 info_emitted_ = false;
27#endif
28 }
29
30 ~JumpPatchSite() {
31 DCHECK(patch_site_.is_bound() == info_emitted_);
32 }
33
34 void EmitJumpIfNotSmi(Register reg,
35 Label* target,
36 Label::Distance distance = Label::kFar) {
37 __ test(reg, Immediate(kSmiTagMask));
38 EmitJump(not_carry, target, distance); // Always taken before patched.
39 }
40
41 void EmitJumpIfSmi(Register reg,
42 Label* target,
43 Label::Distance distance = Label::kFar) {
44 __ test(reg, Immediate(kSmiTagMask));
45 EmitJump(carry, target, distance); // Never taken before patched.
46 }
47
48 void EmitPatchInfo() {
49 if (patch_site_.is_bound()) {
50 int delta_to_patch_site = masm_->SizeOfCodeGeneratedSince(&patch_site_);
51 DCHECK(is_uint8(delta_to_patch_site));
52 __ test(eax, Immediate(delta_to_patch_site));
53#ifdef DEBUG
54 info_emitted_ = true;
55#endif
56 } else {
57 __ nop(); // Signals no inlined code.
58 }
59 }
60
61 private:
62 // jc will be patched with jz, jnc will become jnz.
63 void EmitJump(Condition cc, Label* target, Label::Distance distance) {
64 DCHECK(!patch_site_.is_bound() && !info_emitted_);
65 DCHECK(cc == carry || cc == not_carry);
66 __ bind(&patch_site_);
67 __ j(cc, target, distance);
68 }
69
Ben Murdoch097c5b22016-05-18 11:27:45 +010070 MacroAssembler* masm() { return masm_; }
Ben Murdoch4a90d5f2016-03-22 12:00:34 +000071 MacroAssembler* masm_;
72 Label patch_site_;
73#ifdef DEBUG
74 bool info_emitted_;
75#endif
76};
77
78
79// Generate code for a JS function. On entry to the function the receiver
80// and arguments have been pushed on the stack left to right, with the
81// return address on top of them. The actual argument count matches the
82// formal parameter count expected by the function.
83//
84// The live registers are:
85// o edi: the JS function object being called (i.e. ourselves)
86// o edx: the new target value
87// o esi: our context
88// o ebp: our caller's frame pointer
89// o esp: stack pointer (pointing to return address)
90//
91// The function builds a JS frame. Please see JavaScriptFrameConstants in
92// frames-ia32.h for its layout.
93void FullCodeGenerator::Generate() {
94 CompilationInfo* info = info_;
95 profiling_counter_ = isolate()->factory()->NewCell(
96 Handle<Smi>(Smi::FromInt(FLAG_interrupt_budget), isolate()));
97 SetFunctionPosition(literal());
98 Comment cmnt(masm_, "[ function compiled by full code generator");
99
100 ProfileEntryHookStub::MaybeCallEntryHook(masm_);
101
Ben Murdoch4a90d5f2016-03-22 12:00:34 +0000102 if (FLAG_debug_code && info->ExpectsJSReceiverAsReceiver()) {
103 int receiver_offset = (info->scope()->num_parameters() + 1) * kPointerSize;
104 __ mov(ecx, Operand(esp, receiver_offset));
105 __ AssertNotSmi(ecx);
106 __ CmpObjectType(ecx, FIRST_JS_RECEIVER_TYPE, ecx);
107 __ Assert(above_equal, kSloppyFunctionExpectsJSReceiverReceiver);
108 }
109
110 // Open a frame scope to indicate that there is a frame on the stack. The
111 // MANUAL indicates that the scope shouldn't actually generate code to set up
112 // the frame (that is done below).
113 FrameScope frame_scope(masm_, StackFrame::MANUAL);
114
115 info->set_prologue_offset(masm_->pc_offset());
116 __ Prologue(info->GeneratePreagedPrologue());
117
118 { Comment cmnt(masm_, "[ Allocate locals");
119 int locals_count = info->scope()->num_stack_slots();
120 // Generators allocate locals, if any, in context slots.
121 DCHECK(!IsGeneratorFunction(literal()->kind()) || locals_count == 0);
Ben Murdoch097c5b22016-05-18 11:27:45 +0100122 OperandStackDepthIncrement(locals_count);
Ben Murdoch4a90d5f2016-03-22 12:00:34 +0000123 if (locals_count == 1) {
124 __ push(Immediate(isolate()->factory()->undefined_value()));
125 } else if (locals_count > 1) {
126 if (locals_count >= 128) {
127 Label ok;
128 __ mov(ecx, esp);
129 __ sub(ecx, Immediate(locals_count * kPointerSize));
130 ExternalReference stack_limit =
131 ExternalReference::address_of_real_stack_limit(isolate());
132 __ cmp(ecx, Operand::StaticVariable(stack_limit));
133 __ j(above_equal, &ok, Label::kNear);
134 __ CallRuntime(Runtime::kThrowStackOverflow);
135 __ bind(&ok);
136 }
137 __ mov(eax, Immediate(isolate()->factory()->undefined_value()));
138 const int kMaxPushes = 32;
139 if (locals_count >= kMaxPushes) {
140 int loop_iterations = locals_count / kMaxPushes;
141 __ mov(ecx, loop_iterations);
142 Label loop_header;
143 __ bind(&loop_header);
144 // Do pushes.
145 for (int i = 0; i < kMaxPushes; i++) {
146 __ push(eax);
147 }
148 __ dec(ecx);
149 __ j(not_zero, &loop_header, Label::kNear);
150 }
151 int remaining = locals_count % kMaxPushes;
152 // Emit the remaining pushes.
153 for (int i = 0; i < remaining; i++) {
154 __ push(eax);
155 }
156 }
157 }
158
159 bool function_in_register = true;
160
161 // Possibly allocate a local context.
162 if (info->scope()->num_heap_slots() > 0) {
163 Comment cmnt(masm_, "[ Allocate context");
164 bool need_write_barrier = true;
165 int slots = info->scope()->num_heap_slots() - Context::MIN_CONTEXT_SLOTS;
166 // Argument to NewContext is the function, which is still in edi.
167 if (info->scope()->is_script_scope()) {
168 __ push(edi);
169 __ Push(info->scope()->GetScopeInfo(info->isolate()));
170 __ CallRuntime(Runtime::kNewScriptContext);
Ben Murdochc5610432016-08-08 18:44:38 +0100171 PrepareForBailoutForId(BailoutId::ScriptContext(),
172 BailoutState::TOS_REGISTER);
Ben Murdoch4a90d5f2016-03-22 12:00:34 +0000173 // The new target value is not used, clobbering is safe.
174 DCHECK_NULL(info->scope()->new_target_var());
175 } else {
176 if (info->scope()->new_target_var() != nullptr) {
177 __ push(edx); // Preserve new target.
178 }
179 if (slots <= FastNewContextStub::kMaximumSlots) {
180 FastNewContextStub stub(isolate(), slots);
181 __ CallStub(&stub);
182 // Result of FastNewContextStub is always in new space.
183 need_write_barrier = false;
184 } else {
185 __ push(edi);
186 __ CallRuntime(Runtime::kNewFunctionContext);
187 }
188 if (info->scope()->new_target_var() != nullptr) {
189 __ pop(edx); // Restore new target.
190 }
191 }
192 function_in_register = false;
193 // Context is returned in eax. It replaces the context passed to us.
194 // It's saved in the stack and kept live in esi.
195 __ mov(esi, eax);
196 __ mov(Operand(ebp, StandardFrameConstants::kContextOffset), eax);
197
198 // Copy parameters into context if necessary.
199 int num_parameters = info->scope()->num_parameters();
200 int first_parameter = info->scope()->has_this_declaration() ? -1 : 0;
201 for (int i = first_parameter; i < num_parameters; i++) {
202 Variable* var = (i == -1) ? scope()->receiver() : scope()->parameter(i);
203 if (var->IsContextSlot()) {
204 int parameter_offset = StandardFrameConstants::kCallerSPOffset +
205 (num_parameters - 1 - i) * kPointerSize;
206 // Load parameter from stack.
207 __ mov(eax, Operand(ebp, parameter_offset));
208 // Store it in the context.
209 int context_offset = Context::SlotOffset(var->index());
210 __ mov(Operand(esi, context_offset), eax);
211 // Update the write barrier. This clobbers eax and ebx.
212 if (need_write_barrier) {
213 __ RecordWriteContextSlot(esi,
214 context_offset,
215 eax,
216 ebx,
217 kDontSaveFPRegs);
218 } else if (FLAG_debug_code) {
219 Label done;
220 __ JumpIfInNewSpace(esi, eax, &done, Label::kNear);
221 __ Abort(kExpectedNewSpaceObject);
222 __ bind(&done);
223 }
224 }
225 }
226 }
227
228 // Register holding this function and new target are both trashed in case we
229 // bailout here. But since that can happen only when new target is not used
230 // and we allocate a context, the value of |function_in_register| is correct.
Ben Murdochc5610432016-08-08 18:44:38 +0100231 PrepareForBailoutForId(BailoutId::FunctionContext(),
232 BailoutState::NO_REGISTERS);
Ben Murdoch4a90d5f2016-03-22 12:00:34 +0000233
234 // Possibly set up a local binding to the this function which is used in
235 // derived constructors with super calls.
236 Variable* this_function_var = scope()->this_function_var();
237 if (this_function_var != nullptr) {
238 Comment cmnt(masm_, "[ This function");
239 if (!function_in_register) {
240 __ mov(edi, Operand(ebp, JavaScriptFrameConstants::kFunctionOffset));
241 // The write barrier clobbers register again, keep it marked as such.
242 }
243 SetVar(this_function_var, edi, ebx, ecx);
244 }
245
246 // Possibly set up a local binding to the new target value.
247 Variable* new_target_var = scope()->new_target_var();
248 if (new_target_var != nullptr) {
249 Comment cmnt(masm_, "[ new.target");
250 SetVar(new_target_var, edx, ebx, ecx);
251 }
252
253 // Possibly allocate RestParameters
254 int rest_index;
255 Variable* rest_param = scope()->rest_parameter(&rest_index);
256 if (rest_param) {
257 Comment cmnt(masm_, "[ Allocate rest parameter array");
Ben Murdoch097c5b22016-05-18 11:27:45 +0100258 if (!function_in_register) {
259 __ mov(edi, Operand(ebp, JavaScriptFrameConstants::kFunctionOffset));
260 }
261 FastNewRestParameterStub stub(isolate());
Ben Murdoch4a90d5f2016-03-22 12:00:34 +0000262 __ CallStub(&stub);
Ben Murdoch097c5b22016-05-18 11:27:45 +0100263 function_in_register = false;
Ben Murdoch4a90d5f2016-03-22 12:00:34 +0000264 SetVar(rest_param, eax, ebx, edx);
265 }
266
267 Variable* arguments = scope()->arguments();
268 if (arguments != NULL) {
Ben Murdoch097c5b22016-05-18 11:27:45 +0100269 // Arguments object must be allocated after the context object, in
270 // case the "arguments" or ".arguments" variables are in the context.
Ben Murdoch4a90d5f2016-03-22 12:00:34 +0000271 Comment cmnt(masm_, "[ Allocate arguments object");
Ben Murdoch4a90d5f2016-03-22 12:00:34 +0000272 if (!function_in_register) {
273 __ mov(edi, Operand(ebp, JavaScriptFrameConstants::kFunctionOffset));
274 }
Ben Murdoch097c5b22016-05-18 11:27:45 +0100275 if (is_strict(language_mode()) || !has_simple_parameters()) {
276 FastNewStrictArgumentsStub stub(isolate());
277 __ CallStub(&stub);
278 } else if (literal()->has_duplicate_parameters()) {
279 __ Push(edi);
280 __ CallRuntime(Runtime::kNewSloppyArguments_Generic);
281 } else {
282 FastNewSloppyArgumentsStub stub(isolate());
283 __ CallStub(&stub);
284 }
Ben Murdoch4a90d5f2016-03-22 12:00:34 +0000285
286 SetVar(arguments, eax, ebx, edx);
287 }
288
289 if (FLAG_trace) {
290 __ CallRuntime(Runtime::kTraceEnter);
291 }
292
Ben Murdochda12d292016-06-02 14:46:10 +0100293 // Visit the declarations and body.
Ben Murdochc5610432016-08-08 18:44:38 +0100294 PrepareForBailoutForId(BailoutId::FunctionEntry(),
295 BailoutState::NO_REGISTERS);
Ben Murdochda12d292016-06-02 14:46:10 +0100296 {
Ben Murdoch4a90d5f2016-03-22 12:00:34 +0000297 Comment cmnt(masm_, "[ Declarations");
Ben Murdochda12d292016-06-02 14:46:10 +0100298 VisitDeclarations(scope()->declarations());
299 }
Ben Murdoch4a90d5f2016-03-22 12:00:34 +0000300
Ben Murdochda12d292016-06-02 14:46:10 +0100301 // Assert that the declarations do not use ICs. Otherwise the debugger
302 // won't be able to redirect a PC at an IC to the correct IC in newly
303 // recompiled code.
304 DCHECK_EQ(0, ic_total_count_);
Ben Murdoch4a90d5f2016-03-22 12:00:34 +0000305
Ben Murdochda12d292016-06-02 14:46:10 +0100306 {
307 Comment cmnt(masm_, "[ Stack check");
Ben Murdochc5610432016-08-08 18:44:38 +0100308 PrepareForBailoutForId(BailoutId::Declarations(),
309 BailoutState::NO_REGISTERS);
Ben Murdochda12d292016-06-02 14:46:10 +0100310 Label ok;
311 ExternalReference stack_limit =
312 ExternalReference::address_of_stack_limit(isolate());
313 __ cmp(esp, Operand::StaticVariable(stack_limit));
314 __ j(above_equal, &ok, Label::kNear);
315 __ call(isolate()->builtins()->StackCheck(), RelocInfo::CODE_TARGET);
316 __ bind(&ok);
317 }
Ben Murdoch4a90d5f2016-03-22 12:00:34 +0000318
Ben Murdochda12d292016-06-02 14:46:10 +0100319 {
320 Comment cmnt(masm_, "[ Body");
321 DCHECK(loop_depth() == 0);
322 VisitStatements(literal()->body());
323 DCHECK(loop_depth() == 0);
Ben Murdoch4a90d5f2016-03-22 12:00:34 +0000324 }
325
326 // Always emit a 'return undefined' in case control fell off the end of
327 // the body.
328 { Comment cmnt(masm_, "[ return <undefined>;");
329 __ mov(eax, isolate()->factory()->undefined_value());
330 EmitReturnSequence();
331 }
332}
333
334
335void FullCodeGenerator::ClearAccumulator() {
336 __ Move(eax, Immediate(Smi::FromInt(0)));
337}
338
339
340void FullCodeGenerator::EmitProfilingCounterDecrement(int delta) {
341 __ mov(ebx, Immediate(profiling_counter_));
342 __ sub(FieldOperand(ebx, Cell::kValueOffset),
343 Immediate(Smi::FromInt(delta)));
344}
345
346
347void FullCodeGenerator::EmitProfilingCounterReset() {
348 int reset_value = FLAG_interrupt_budget;
349 __ mov(ebx, Immediate(profiling_counter_));
350 __ mov(FieldOperand(ebx, Cell::kValueOffset),
351 Immediate(Smi::FromInt(reset_value)));
352}
353
354
355void FullCodeGenerator::EmitBackEdgeBookkeeping(IterationStatement* stmt,
356 Label* back_edge_target) {
357 Comment cmnt(masm_, "[ Back edge bookkeeping");
358 Label ok;
359
360 DCHECK(back_edge_target->is_bound());
361 int distance = masm_->SizeOfCodeGeneratedSince(back_edge_target);
362 int weight = Min(kMaxBackEdgeWeight,
363 Max(1, distance / kCodeSizeMultiplier));
364 EmitProfilingCounterDecrement(weight);
365 __ j(positive, &ok, Label::kNear);
366 __ call(isolate()->builtins()->InterruptCheck(), RelocInfo::CODE_TARGET);
367
368 // Record a mapping of this PC offset to the OSR id. This is used to find
369 // the AST id from the unoptimized code in order to use it as a key into
370 // the deoptimization input data found in the optimized code.
371 RecordBackEdge(stmt->OsrEntryId());
372
373 EmitProfilingCounterReset();
374
375 __ bind(&ok);
Ben Murdochc5610432016-08-08 18:44:38 +0100376 PrepareForBailoutForId(stmt->EntryId(), BailoutState::NO_REGISTERS);
Ben Murdoch4a90d5f2016-03-22 12:00:34 +0000377 // Record a mapping of the OSR id to this PC. This is used if the OSR
378 // entry becomes the target of a bailout. We don't expect it to be, but
379 // we want it to work if it is.
Ben Murdochc5610432016-08-08 18:44:38 +0100380 PrepareForBailoutForId(stmt->OsrEntryId(), BailoutState::NO_REGISTERS);
Ben Murdoch4a90d5f2016-03-22 12:00:34 +0000381}
382
Ben Murdoch097c5b22016-05-18 11:27:45 +0100383void FullCodeGenerator::EmitProfilingCounterHandlingForReturnSequence(
384 bool is_tail_call) {
385 // Pretend that the exit is a backwards jump to the entry.
386 int weight = 1;
387 if (info_->ShouldSelfOptimize()) {
388 weight = FLAG_interrupt_budget / FLAG_self_opt_count;
389 } else {
390 int distance = masm_->pc_offset();
391 weight = Min(kMaxBackEdgeWeight, Max(1, distance / kCodeSizeMultiplier));
392 }
393 EmitProfilingCounterDecrement(weight);
394 Label ok;
395 __ j(positive, &ok, Label::kNear);
396 // Don't need to save result register if we are going to do a tail call.
397 if (!is_tail_call) {
398 __ push(eax);
399 }
400 __ call(isolate()->builtins()->InterruptCheck(), RelocInfo::CODE_TARGET);
401 if (!is_tail_call) {
402 __ pop(eax);
403 }
404 EmitProfilingCounterReset();
405 __ bind(&ok);
406}
Ben Murdoch4a90d5f2016-03-22 12:00:34 +0000407
408void FullCodeGenerator::EmitReturnSequence() {
409 Comment cmnt(masm_, "[ Return sequence");
410 if (return_label_.is_bound()) {
411 __ jmp(&return_label_);
412 } else {
413 // Common return label
414 __ bind(&return_label_);
415 if (FLAG_trace) {
416 __ push(eax);
417 __ CallRuntime(Runtime::kTraceExit);
418 }
Ben Murdoch097c5b22016-05-18 11:27:45 +0100419 EmitProfilingCounterHandlingForReturnSequence(false);
Ben Murdoch4a90d5f2016-03-22 12:00:34 +0000420
421 SetReturnPosition(literal());
422 __ leave();
423
424 int arg_count = info_->scope()->num_parameters() + 1;
425 int arguments_bytes = arg_count * kPointerSize;
426 __ Ret(arguments_bytes, ecx);
427 }
428}
429
Ben Murdochc5610432016-08-08 18:44:38 +0100430void FullCodeGenerator::RestoreContext() {
431 __ mov(esi, Operand(ebp, StandardFrameConstants::kContextOffset));
432}
Ben Murdoch4a90d5f2016-03-22 12:00:34 +0000433
434void FullCodeGenerator::StackValueContext::Plug(Variable* var) const {
435 DCHECK(var->IsStackAllocated() || var->IsContextSlot());
436 MemOperand operand = codegen()->VarOperand(var, result_register());
437 // Memory operands can be pushed directly.
Ben Murdoch097c5b22016-05-18 11:27:45 +0100438 codegen()->PushOperand(operand);
Ben Murdoch4a90d5f2016-03-22 12:00:34 +0000439}
440
441
442void FullCodeGenerator::EffectContext::Plug(Heap::RootListIndex index) const {
443 UNREACHABLE(); // Not used on IA32.
444}
445
446
447void FullCodeGenerator::AccumulatorValueContext::Plug(
448 Heap::RootListIndex index) const {
449 UNREACHABLE(); // Not used on IA32.
450}
451
452
453void FullCodeGenerator::StackValueContext::Plug(
454 Heap::RootListIndex index) const {
455 UNREACHABLE(); // Not used on IA32.
456}
457
458
459void FullCodeGenerator::TestContext::Plug(Heap::RootListIndex index) const {
460 UNREACHABLE(); // Not used on IA32.
461}
462
463
464void FullCodeGenerator::EffectContext::Plug(Handle<Object> lit) const {
465}
466
467
468void FullCodeGenerator::AccumulatorValueContext::Plug(
469 Handle<Object> lit) const {
470 if (lit->IsSmi()) {
471 __ SafeMove(result_register(), Immediate(lit));
472 } else {
473 __ Move(result_register(), Immediate(lit));
474 }
475}
476
477
478void FullCodeGenerator::StackValueContext::Plug(Handle<Object> lit) const {
Ben Murdoch097c5b22016-05-18 11:27:45 +0100479 codegen()->OperandStackDepthIncrement(1);
Ben Murdoch4a90d5f2016-03-22 12:00:34 +0000480 if (lit->IsSmi()) {
481 __ SafePush(Immediate(lit));
482 } else {
483 __ push(Immediate(lit));
484 }
485}
486
487
488void FullCodeGenerator::TestContext::Plug(Handle<Object> lit) const {
489 codegen()->PrepareForBailoutBeforeSplit(condition(),
490 true,
491 true_label_,
492 false_label_);
Ben Murdochda12d292016-06-02 14:46:10 +0100493 DCHECK(lit->IsNull() || lit->IsUndefined() || !lit->IsUndetectable());
Ben Murdoch4a90d5f2016-03-22 12:00:34 +0000494 if (lit->IsUndefined() || lit->IsNull() || lit->IsFalse()) {
495 if (false_label_ != fall_through_) __ jmp(false_label_);
496 } else if (lit->IsTrue() || lit->IsJSObject()) {
497 if (true_label_ != fall_through_) __ jmp(true_label_);
498 } else if (lit->IsString()) {
499 if (String::cast(*lit)->length() == 0) {
500 if (false_label_ != fall_through_) __ jmp(false_label_);
501 } else {
502 if (true_label_ != fall_through_) __ jmp(true_label_);
503 }
504 } else if (lit->IsSmi()) {
505 if (Smi::cast(*lit)->value() == 0) {
506 if (false_label_ != fall_through_) __ jmp(false_label_);
507 } else {
508 if (true_label_ != fall_through_) __ jmp(true_label_);
509 }
510 } else {
511 // For simplicity we always test the accumulator register.
512 __ mov(result_register(), lit);
513 codegen()->DoTest(this);
514 }
515}
516
517
Ben Murdoch4a90d5f2016-03-22 12:00:34 +0000518void FullCodeGenerator::StackValueContext::DropAndPlug(int count,
519 Register reg) const {
520 DCHECK(count > 0);
Ben Murdoch097c5b22016-05-18 11:27:45 +0100521 if (count > 1) codegen()->DropOperands(count - 1);
Ben Murdoch4a90d5f2016-03-22 12:00:34 +0000522 __ mov(Operand(esp, 0), reg);
523}
524
525
Ben Murdoch4a90d5f2016-03-22 12:00:34 +0000526void FullCodeGenerator::EffectContext::Plug(Label* materialize_true,
527 Label* materialize_false) const {
528 DCHECK(materialize_true == materialize_false);
529 __ bind(materialize_true);
530}
531
532
533void FullCodeGenerator::AccumulatorValueContext::Plug(
534 Label* materialize_true,
535 Label* materialize_false) const {
536 Label done;
537 __ bind(materialize_true);
538 __ mov(result_register(), isolate()->factory()->true_value());
539 __ jmp(&done, Label::kNear);
540 __ bind(materialize_false);
541 __ mov(result_register(), isolate()->factory()->false_value());
542 __ bind(&done);
543}
544
545
546void FullCodeGenerator::StackValueContext::Plug(
547 Label* materialize_true,
548 Label* materialize_false) const {
Ben Murdoch097c5b22016-05-18 11:27:45 +0100549 codegen()->OperandStackDepthIncrement(1);
Ben Murdoch4a90d5f2016-03-22 12:00:34 +0000550 Label done;
551 __ bind(materialize_true);
552 __ push(Immediate(isolate()->factory()->true_value()));
553 __ jmp(&done, Label::kNear);
554 __ bind(materialize_false);
555 __ push(Immediate(isolate()->factory()->false_value()));
556 __ bind(&done);
557}
558
559
560void FullCodeGenerator::TestContext::Plug(Label* materialize_true,
561 Label* materialize_false) const {
562 DCHECK(materialize_true == true_label_);
563 DCHECK(materialize_false == false_label_);
564}
565
566
567void FullCodeGenerator::AccumulatorValueContext::Plug(bool flag) const {
568 Handle<Object> value = flag
569 ? isolate()->factory()->true_value()
570 : isolate()->factory()->false_value();
571 __ mov(result_register(), value);
572}
573
574
575void FullCodeGenerator::StackValueContext::Plug(bool flag) const {
Ben Murdoch097c5b22016-05-18 11:27:45 +0100576 codegen()->OperandStackDepthIncrement(1);
Ben Murdoch4a90d5f2016-03-22 12:00:34 +0000577 Handle<Object> value = flag
578 ? isolate()->factory()->true_value()
579 : isolate()->factory()->false_value();
580 __ push(Immediate(value));
581}
582
583
584void FullCodeGenerator::TestContext::Plug(bool flag) const {
585 codegen()->PrepareForBailoutBeforeSplit(condition(),
586 true,
587 true_label_,
588 false_label_);
589 if (flag) {
590 if (true_label_ != fall_through_) __ jmp(true_label_);
591 } else {
592 if (false_label_ != fall_through_) __ jmp(false_label_);
593 }
594}
595
596
597void FullCodeGenerator::DoTest(Expression* condition,
598 Label* if_true,
599 Label* if_false,
600 Label* fall_through) {
Ben Murdochda12d292016-06-02 14:46:10 +0100601 Handle<Code> ic = ToBooleanICStub::GetUninitialized(isolate());
Ben Murdoch4a90d5f2016-03-22 12:00:34 +0000602 CallIC(ic, condition->test_id());
603 __ CompareRoot(result_register(), Heap::kTrueValueRootIndex);
604 Split(equal, if_true, if_false, fall_through);
605}
606
607
608void FullCodeGenerator::Split(Condition cc,
609 Label* if_true,
610 Label* if_false,
611 Label* fall_through) {
612 if (if_false == fall_through) {
613 __ j(cc, if_true);
614 } else if (if_true == fall_through) {
615 __ j(NegateCondition(cc), if_false);
616 } else {
617 __ j(cc, if_true);
618 __ jmp(if_false);
619 }
620}
621
622
623MemOperand FullCodeGenerator::StackOperand(Variable* var) {
624 DCHECK(var->IsStackAllocated());
625 // Offset is negative because higher indexes are at lower addresses.
626 int offset = -var->index() * kPointerSize;
627 // Adjust by a (parameter or local) base offset.
628 if (var->IsParameter()) {
629 offset += (info_->scope()->num_parameters() + 1) * kPointerSize;
630 } else {
631 offset += JavaScriptFrameConstants::kLocal0Offset;
632 }
633 return Operand(ebp, offset);
634}
635
636
637MemOperand FullCodeGenerator::VarOperand(Variable* var, Register scratch) {
638 DCHECK(var->IsContextSlot() || var->IsStackAllocated());
639 if (var->IsContextSlot()) {
640 int context_chain_length = scope()->ContextChainLength(var->scope());
641 __ LoadContext(scratch, context_chain_length);
642 return ContextOperand(scratch, var->index());
643 } else {
644 return StackOperand(var);
645 }
646}
647
648
649void FullCodeGenerator::GetVar(Register dest, Variable* var) {
650 DCHECK(var->IsContextSlot() || var->IsStackAllocated());
651 MemOperand location = VarOperand(var, dest);
652 __ mov(dest, location);
653}
654
655
656void FullCodeGenerator::SetVar(Variable* var,
657 Register src,
658 Register scratch0,
659 Register scratch1) {
660 DCHECK(var->IsContextSlot() || var->IsStackAllocated());
661 DCHECK(!scratch0.is(src));
662 DCHECK(!scratch0.is(scratch1));
663 DCHECK(!scratch1.is(src));
664 MemOperand location = VarOperand(var, scratch0);
665 __ mov(location, src);
666
667 // Emit the write barrier code if the location is in the heap.
668 if (var->IsContextSlot()) {
669 int offset = Context::SlotOffset(var->index());
670 DCHECK(!scratch0.is(esi) && !src.is(esi) && !scratch1.is(esi));
671 __ RecordWriteContextSlot(scratch0, offset, src, scratch1, kDontSaveFPRegs);
672 }
673}
674
675
676void FullCodeGenerator::PrepareForBailoutBeforeSplit(Expression* expr,
677 bool should_normalize,
678 Label* if_true,
679 Label* if_false) {
680 // Only prepare for bailouts before splits if we're in a test
681 // context. Otherwise, we let the Visit function deal with the
682 // preparation to avoid preparing with the same AST id twice.
683 if (!context()->IsTest()) return;
684
685 Label skip;
686 if (should_normalize) __ jmp(&skip, Label::kNear);
Ben Murdochc5610432016-08-08 18:44:38 +0100687 PrepareForBailout(expr, BailoutState::TOS_REGISTER);
Ben Murdoch4a90d5f2016-03-22 12:00:34 +0000688 if (should_normalize) {
689 __ cmp(eax, isolate()->factory()->true_value());
690 Split(equal, if_true, if_false, NULL);
691 __ bind(&skip);
692 }
693}
694
695
696void FullCodeGenerator::EmitDebugCheckDeclarationContext(Variable* variable) {
697 // The variable in the declaration always resides in the current context.
698 DCHECK_EQ(0, scope()->ContextChainLength(variable->scope()));
Ben Murdoch097c5b22016-05-18 11:27:45 +0100699 if (FLAG_debug_code) {
Ben Murdoch4a90d5f2016-03-22 12:00:34 +0000700 // Check that we're not inside a with or catch context.
701 __ mov(ebx, FieldOperand(esi, HeapObject::kMapOffset));
702 __ cmp(ebx, isolate()->factory()->with_context_map());
703 __ Check(not_equal, kDeclarationInWithContext);
704 __ cmp(ebx, isolate()->factory()->catch_context_map());
705 __ Check(not_equal, kDeclarationInCatchContext);
706 }
707}
708
709
710void FullCodeGenerator::VisitVariableDeclaration(
711 VariableDeclaration* declaration) {
712 // If it was not possible to allocate the variable at compile time, we
713 // need to "declare" it at runtime to make sure it actually exists in the
714 // local context.
715 VariableProxy* proxy = declaration->proxy();
716 VariableMode mode = declaration->mode();
717 Variable* variable = proxy->var();
Ben Murdochc5610432016-08-08 18:44:38 +0100718 bool hole_init = mode == LET || mode == CONST;
Ben Murdoch4a90d5f2016-03-22 12:00:34 +0000719 switch (variable->location()) {
720 case VariableLocation::GLOBAL:
721 case VariableLocation::UNALLOCATED:
Ben Murdochc5610432016-08-08 18:44:38 +0100722 DCHECK(!variable->binding_needs_init());
Ben Murdoch4a90d5f2016-03-22 12:00:34 +0000723 globals_->Add(variable->name(), zone());
Ben Murdochc5610432016-08-08 18:44:38 +0100724 globals_->Add(isolate()->factory()->undefined_value(), zone());
Ben Murdoch4a90d5f2016-03-22 12:00:34 +0000725 break;
726
727 case VariableLocation::PARAMETER:
728 case VariableLocation::LOCAL:
729 if (hole_init) {
730 Comment cmnt(masm_, "[ VariableDeclaration");
731 __ mov(StackOperand(variable),
732 Immediate(isolate()->factory()->the_hole_value()));
733 }
734 break;
735
736 case VariableLocation::CONTEXT:
737 if (hole_init) {
738 Comment cmnt(masm_, "[ VariableDeclaration");
739 EmitDebugCheckDeclarationContext(variable);
740 __ mov(ContextOperand(esi, variable->index()),
741 Immediate(isolate()->factory()->the_hole_value()));
742 // No write barrier since the hole value is in old space.
Ben Murdochc5610432016-08-08 18:44:38 +0100743 PrepareForBailoutForId(proxy->id(), BailoutState::NO_REGISTERS);
Ben Murdoch4a90d5f2016-03-22 12:00:34 +0000744 }
745 break;
746
747 case VariableLocation::LOOKUP: {
748 Comment cmnt(masm_, "[ VariableDeclaration");
749 __ push(Immediate(variable->name()));
750 // VariableDeclaration nodes are always introduced in one of four modes.
751 DCHECK(IsDeclaredVariableMode(mode));
752 // Push initial value, if any.
753 // Note: For variables we must not push an initial value (such as
754 // 'undefined') because we may have a (legal) redeclaration and we
755 // must not destroy the current value.
756 if (hole_init) {
757 __ push(Immediate(isolate()->factory()->the_hole_value()));
758 } else {
759 __ push(Immediate(Smi::FromInt(0))); // Indicates no initial value.
760 }
761 __ push(
762 Immediate(Smi::FromInt(variable->DeclarationPropertyAttributes())));
763 __ CallRuntime(Runtime::kDeclareLookupSlot);
Ben Murdochc5610432016-08-08 18:44:38 +0100764 PrepareForBailoutForId(proxy->id(), BailoutState::NO_REGISTERS);
Ben Murdoch4a90d5f2016-03-22 12:00:34 +0000765 break;
766 }
767 }
768}
769
770
771void FullCodeGenerator::VisitFunctionDeclaration(
772 FunctionDeclaration* declaration) {
773 VariableProxy* proxy = declaration->proxy();
774 Variable* variable = proxy->var();
775 switch (variable->location()) {
776 case VariableLocation::GLOBAL:
777 case VariableLocation::UNALLOCATED: {
778 globals_->Add(variable->name(), zone());
779 Handle<SharedFunctionInfo> function =
780 Compiler::GetSharedFunctionInfo(declaration->fun(), script(), info_);
781 // Check for stack-overflow exception.
782 if (function.is_null()) return SetStackOverflow();
783 globals_->Add(function, zone());
784 break;
785 }
786
787 case VariableLocation::PARAMETER:
788 case VariableLocation::LOCAL: {
789 Comment cmnt(masm_, "[ FunctionDeclaration");
790 VisitForAccumulatorValue(declaration->fun());
791 __ mov(StackOperand(variable), result_register());
792 break;
793 }
794
795 case VariableLocation::CONTEXT: {
796 Comment cmnt(masm_, "[ FunctionDeclaration");
797 EmitDebugCheckDeclarationContext(variable);
798 VisitForAccumulatorValue(declaration->fun());
799 __ mov(ContextOperand(esi, variable->index()), result_register());
800 // We know that we have written a function, which is not a smi.
801 __ RecordWriteContextSlot(esi,
802 Context::SlotOffset(variable->index()),
803 result_register(),
804 ecx,
805 kDontSaveFPRegs,
806 EMIT_REMEMBERED_SET,
807 OMIT_SMI_CHECK);
Ben Murdochc5610432016-08-08 18:44:38 +0100808 PrepareForBailoutForId(proxy->id(), BailoutState::NO_REGISTERS);
Ben Murdoch4a90d5f2016-03-22 12:00:34 +0000809 break;
810 }
811
812 case VariableLocation::LOOKUP: {
813 Comment cmnt(masm_, "[ FunctionDeclaration");
Ben Murdoch097c5b22016-05-18 11:27:45 +0100814 PushOperand(variable->name());
Ben Murdoch4a90d5f2016-03-22 12:00:34 +0000815 VisitForStackValue(declaration->fun());
Ben Murdoch097c5b22016-05-18 11:27:45 +0100816 PushOperand(Smi::FromInt(variable->DeclarationPropertyAttributes()));
817 CallRuntimeWithOperands(Runtime::kDeclareLookupSlot);
Ben Murdochc5610432016-08-08 18:44:38 +0100818 PrepareForBailoutForId(proxy->id(), BailoutState::NO_REGISTERS);
Ben Murdoch4a90d5f2016-03-22 12:00:34 +0000819 break;
820 }
821 }
822}
823
824
825void FullCodeGenerator::DeclareGlobals(Handle<FixedArray> pairs) {
826 // Call the runtime to declare the globals.
827 __ Push(pairs);
828 __ Push(Smi::FromInt(DeclareGlobalsFlags()));
829 __ CallRuntime(Runtime::kDeclareGlobals);
830 // Return value is ignored.
831}
832
833
834void FullCodeGenerator::DeclareModules(Handle<FixedArray> descriptions) {
835 // Call the runtime to declare the modules.
836 __ Push(descriptions);
837 __ CallRuntime(Runtime::kDeclareModules);
838 // Return value is ignored.
839}
840
841
// Compiles a JavaScript switch statement in two passes: first all the case
// comparisons (each branching to its body label on a strict-equality match),
// then all the case bodies. The switch tag value stays on the operand stack
// until some case matches and drops it.
void FullCodeGenerator::VisitSwitchStatement(SwitchStatement* stmt) {
  Comment cmnt(masm_, "[ SwitchStatement");
  Breakable nested_statement(this, stmt);
  SetStatementPosition(stmt);

  // Keep the switch value on the stack until a case matches.
  VisitForStackValue(stmt->tag());
  PrepareForBailoutForId(stmt->EntryId(), BailoutState::NO_REGISTERS);

  ZoneList<CaseClause*>* clauses = stmt->cases();
  CaseClause* default_clause = NULL;  // Can occur anywhere in the list.

  Label next_test;  // Recycled for each test.
  // Compile all the tests with branches to their bodies.
  for (int i = 0; i < clauses->length(); i++) {
    CaseClause* clause = clauses->at(i);
    clause->body_target()->Unuse();

    // The default is not a test, but remember it as final fall through.
    if (clause->is_default()) {
      default_clause = clause;
      continue;
    }

    Comment cmnt(masm_, "[ Case comparison");
    __ bind(&next_test);
    next_test.Unuse();

    // Compile the label expression.
    VisitForAccumulatorValue(clause->label());

    // Perform the comparison as if via '==='.
    __ mov(edx, Operand(esp, 0));  // Switch value.
    bool inline_smi_code = ShouldInlineSmiCase(Token::EQ_STRICT);
    JumpPatchSite patch_site(masm_);
    if (inline_smi_code) {
      // Fast path: if both switch value (edx) and label (eax) are smis,
      // compare them directly without calling the CompareIC.
      Label slow_case;
      __ mov(ecx, edx);
      __ or_(ecx, eax);  // Tag bits of both values combined into ecx.
      patch_site.EmitJumpIfNotSmi(ecx, &slow_case, Label::kNear);

      __ cmp(edx, eax);
      __ j(not_equal, &next_test);
      __ Drop(1);  // Switch value is no longer needed.
      __ jmp(clause->body_target());
      __ bind(&slow_case);
    }

    // Slow path: record position and call the strict-equality CompareIC.
    SetExpressionPosition(clause);
    Handle<Code> ic =
        CodeFactory::CompareIC(isolate(), Token::EQ_STRICT).code();
    CallIC(ic, clause->CompareId());
    patch_site.EmitPatchInfo();

    // The bailout target below must see the IC result in eax (TOS), so the
    // normal path jumps over it.
    Label skip;
    __ jmp(&skip, Label::kNear);
    PrepareForBailout(clause, BailoutState::TOS_REGISTER);
    __ cmp(eax, isolate()->factory()->true_value());
    __ j(not_equal, &next_test);
    __ Drop(1);
    __ jmp(clause->body_target());
    __ bind(&skip);

    // Normal path: the CompareIC signals equality with eax == 0.
    __ test(eax, eax);
    __ j(not_equal, &next_test);
    __ Drop(1);  // Switch value is no longer needed.
    __ jmp(clause->body_target());
  }

  // Discard the test value and jump to the default if present, otherwise to
  // the end of the statement.
  __ bind(&next_test);
  DropOperands(1);  // Switch value is no longer needed.
  if (default_clause == NULL) {
    __ jmp(nested_statement.break_label());
  } else {
    __ jmp(default_clause->body_target());
  }

  // Compile all the case bodies.
  for (int i = 0; i < clauses->length(); i++) {
    Comment cmnt(masm_, "[ Case body");
    CaseClause* clause = clauses->at(i);
    __ bind(clause->body_target());
    PrepareForBailoutForId(clause->EntryId(), BailoutState::NO_REGISTERS);
    VisitStatements(clause->statements());
  }

  __ bind(nested_statement.break_label());
  PrepareForBailoutForId(stmt->ExitId(), BailoutState::NO_REGISTERS);
}
933
934
// Compiles a for-in loop. The loop keeps five values on the operand stack
// (pushed below, popped via DropOperands(5) at the end):
//   esp[0*kPointerSize]: current index (smi)
//   esp[1*kPointerSize]: length of the key array (smi)
//   esp[2*kPointerSize]: key array (enum cache or fixed array of keys)
//   esp[3*kPointerSize]: expected map (fast path) or Smi(1) (slow path)
//   esp[4*kPointerSize]: the enumerable object itself
void FullCodeGenerator::VisitForInStatement(ForInStatement* stmt) {
  Comment cmnt(masm_, "[ ForInStatement");
  SetStatementPosition(stmt, SKIP_BREAK);

  FeedbackVectorSlot slot = stmt->ForInFeedbackSlot();

  // Get the object to enumerate over.
  SetExpressionAsStatementPosition(stmt->enumerable());
  VisitForAccumulatorValue(stmt->enumerable());
  // Account for the five stack slots the loop maintains (see above).
  OperandStackDepthIncrement(5);

  Label loop, exit;
  Iteration loop_statement(this, stmt);
  increment_loop_depth();

  // If the object is null or undefined, skip over the loop, otherwise convert
  // it to a JS receiver.  See ECMA-262 version 5, section 12.6.4.
  Label convert, done_convert;
  __ JumpIfSmi(eax, &convert, Label::kNear);
  __ CmpObjectType(eax, FIRST_JS_RECEIVER_TYPE, ecx);
  __ j(above_equal, &done_convert, Label::kNear);
  __ cmp(eax, isolate()->factory()->undefined_value());
  __ j(equal, &exit);
  __ cmp(eax, isolate()->factory()->null_value());
  __ j(equal, &exit);
  __ bind(&convert);
  ToObjectStub stub(isolate());
  __ CallStub(&stub);
  __ bind(&done_convert);
  PrepareForBailoutForId(stmt->ToObjectId(), BailoutState::TOS_REGISTER);
  __ push(eax);  // Slot 4: the enumerable.

  // Check cache validity in generated code. If we cannot guarantee cache
  // validity, call the runtime system to check cache validity or get the
  // property names in a fixed array. Note: Proxies never have an enum cache,
  // so will always take the slow path.
  Label call_runtime, use_cache, fixed_array;
  __ CheckEnumCache(&call_runtime);

  __ mov(eax, FieldOperand(eax, HeapObject::kMapOffset));
  __ jmp(&use_cache, Label::kNear);

  // Get the set of properties to enumerate.
  __ bind(&call_runtime);
  __ push(eax);
  __ CallRuntime(Runtime::kForInEnumerate);
  PrepareForBailoutForId(stmt->EnumId(), BailoutState::TOS_REGISTER);
  // The runtime returns either a map (enum cache usable) or a fixed array
  // of keys; distinguish by checking for meta_map.
  __ cmp(FieldOperand(eax, HeapObject::kMapOffset),
         isolate()->factory()->meta_map());
  __ j(not_equal, &fixed_array);


  // We got a map in register eax. Get the enumeration cache from it.
  Label no_descriptors;
  __ bind(&use_cache);

  __ EnumLength(edx, eax);
  __ cmp(edx, Immediate(Smi::FromInt(0)));
  __ j(equal, &no_descriptors);

  __ LoadInstanceDescriptors(eax, ecx);
  __ mov(ecx, FieldOperand(ecx, DescriptorArray::kEnumCacheOffset));
  __ mov(ecx, FieldOperand(ecx, DescriptorArray::kEnumCacheBridgeCacheOffset));

  // Set up the four remaining stack slots.
  __ push(eax);  // Map.
  __ push(ecx);  // Enumeration cache.
  __ push(edx);  // Number of valid entries for the map in the enum cache.
  __ push(Immediate(Smi::FromInt(0)));  // Initial index.
  __ jmp(&loop);

  // Zero enumerable properties: pop the enumerable and skip the loop.
  __ bind(&no_descriptors);
  __ add(esp, Immediate(kPointerSize));
  __ jmp(&exit);

  // We got a fixed array in register eax. Iterate through that.
  __ bind(&fixed_array);

  __ push(Immediate(Smi::FromInt(1)));  // Smi(1) indicates slow check
  __ push(eax);  // Array
  __ mov(eax, FieldOperand(eax, FixedArray::kLengthOffset));
  __ push(eax);  // Fixed array length (as smi).
  PrepareForBailoutForId(stmt->PrepareId(), BailoutState::NO_REGISTERS);
  __ push(Immediate(Smi::FromInt(0)));  // Initial index.

  // Generate code for doing the condition check.
  __ bind(&loop);
  SetExpressionAsStatementPosition(stmt->each());

  __ mov(eax, Operand(esp, 0 * kPointerSize));  // Get the current index.
  __ cmp(eax, Operand(esp, 1 * kPointerSize));  // Compare to the array length.
  __ j(above_equal, loop_statement.break_label());

  // Get the current entry of the array into register ebx.
  __ mov(ebx, Operand(esp, 2 * kPointerSize));
  // times_2 because the index is a smi (tagged value == index * 2 on ia32).
  __ mov(ebx, FieldOperand(ebx, eax, times_2, FixedArray::kHeaderSize));

  // Get the expected map from the stack or a smi in the
  // permanent slow case into register edx.
  __ mov(edx, Operand(esp, 3 * kPointerSize));

  // Check if the expected map still matches that of the enumerable.
  // If not, we may have to filter the key.
  Label update_each;
  __ mov(ecx, Operand(esp, 4 * kPointerSize));
  __ cmp(edx, FieldOperand(ecx, HeapObject::kMapOffset));
  __ j(equal, &update_each, Label::kNear);

  // We need to filter the key, record slow-path here.
  int const vector_index = SmiFromSlot(slot)->value();
  __ EmitLoadTypeFeedbackVector(edx);
  __ mov(FieldOperand(edx, FixedArray::OffsetOfElementAt(vector_index)),
         Immediate(TypeFeedbackVector::MegamorphicSentinel(isolate())));

  // Convert the entry to a string or null if it isn't a property
  // anymore. If the property has been removed while iterating, we
  // just skip it.
  __ push(ecx);  // Enumerable.
  __ push(ebx);  // Current entry.
  __ CallRuntime(Runtime::kForInFilter);
  PrepareForBailoutForId(stmt->FilterId(), BailoutState::TOS_REGISTER);
  __ cmp(eax, isolate()->factory()->undefined_value());
  __ j(equal, loop_statement.continue_label());
  __ mov(ebx, eax);

  // Update the 'each' property or variable from the possibly filtered
  // entry in register ebx.
  __ bind(&update_each);
  __ mov(result_register(), ebx);
  // Perform the assignment as if via '='.
  { EffectContext context(this);
    EmitAssignment(stmt->each(), stmt->EachFeedbackSlot());
    PrepareForBailoutForId(stmt->AssignmentId(), BailoutState::NO_REGISTERS);
  }

  // Both Crankshaft and Turbofan expect BodyId to be right before stmt->body().
  PrepareForBailoutForId(stmt->BodyId(), BailoutState::NO_REGISTERS);
  // Generate code for the body of the loop.
  Visit(stmt->body());

  // Generate code for going to the next element by incrementing the
  // index (smi) stored on top of the stack.
  __ bind(loop_statement.continue_label());
  __ add(Operand(esp, 0 * kPointerSize), Immediate(Smi::FromInt(1)));

  EmitBackEdgeBookkeeping(stmt, &loop);
  __ jmp(&loop);

  // Remove the pointers stored on the stack.
  __ bind(loop_statement.break_label());
  DropOperands(5);

  // Exit and decrement the loop depth.
  PrepareForBailoutForId(stmt->ExitId(), BailoutState::NO_REGISTERS);
  __ bind(&exit);
  decrement_loop_depth();
}
1092
1093
// Stores the receiver at the top of the operand stack as the [[HomeObject]]
// of the value found at |offset| slots down the stack (a just-emitted method
// or accessor), using a StoreIC keyed by the home_object_symbol.
void FullCodeGenerator::EmitSetHomeObject(Expression* initializer, int offset,
                                          FeedbackVectorSlot slot) {
  DCHECK(NeedsHomeObject(initializer));
  __ mov(StoreDescriptor::ReceiverRegister(), Operand(esp, 0));
  __ mov(StoreDescriptor::NameRegister(),
         Immediate(isolate()->factory()->home_object_symbol()));
  __ mov(StoreDescriptor::ValueRegister(), Operand(esp, offset * kPointerSize));
  EmitLoadStoreICSlot(slot);
  CallStoreIC();
}
1104
1105
// Same as EmitSetHomeObject, except the receiver of the [[HomeObject]] store
// is taken from the accumulator (eax) rather than the top of the stack.
void FullCodeGenerator::EmitSetHomeObjectAccumulator(Expression* initializer,
                                                     int offset,
                                                     FeedbackVectorSlot slot) {
  DCHECK(NeedsHomeObject(initializer));
  __ mov(StoreDescriptor::ReceiverRegister(), eax);
  __ mov(StoreDescriptor::NameRegister(),
         Immediate(isolate()->factory()->home_object_symbol()));
  __ mov(StoreDescriptor::ValueRegister(), Operand(esp, offset * kPointerSize));
  EmitLoadStoreICSlot(slot);
  CallStoreIC();
}
1117
1118
// Emits the fast path for loading a global that might be shadowed by
// eval-introduced bindings: walk the context chain, verifying every
// relevant context has an empty ("the hole") extension object, then fall
// through to an ordinary global load. Jumps to |slow| if any extension
// object exists (i.e. eval may have introduced a shadowing binding).
void FullCodeGenerator::EmitLoadGlobalCheckExtensions(VariableProxy* proxy,
                                                      TypeofMode typeof_mode,
                                                      Label* slow) {
  Register context = esi;
  Register temp = edx;

  // First, statically walk the scopes this closure was compiled in.
  Scope* s = scope();
  while (s != NULL) {
    if (s->num_heap_slots() > 0) {
      if (s->calls_sloppy_eval()) {
        // Check that extension is "the hole".
        __ JumpIfNotRoot(ContextOperand(context, Context::EXTENSION_INDEX),
                         Heap::kTheHoleValueRootIndex, slow);
      }
      // Load next context in chain.
      __ mov(temp, ContextOperand(context, Context::PREVIOUS_INDEX));
      // Walk the rest of the chain without clobbering esi.
      context = temp;
    }
    // If no outer scope calls eval, we do not need to check more
    // context extensions.  If we have reached an eval scope, we check
    // all extensions from this point.
    if (!s->outer_scope_calls_sloppy_eval() || s->is_eval_scope()) break;
    s = s->outer_scope();
  }

  if (s != NULL && s->is_eval_scope()) {
    // Inside an eval scope the remaining chain is only known at runtime, so
    // emit a loop up the context chain.  There is no frame effect so it is
    // safe to use raw labels here.
    Label next, fast;
    if (!context.is(temp)) {
      __ mov(temp, context);
    }
    __ bind(&next);
    // Terminate at native context.
    __ cmp(FieldOperand(temp, HeapObject::kMapOffset),
           Immediate(isolate()->factory()->native_context_map()));
    __ j(equal, &fast, Label::kNear);
    // Check that extension is "the hole".
    __ JumpIfNotRoot(ContextOperand(temp, Context::EXTENSION_INDEX),
                     Heap::kTheHoleValueRootIndex, slow);
    // Load next context in chain.
    __ mov(temp, ContextOperand(temp, Context::PREVIOUS_INDEX));
    __ jmp(&next);
    __ bind(&fast);
  }

  // All extension objects were empty and it is safe to use a normal global
  // load machinery.
  EmitGlobalVariableLoad(proxy, typeof_mode);
}
1170
1171
// Returns a memory operand for context slot |var|, emitting checks along the
// context chain that no intervening scope has an eval-introduced extension
// object ("the hole" means no extension). Jumps to |slow| otherwise.
// Load-only: the returned operand may be esi-relative, which would be unsafe
// for stores (write barrier would clobber esi).
MemOperand FullCodeGenerator::ContextSlotOperandCheckExtensions(Variable* var,
                                                                Label* slow) {
  DCHECK(var->IsContextSlot());
  Register context = esi;
  Register temp = ebx;

  // Walk from the current scope out to the scope that declares |var|.
  for (Scope* s = scope(); s != var->scope(); s = s->outer_scope()) {
    if (s->num_heap_slots() > 0) {
      if (s->calls_sloppy_eval()) {
        // Check that extension is "the hole".
        __ JumpIfNotRoot(ContextOperand(context, Context::EXTENSION_INDEX),
                         Heap::kTheHoleValueRootIndex, slow);
      }
      __ mov(temp, ContextOperand(context, Context::PREVIOUS_INDEX));
      // Walk the rest of the chain without clobbering esi.
      context = temp;
    }
  }
  // Check that last extension is "the hole".
  __ JumpIfNotRoot(ContextOperand(context, Context::EXTENSION_INDEX),
                   Heap::kTheHoleValueRootIndex, slow);

  // This function is used only for loads, not stores, so it's safe to
  // return an esi-based operand (the write barrier cannot be allowed to
  // destroy the esi register).
  return ContextOperand(context, var->index());
}
1199
1200
// Emits the fast-case load for a dynamically-scoped variable lookup.
// On the fast path (no shadowing eval binding found) the value ends up in
// eax and control jumps to |done|; otherwise control reaches |slow| and the
// caller must emit the full runtime lookup.
void FullCodeGenerator::EmitDynamicLookupFastCase(VariableProxy* proxy,
                                                  TypeofMode typeof_mode,
                                                  Label* slow, Label* done) {
  // Generate fast-case code for variables that might be shadowed by
  // eval-introduced variables.  Eval is used a lot without
  // introducing variables.  In those cases, we do not want to
  // perform a runtime call for all variables in the scope
  // containing the eval.
  Variable* var = proxy->var();
  if (var->mode() == DYNAMIC_GLOBAL) {
    EmitLoadGlobalCheckExtensions(proxy, typeof_mode, slow);
    __ jmp(done);
  } else if (var->mode() == DYNAMIC_LOCAL) {
    Variable* local = var->local_if_not_shadowed();
    __ mov(eax, ContextSlotOperandCheckExtensions(local, slow));
    if (local->mode() == LET || local->mode() == CONST) {
      // An uninitialized let/const binding holds "the hole": throw a
      // ReferenceError on use before initialization (TDZ semantics).
      __ cmp(eax, isolate()->factory()->the_hole_value());
      __ j(not_equal, done);
      __ push(Immediate(var->name()));
      __ CallRuntime(Runtime::kThrowReferenceError);
    }
    __ jmp(done);
  }
}
1225
1226
// Loads a global variable through a LoadIC: receiver is the global object
// (the native context's extension), the name and feedback slot come from the
// proxy. |typeof_mode| selects typeof-tolerant IC behavior.
void FullCodeGenerator::EmitGlobalVariableLoad(VariableProxy* proxy,
                                               TypeofMode typeof_mode) {
  Variable* var = proxy->var();
  DCHECK(var->IsUnallocatedOrGlobalSlot() ||
         (var->IsLookupSlot() && var->mode() == DYNAMIC_GLOBAL));
  // Receiver = native_context.extension (the global object).
  __ mov(LoadDescriptor::ReceiverRegister(), NativeContextOperand());
  __ mov(LoadDescriptor::ReceiverRegister(),
         ContextOperand(LoadDescriptor::ReceiverRegister(),
                        Context::EXTENSION_INDEX));
  __ mov(LoadDescriptor::NameRegister(), var->name());
  __ mov(LoadDescriptor::SlotRegister(),
         Immediate(SmiFromSlot(proxy->VariableFeedbackSlot())));
  CallLoadIC(typeof_mode);
}
1241
1242
// Loads the variable referenced by |proxy| and plugs the result into the
// current expression context. Dispatches on the variable's storage location:
// global (IC load), stack/context slot (direct load, with hole check for
// let/const), or dynamic lookup (fast path plus runtime fallback).
void FullCodeGenerator::EmitVariableLoad(VariableProxy* proxy,
                                         TypeofMode typeof_mode) {
  SetExpressionPosition(proxy);
  PrepareForBailoutForId(proxy->BeforeId(), BailoutState::NO_REGISTERS);
  Variable* var = proxy->var();

  // Three cases: global variables, lookup variables, and all other types of
  // variables.
  switch (var->location()) {
    case VariableLocation::GLOBAL:
    case VariableLocation::UNALLOCATED: {
      Comment cmnt(masm_, "[ Global variable");
      EmitGlobalVariableLoad(proxy, typeof_mode);
      context()->Plug(eax);
      break;
    }

    case VariableLocation::PARAMETER:
    case VariableLocation::LOCAL:
    case VariableLocation::CONTEXT: {
      DCHECK_EQ(NOT_INSIDE_TYPEOF, typeof_mode);
      Comment cmnt(masm_, var->IsContextSlot() ? "[ Context variable"
                                               : "[ Stack variable");

      if (NeedsHoleCheckForLoad(proxy)) {
        // Let and const need a read barrier.
        Label done;
        GetVar(eax, var);
        __ cmp(eax, isolate()->factory()->the_hole_value());
        __ j(not_equal, &done, Label::kNear);
        if (var->mode() == LET || var->mode() == CONST) {
          // Throw a reference error when using an uninitialized let/const
          // binding in harmony mode.
          __ push(Immediate(var->name()));
          __ CallRuntime(Runtime::kThrowReferenceError);
        }
        __ bind(&done);
        context()->Plug(eax);
        break;
      }
      // No hole check needed: plug the variable's slot directly.
      context()->Plug(var);
      break;
    }

    case VariableLocation::LOOKUP: {
      Comment cmnt(masm_, "[ Lookup variable");
      Label done, slow;
      // Generate code for loading from variables potentially shadowed
      // by eval-introduced variables.
      EmitDynamicLookupFastCase(proxy, typeof_mode, &slow, &done);
      __ bind(&slow);
      __ push(Immediate(var->name()));
      // Inside typeof, an unresolvable reference yields "undefined" instead
      // of throwing, hence the distinct runtime entry.
      Runtime::FunctionId function_id =
          typeof_mode == NOT_INSIDE_TYPEOF
              ? Runtime::kLoadLookupSlot
              : Runtime::kLoadLookupSlotInsideTypeof;
      __ CallRuntime(function_id);
      __ bind(&done);
      context()->Plug(eax);
      break;
    }
  }
}
1306
1307
// Materializes a RegExp literal by calling FastCloneRegExpStub with the
// enclosing closure, literal index, pattern, and flags; result in eax.
void FullCodeGenerator::VisitRegExpLiteral(RegExpLiteral* expr) {
  Comment cmnt(masm_, "[ RegExpLiteral");
  __ mov(edi, Operand(ebp, JavaScriptFrameConstants::kFunctionOffset));
  __ Move(eax, Immediate(Smi::FromInt(expr->literal_index())));
  __ Move(ecx, Immediate(expr->pattern()));
  __ Move(edx, Immediate(Smi::FromInt(expr->flags())));
  FastCloneRegExpStub stub(isolate());
  __ CallStub(&stub);
  context()->Plug(eax);
}
1318
1319
// Pushes the accessor function for |property| onto the operand stack, or
// null when the getter/setter half of the pair is absent. Also wires up the
// [[HomeObject]] when the accessor body needs it.
void FullCodeGenerator::EmitAccessor(ObjectLiteralProperty* property) {
  Expression* expression = (property == NULL) ? NULL : property->value();
  if (expression == NULL) {
    PushOperand(isolate()->factory()->null_value());
  } else {
    VisitForStackValue(expression);
    if (NeedsHomeObject(expression)) {
      DCHECK(property->kind() == ObjectLiteral::Property::GETTER ||
             property->kind() == ObjectLiteral::Property::SETTER);
      // Stack offset of the receiver relative to the accessor just pushed:
      // getter sits 2 slots deep, setter 3 (see caller's push order).
      int offset = property->kind() == ObjectLiteral::Property::GETTER ? 2 : 3;
      EmitSetHomeObject(expression, offset, property->GetSlot());
    }
  }
}
1334
1335
// Compiles an object literal: first clones the boilerplate (via stub or
// runtime), then initializes properties in three phases — the "static" part
// with known names, paired accessors (one runtime call per getter/setter
// pair), and finally the "dynamic" part starting at the first computed
// property name, which must preserve insertion order.
void FullCodeGenerator::VisitObjectLiteral(ObjectLiteral* expr) {
  Comment cmnt(masm_, "[ ObjectLiteral");

  Handle<FixedArray> constant_properties = expr->constant_properties();
  int flags = expr->ComputeFlags();
  // If any of the keys would store to the elements array, then we shouldn't
  // allow it.
  if (MustCreateObjectLiteralWithRuntime(expr)) {
    __ push(Operand(ebp, JavaScriptFrameConstants::kFunctionOffset));
    __ push(Immediate(Smi::FromInt(expr->literal_index())));
    __ push(Immediate(constant_properties));
    __ push(Immediate(Smi::FromInt(flags)));
    __ CallRuntime(Runtime::kCreateObjectLiteral);
  } else {
    __ mov(eax, Operand(ebp, JavaScriptFrameConstants::kFunctionOffset));
    __ mov(ebx, Immediate(Smi::FromInt(expr->literal_index())));
    __ mov(ecx, Immediate(constant_properties));
    __ mov(edx, Immediate(Smi::FromInt(flags)));
    FastCloneShallowObjectStub stub(isolate(), expr->properties_count());
    __ CallStub(&stub);
    RestoreContext();
  }
  PrepareForBailoutForId(expr->CreateLiteralId(), BailoutState::TOS_REGISTER);

  // If result_saved is true the result is on top of the stack.  If
  // result_saved is false the result is in eax.
  bool result_saved = false;

  AccessorTable accessor_table(zone());
  int property_index = 0;
  // Phase 1: properties up to the first computed name.
  for (; property_index < expr->properties()->length(); property_index++) {
    ObjectLiteral::Property* property = expr->properties()->at(property_index);
    if (property->is_computed_name()) break;
    if (property->IsCompileTimeValue()) continue;

    Literal* key = property->key()->AsLiteral();
    Expression* value = property->value();
    if (!result_saved) {
      PushOperand(eax);  // Save result on the stack
      result_saved = true;
    }
    switch (property->kind()) {
      case ObjectLiteral::Property::CONSTANT:
        UNREACHABLE();
      case ObjectLiteral::Property::MATERIALIZED_LITERAL:
        DCHECK(!CompileTimeValue::IsCompileTimeValue(value));
        // Fall through.
      case ObjectLiteral::Property::COMPUTED:
        // It is safe to use [[Put]] here because the boilerplate already
        // contains computed properties with an uninitialized value.
        if (key->value()->IsInternalizedString()) {
          if (property->emit_store()) {
            VisitForAccumulatorValue(value);
            DCHECK(StoreDescriptor::ValueRegister().is(eax));
            __ mov(StoreDescriptor::NameRegister(), Immediate(key->value()));
            __ mov(StoreDescriptor::ReceiverRegister(), Operand(esp, 0));
            EmitLoadStoreICSlot(property->GetSlot(0));
            CallStoreIC();
            PrepareForBailoutForId(key->id(), BailoutState::NO_REGISTERS);
            if (NeedsHomeObject(value)) {
              EmitSetHomeObjectAccumulator(value, 0, property->GetSlot(1));
            }
          } else {
            VisitForEffect(value);
          }
          break;
        }
        // Non-internalized key: fall back to the generic runtime store.
        PushOperand(Operand(esp, 0));  // Duplicate receiver.
        VisitForStackValue(key);
        VisitForStackValue(value);
        if (property->emit_store()) {
          if (NeedsHomeObject(value)) {
            EmitSetHomeObject(value, 2, property->GetSlot());
          }
          PushOperand(Smi::FromInt(SLOPPY));  // Language mode
          CallRuntimeWithOperands(Runtime::kSetProperty);
        } else {
          DropOperands(3);
        }
        break;
      case ObjectLiteral::Property::PROTOTYPE:
        PushOperand(Operand(esp, 0));  // Duplicate receiver.
        VisitForStackValue(value);
        DCHECK(property->emit_store());
        CallRuntimeWithOperands(Runtime::kInternalSetPrototype);
        PrepareForBailoutForId(expr->GetIdForPropertySet(property_index),
                               BailoutState::NO_REGISTERS);
        break;
      case ObjectLiteral::Property::GETTER:
        // Accessors are collected and emitted in pairs below.
        if (property->emit_store()) {
          accessor_table.lookup(key)->second->getter = property;
        }
        break;
      case ObjectLiteral::Property::SETTER:
        if (property->emit_store()) {
          accessor_table.lookup(key)->second->setter = property;
        }
        break;
    }
  }

  // Emit code to define accessors, using only a single call to the runtime for
  // each pair of corresponding getters and setters.
  for (AccessorTable::Iterator it = accessor_table.begin();
       it != accessor_table.end();
       ++it) {
    PushOperand(Operand(esp, 0));  // Duplicate receiver.
    VisitForStackValue(it->first);

    EmitAccessor(it->second->getter);
    EmitAccessor(it->second->setter);

    PushOperand(Smi::FromInt(NONE));
    CallRuntimeWithOperands(Runtime::kDefineAccessorPropertyUnchecked);
  }

  // Object literals have two parts. The "static" part on the left contains no
  // computed property names, and so we can compute its map ahead of time; see
  // runtime.cc::CreateObjectLiteralBoilerplate. The second "dynamic" part
  // starts with the first computed property name, and continues with all
  // properties to its right.  All the code from above initializes the static
  // component of the object literal, and arranges for the map of the result to
  // reflect the static order in which the keys appear. For the dynamic
  // properties, we compile them into a series of "SetOwnProperty" runtime
  // calls. This will preserve insertion order.
  for (; property_index < expr->properties()->length(); property_index++) {
    ObjectLiteral::Property* property = expr->properties()->at(property_index);

    Expression* value = property->value();
    if (!result_saved) {
      PushOperand(eax);  // Save result on the stack
      result_saved = true;
    }

    PushOperand(Operand(esp, 0));  // Duplicate receiver.

    if (property->kind() == ObjectLiteral::Property::PROTOTYPE) {
      DCHECK(!property->is_computed_name());
      VisitForStackValue(value);
      DCHECK(property->emit_store());
      CallRuntimeWithOperands(Runtime::kInternalSetPrototype);
      PrepareForBailoutForId(expr->GetIdForPropertySet(property_index),
                             BailoutState::NO_REGISTERS);
    } else {
      EmitPropertyKey(property, expr->GetIdForPropertyName(property_index));
      VisitForStackValue(value);
      if (NeedsHomeObject(value)) {
        EmitSetHomeObject(value, 2, property->GetSlot());
      }

      switch (property->kind()) {
        case ObjectLiteral::Property::CONSTANT:
        case ObjectLiteral::Property::MATERIALIZED_LITERAL:
        case ObjectLiteral::Property::COMPUTED:
          if (property->emit_store()) {
            PushOperand(Smi::FromInt(NONE));
            PushOperand(Smi::FromInt(property->NeedsSetFunctionName()));
            CallRuntimeWithOperands(Runtime::kDefineDataPropertyInLiteral);
          } else {
            DropOperands(3);
          }
          break;

        case ObjectLiteral::Property::PROTOTYPE:
          UNREACHABLE();
          break;

        case ObjectLiteral::Property::GETTER:
          PushOperand(Smi::FromInt(NONE));
          CallRuntimeWithOperands(Runtime::kDefineGetterPropertyUnchecked);
          break;

        case ObjectLiteral::Property::SETTER:
          PushOperand(Smi::FromInt(NONE));
          CallRuntimeWithOperands(Runtime::kDefineSetterPropertyUnchecked);
          break;
      }
    }
  }

  // Hand the literal to the surrounding expression context.
  if (result_saved) {
    context()->PlugTOS();
  } else {
    context()->Plug(eax);
  }
}
1522
1523
// Compiles an array literal: clones the boilerplate elements (via stub or
// runtime), then evaluates and stores the non-constant subexpressions. If
// the literal contains spread expressions, elements from the first spread
// onward are appended one by one via the runtime, since an iterable's length
// is unknown ahead of time.
void FullCodeGenerator::VisitArrayLiteral(ArrayLiteral* expr) {
  Comment cmnt(masm_, "[ ArrayLiteral");

  Handle<FixedArray> constant_elements = expr->constant_elements();
  bool has_constant_fast_elements =
      IsFastObjectElementsKind(expr->constant_elements_kind());

  AllocationSiteMode allocation_site_mode = TRACK_ALLOCATION_SITE;
  if (has_constant_fast_elements && !FLAG_allocation_site_pretenuring) {
    // If the only customer of allocation sites is transitioning, then
    // we can turn it off if we don't have anywhere else to transition to.
    allocation_site_mode = DONT_TRACK_ALLOCATION_SITE;
  }

  if (MustCreateArrayLiteralWithRuntime(expr)) {
    __ push(Operand(ebp, JavaScriptFrameConstants::kFunctionOffset));
    __ push(Immediate(Smi::FromInt(expr->literal_index())));
    __ push(Immediate(constant_elements));
    __ push(Immediate(Smi::FromInt(expr->ComputeFlags())));
    __ CallRuntime(Runtime::kCreateArrayLiteral);
  } else {
    __ mov(eax, Operand(ebp, JavaScriptFrameConstants::kFunctionOffset));
    __ mov(ebx, Immediate(Smi::FromInt(expr->literal_index())));
    __ mov(ecx, Immediate(constant_elements));
    FastCloneShallowArrayStub stub(isolate(), allocation_site_mode);
    __ CallStub(&stub);
  }
  PrepareForBailoutForId(expr->CreateLiteralId(), BailoutState::TOS_REGISTER);

  bool result_saved = false;  // Is the result saved to the stack?
  ZoneList<Expression*>* subexprs = expr->values();
  int length = subexprs->length();

  // Emit code to evaluate all the non-constant subexpressions and to store
  // them into the newly cloned array.
  int array_index = 0;
  for (; array_index < length; array_index++) {
    Expression* subexpr = subexprs->at(array_index);
    DCHECK(!subexpr->IsSpread());

    // If the subexpression is a literal or a simple materialized literal it
    // is already set in the cloned array.
    if (CompileTimeValue::IsCompileTimeValue(subexpr)) continue;

    if (!result_saved) {
      PushOperand(eax);  // array literal.
      result_saved = true;
    }
    VisitForAccumulatorValue(subexpr);

    // Store the element via a keyed store IC, keyed by the array index.
    __ mov(StoreDescriptor::NameRegister(),
           Immediate(Smi::FromInt(array_index)));
    __ mov(StoreDescriptor::ReceiverRegister(), Operand(esp, 0));
    EmitLoadStoreICSlot(expr->LiteralFeedbackSlot());
    Handle<Code> ic =
        CodeFactory::KeyedStoreIC(isolate(), language_mode()).code();
    CallIC(ic);
    PrepareForBailoutForId(expr->GetIdForElement(array_index),
                           BailoutState::NO_REGISTERS);
  }

  // In case the array literal contains spread expressions it has two parts. The
  // first part is the "static" array which has a literal index is handled
  // above. The second part is the part after the first spread expression
  // (inclusive) and these elements gets appended to the array. Note that the
  // number elements an iterable produces is unknown ahead of time.
  if (array_index < length && result_saved) {
    PopOperand(eax);  // Move the array back into the accumulator.
    result_saved = false;
  }
  for (; array_index < length; array_index++) {
    Expression* subexpr = subexprs->at(array_index);

    PushOperand(eax);  // The array (kAppendElement returns it in eax).
    DCHECK(!subexpr->IsSpread());
    VisitForStackValue(subexpr);
    CallRuntimeWithOperands(Runtime::kAppendElement);

    PrepareForBailoutForId(expr->GetIdForElement(array_index),
                           BailoutState::NO_REGISTERS);
  }

  // Hand the literal to the surrounding expression context.
  if (result_saved) {
    context()->PlugTOS();
  } else {
    context()->Plug(eax);
  }
}
1612
1613
// Generates code for an assignment expression (plain or compound, e.g. `x +=
// y`). First evaluates the LHS reference (receiver/key as needed), then for
// compound assignments loads the current value, evaluates the RHS, and applies
// the binary op; finally stores the result and plugs it into the context.
void FullCodeGenerator::VisitAssignment(Assignment* expr) {
  DCHECK(expr->target()->IsValidReferenceExpressionOrThis());

  Comment cmnt(masm_, "[ Assignment");

  Property* property = expr->target()->AsProperty();
  LhsKind assign_type = Property::GetAssignType(property);

  // Evaluate LHS expression. What ends up on the operand stack depends on the
  // reference kind; compound assignments additionally duplicate the receiver
  // (and key) so the subsequent load does not consume them.
  switch (assign_type) {
    case VARIABLE:
      // Nothing to do here.
      break;
    case NAMED_SUPER_PROPERTY:
      // Stack after this: this, home_object [, this, home_object if compound].
      VisitForStackValue(
          property->obj()->AsSuperPropertyReference()->this_var());
      VisitForAccumulatorValue(
          property->obj()->AsSuperPropertyReference()->home_object());
      PushOperand(result_register());
      if (expr->is_compound()) {
        PushOperand(MemOperand(esp, kPointerSize));
        PushOperand(result_register());
      }
      break;
    case NAMED_PROPERTY:
      if (expr->is_compound()) {
        // We need the receiver both on the stack and in the register.
        VisitForStackValue(property->obj());
        __ mov(LoadDescriptor::ReceiverRegister(), Operand(esp, 0));
      } else {
        VisitForStackValue(property->obj());
      }
      break;
    case KEYED_SUPER_PROPERTY:
      // Stack after this: this, home_object, key
      // [, this, home_object, key if compound].
      VisitForStackValue(
          property->obj()->AsSuperPropertyReference()->this_var());
      VisitForStackValue(
          property->obj()->AsSuperPropertyReference()->home_object());
      VisitForAccumulatorValue(property->key());
      PushOperand(result_register());
      if (expr->is_compound()) {
        PushOperand(MemOperand(esp, 2 * kPointerSize));
        PushOperand(MemOperand(esp, 2 * kPointerSize));
        PushOperand(result_register());
      }
      break;
    case KEYED_PROPERTY: {
      if (expr->is_compound()) {
        // Keep receiver and key on the stack and also in the IC registers
        // for the upcoming keyed load.
        VisitForStackValue(property->obj());
        VisitForStackValue(property->key());
        __ mov(LoadDescriptor::ReceiverRegister(), Operand(esp, kPointerSize));
        __ mov(LoadDescriptor::NameRegister(), Operand(esp, 0));
      } else {
        VisitForStackValue(property->obj());
        VisitForStackValue(property->key());
      }
      break;
    }
  }

  // For compound assignments we need another deoptimization point after the
  // variable/property load.
  if (expr->is_compound()) {
    AccumulatorValueContext result_context(this);
    { AccumulatorValueContext left_operand_context(this);
      switch (assign_type) {
        case VARIABLE:
          EmitVariableLoad(expr->target()->AsVariableProxy());
          PrepareForBailout(expr->target(), BailoutState::TOS_REGISTER);
          break;
        case NAMED_SUPER_PROPERTY:
          EmitNamedSuperPropertyLoad(property);
          PrepareForBailoutForId(property->LoadId(),
                                 BailoutState::TOS_REGISTER);
          break;
        case NAMED_PROPERTY:
          EmitNamedPropertyLoad(property);
          PrepareForBailoutForId(property->LoadId(),
                                 BailoutState::TOS_REGISTER);
          break;
        case KEYED_SUPER_PROPERTY:
          EmitKeyedSuperPropertyLoad(property);
          PrepareForBailoutForId(property->LoadId(),
                                 BailoutState::TOS_REGISTER);
          break;
        case KEYED_PROPERTY:
          EmitKeyedPropertyLoad(property);
          PrepareForBailoutForId(property->LoadId(),
                                 BailoutState::TOS_REGISTER);
          break;
      }
    }

    Token::Value op = expr->binary_op();
    PushOperand(eax);  // Left operand goes on the stack.
    VisitForAccumulatorValue(expr->value());

    if (ShouldInlineSmiCase(op)) {
      EmitInlineSmiBinaryOp(expr->binary_operation(),
                            op,
                            expr->target(),
                            expr->value());
    } else {
      EmitBinaryOp(expr->binary_operation(), op);
    }

    // Deoptimization point in case the binary operation may have side effects.
    PrepareForBailout(expr->binary_operation(), BailoutState::TOS_REGISTER);
  } else {
    VisitForAccumulatorValue(expr->value());
  }

  SetExpressionPosition(expr);

  // Store the value. The value to store is in eax (the accumulator).
  switch (assign_type) {
    case VARIABLE:
      EmitVariableAssignment(expr->target()->AsVariableProxy()->var(),
                             expr->op(), expr->AssignmentSlot());
      PrepareForBailoutForId(expr->AssignmentId(), BailoutState::TOS_REGISTER);
      context()->Plug(eax);
      break;
    case NAMED_PROPERTY:
      EmitNamedPropertyAssignment(expr);
      break;
    case NAMED_SUPER_PROPERTY:
      EmitNamedSuperPropertyStore(property);
      context()->Plug(result_register());
      break;
    case KEYED_SUPER_PROPERTY:
      EmitKeyedSuperPropertyStore(property);
      context()->Plug(result_register());
      break;
    case KEYED_PROPERTY:
      EmitKeyedPropertyAssignment(expr);
      break;
  }
}
1752
1753
// Generates code for a `yield` expression inside a generator. Emits the
// suspend sequence (record continuation offset and context, call the runtime
// to suspend, return to the caller) and the resume sequence (dispatch on the
// resume mode: next/return/throw) that execution jumps back to when the
// generator is resumed.
void FullCodeGenerator::VisitYield(Yield* expr) {
  Comment cmnt(masm_, "[ Yield");
  SetExpressionPosition(expr);

  // Evaluate yielded value first; the initial iterator definition depends on
  // this. It stays on the stack while we update the iterator.
  VisitForStackValue(expr->expression());

  Label suspend, continuation, post_runtime, resume, exception;

  __ jmp(&suspend);
  __ bind(&continuation);
  // When we arrive here, eax holds the generator object.
  __ RecordGeneratorContinuation();
  // Load the resume mode and the input value sent to the generator.
  __ mov(ebx, FieldOperand(eax, JSGeneratorObject::kResumeModeOffset));
  __ mov(eax, FieldOperand(eax, JSGeneratorObject::kInputOffset));
  // The ordering of the mode constants lets a single compare against kReturn
  // distinguish all three cases: less => next, equal => return, greater =>
  // throw.
  STATIC_ASSERT(JSGeneratorObject::kNext < JSGeneratorObject::kReturn);
  STATIC_ASSERT(JSGeneratorObject::kThrow > JSGeneratorObject::kReturn);
  __ cmp(ebx, Immediate(Smi::FromInt(JSGeneratorObject::kReturn)));
  __ j(less, &resume);
  __ Push(result_register());
  __ j(greater, &exception);
  // Resume mode is kReturn: wrap the input in a done iterator result and
  // unwind out of the generator.
  EmitCreateIteratorResult(true);
  EmitUnwindAndReturn();

  __ bind(&exception);
  // Resume mode is kThrow: rethrow the input value.
  __ CallRuntime(Runtime::kThrow);

  __ bind(&suspend);
  OperandStackDepthIncrement(1);  // Not popped on this path.
  VisitForAccumulatorValue(expr->generator_object());
  DCHECK(continuation.pos() > 0 && Smi::IsValid(continuation.pos()));
  // Record where to resume (the &continuation bind site) and the current
  // context in the generator object.
  __ mov(FieldOperand(eax, JSGeneratorObject::kContinuationOffset),
         Immediate(Smi::FromInt(continuation.pos())));
  __ mov(FieldOperand(eax, JSGeneratorObject::kContextOffset), esi);
  __ mov(ecx, esi);
  __ RecordWriteField(eax, JSGeneratorObject::kContextOffset, ecx, edx,
                      kDontSaveFPRegs);
  // If the operand stack is empty (esp at the expression-stack base), the
  // runtime call can be skipped; otherwise the runtime captures the stack.
  __ lea(ebx, Operand(ebp, StandardFrameConstants::kExpressionsOffset));
  __ cmp(esp, ebx);
  __ j(equal, &post_runtime);
  __ push(eax);  // generator object
  __ CallRuntime(Runtime::kSuspendJSGeneratorObject, 1);
  RestoreContext();
  __ bind(&post_runtime);
  PopOperand(result_register());
  EmitReturnSequence();

  __ bind(&resume);
  context()->Plug(result_register());
}
1805
// Pushes a memory operand onto the machine stack while keeping the tracked
// operand stack depth in sync (the depth is verified by
// EmitOperandStackDepthCheck in debug builds).
void FullCodeGenerator::PushOperand(MemOperand operand) {
  OperandStackDepthIncrement(1);
  __ Push(operand);
}
1810
// Debug-only sanity check: asserts that the real distance between ebp and esp
// matches the fixed frame size plus the statically tracked operand stack
// depth. No code is emitted unless --debug-code is on.
void FullCodeGenerator::EmitOperandStackDepthCheck() {
  if (FLAG_debug_code) {
    int expected_diff = StandardFrameConstants::kFixedFrameSizeFromFp +
                        operand_stack_depth_ * kPointerSize;
    // Clobbers eax; only reached in debug-code builds.
    __ mov(eax, ebp);
    __ sub(eax, esp);
    __ cmp(eax, Immediate(expected_diff));
    __ Assert(equal, kUnexpectedStackDepth);
  }
}
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00001821
// Allocates a JSIteratorResult {value, done} object in eax. The value is
// popped from the operand stack; `done` is baked in as a compile-time boolean.
// Falls back to a runtime allocation when new space is full.
void FullCodeGenerator::EmitCreateIteratorResult(bool done) {
  Label allocate, done_allocate;

  // Fast path: inline new-space allocation; ecx/edx are scratch.
  __ Allocate(JSIteratorResult::kSize, eax, ecx, edx, &allocate,
              NO_ALLOCATION_FLAGS);
  __ jmp(&done_allocate, Label::kNear);

  // Slow path: allocate through the runtime.
  __ bind(&allocate);
  __ Push(Smi::FromInt(JSIteratorResult::kSize));
  __ CallRuntime(Runtime::kAllocateInNewSpace);

  __ bind(&done_allocate);
  // Initialize map, properties, and elements.
  __ mov(ebx, NativeContextOperand());
  __ mov(ebx, ContextOperand(ebx, Context::ITERATOR_RESULT_MAP_INDEX));
  __ mov(FieldOperand(eax, HeapObject::kMapOffset), ebx);
  __ mov(FieldOperand(eax, JSObject::kPropertiesOffset),
         isolate()->factory()->empty_fixed_array());
  __ mov(FieldOperand(eax, JSObject::kElementsOffset),
         isolate()->factory()->empty_fixed_array());
  // Pop the result value straight into the `value` field.
  __ pop(FieldOperand(eax, JSIteratorResult::kValueOffset));
  __ mov(FieldOperand(eax, JSIteratorResult::kDoneOffset),
         isolate()->factory()->ToBoolean(done));
  STATIC_ASSERT(JSIteratorResult::kSize == 5 * kPointerSize);
  OperandStackDepthDecrement(1);
}
1847
1848
// Emits an inlined Smi fast path for a binary operation, with a patchable
// jump (JumpPatchSite) that falls back to the BinaryOpIC stub for non-Smi
// operands or on overflow. Left operand is on the operand stack, right
// operand is in eax; the result is left in eax.
void FullCodeGenerator::EmitInlineSmiBinaryOp(BinaryOperation* expr,
                                              Token::Value op,
                                              Expression* left,
                                              Expression* right) {
  // Do combined smi check of the operands. Left operand is on the
  // stack. Right operand is in eax.
  Label smi_case, done, stub_call;
  PopOperand(edx);      // edx = left operand.
  __ mov(ecx, eax);     // ecx = saved copy of the right operand.
  __ or_(eax, edx);     // Combined tag check: Smi iff both are Smis.
  JumpPatchSite patch_site(masm_);
  patch_site.EmitJumpIfSmi(eax, &smi_case, Label::kNear);

  __ bind(&stub_call);
  __ mov(eax, ecx);  // Restore the right operand before calling the stub.
  Handle<Code> code = CodeFactory::BinaryOpIC(isolate(), op).code();
  CallIC(code, expr->BinaryOperationFeedbackId());
  patch_site.EmitPatchInfo();
  __ jmp(&done, Label::kNear);

  // Smi case.
  __ bind(&smi_case);
  __ mov(eax, edx);  // Copy left operand in case of a stub call.

  switch (op) {
    case Token::SAR:
      __ SmiUntag(ecx);
      __ sar_cl(eax);  // No checks of result necessary
      // Clear the low bits so the result is a valid Smi again.
      __ and_(eax, Immediate(~kSmiTagMask));
      break;
    case Token::SHL: {
      Label result_ok;
      __ SmiUntag(eax);
      __ SmiUntag(ecx);
      __ shl_cl(eax);
      // Check that the *signed* result fits in a smi.
      __ cmp(eax, 0xc0000000);
      __ j(positive, &result_ok);
      // Doesn't fit: re-tag the shift count and take the stub path.
      __ SmiTag(ecx);
      __ jmp(&stub_call);
      __ bind(&result_ok);
      __ SmiTag(eax);
      break;
    }
    case Token::SHR: {
      Label result_ok;
      __ SmiUntag(eax);
      __ SmiUntag(ecx);
      __ shr_cl(eax);
      // Unsigned result must fit in the 30-bit Smi payload.
      __ test(eax, Immediate(0xc0000000));
      __ j(zero, &result_ok);
      __ SmiTag(ecx);
      __ jmp(&stub_call);
      __ bind(&result_ok);
      __ SmiTag(eax);
      break;
    }
    case Token::ADD:
      __ add(eax, ecx);
      __ j(overflow, &stub_call);
      break;
    case Token::SUB:
      __ sub(eax, ecx);
      __ j(overflow, &stub_call);
      break;
    case Token::MUL: {
      // One operand stays tagged so the product is already tagged.
      __ SmiUntag(eax);
      __ imul(eax, ecx);
      __ j(overflow, &stub_call);
      __ test(eax, eax);
      __ j(not_zero, &done, Label::kNear);
      // Result is zero: must go to the stub if the sign would be -0
      // (i.e. either input was negative).
      __ mov(ebx, edx);
      __ or_(ebx, ecx);
      __ j(negative, &stub_call);
      break;
    }
    case Token::BIT_OR:
      __ or_(eax, ecx);
      break;
    case Token::BIT_AND:
      __ and_(eax, ecx);
      break;
    case Token::BIT_XOR:
      __ xor_(eax, ecx);
      break;
    default:
      UNREACHABLE();
  }

  __ bind(&done);
  context()->Plug(eax);
}
1941
1942
// Defines the methods/accessors of a class literal on the prototype or the
// constructor (for static members). Expects the prototype at esp[0] and the
// constructor at esp[kPointerSize] on entry; that layout is preserved.
void FullCodeGenerator::EmitClassDefineProperties(ClassLiteral* lit) {
  for (int i = 0; i < lit->properties()->length(); i++) {
    ObjectLiteral::Property* property = lit->properties()->at(i);
    Expression* value = property->value();

    // Push the object the property is defined on: constructor for statics,
    // prototype otherwise.
    if (property->is_static()) {
      PushOperand(Operand(esp, kPointerSize));  // constructor
    } else {
      PushOperand(Operand(esp, 0));  // prototype
    }
    EmitPropertyKey(property, lit->GetIdForProperty(i));

    // The static prototype property is read only. We handle the non computed
    // property name case in the parser. Since this is the only case where we
    // need to check for an own read only property we special case this so we do
    // not need to do this for every property.
    if (property->is_static() && property->is_computed_name()) {
      __ CallRuntime(Runtime::kThrowIfStaticPrototype);
      __ push(eax);
    }

    VisitForStackValue(value);
    if (NeedsHomeObject(value)) {
      EmitSetHomeObject(value, 2, property->GetSlot());
    }

    // Dispatch to the appropriate define-property runtime call; all class
    // members are non-enumerable (DONT_ENUM).
    switch (property->kind()) {
      case ObjectLiteral::Property::CONSTANT:
      case ObjectLiteral::Property::MATERIALIZED_LITERAL:
      case ObjectLiteral::Property::PROTOTYPE:
        UNREACHABLE();
      case ObjectLiteral::Property::COMPUTED:
        PushOperand(Smi::FromInt(DONT_ENUM));
        PushOperand(Smi::FromInt(property->NeedsSetFunctionName()));
        CallRuntimeWithOperands(Runtime::kDefineDataPropertyInLiteral);
        break;

      case ObjectLiteral::Property::GETTER:
        PushOperand(Smi::FromInt(DONT_ENUM));
        CallRuntimeWithOperands(Runtime::kDefineGetterPropertyUnchecked);
        break;

      case ObjectLiteral::Property::SETTER:
        PushOperand(Smi::FromInt(DONT_ENUM));
        CallRuntimeWithOperands(Runtime::kDefineSetterPropertyUnchecked);
        break;
    }
  }
}
1992
1993
// Emits a generic (non-inlined) binary operation via the BinaryOpIC stub.
// Left operand is popped from the operand stack into edx, right operand is in
// eax; result ends up in eax. The unbound patch site signals that no inlined
// smi code was emitted.
void FullCodeGenerator::EmitBinaryOp(BinaryOperation* expr, Token::Value op) {
  PopOperand(edx);
  Handle<Code> code = CodeFactory::BinaryOpIC(isolate(), op).code();
  JumpPatchSite patch_site(masm_);  // unbound, signals no inlined smi code.
  CallIC(code, expr->BinaryOperationFeedbackId());
  patch_site.EmitPatchInfo();
  context()->Plug(eax);
}
2002
2003
// Stores the value currently in eax into the given reference expression
// (used e.g. for destructuring/for-in targets, where the value was produced
// elsewhere). Preserves the value and plugs it back into the context.
void FullCodeGenerator::EmitAssignment(Expression* expr,
                                       FeedbackVectorSlot slot) {
  DCHECK(expr->IsValidReferenceExpressionOrThis());

  Property* prop = expr->AsProperty();
  LhsKind assign_type = Property::GetAssignType(prop);

  switch (assign_type) {
    case VARIABLE: {
      Variable* var = expr->AsVariableProxy()->var();
      EffectContext context(this);
      EmitVariableAssignment(var, Token::ASSIGN, slot);
      break;
    }
    case NAMED_PROPERTY: {
      PushOperand(eax);  // Preserve value.
      VisitForAccumulatorValue(prop->obj());
      __ Move(StoreDescriptor::ReceiverRegister(), eax);
      PopOperand(StoreDescriptor::ValueRegister());  // Restore value.
      __ mov(StoreDescriptor::NameRegister(),
             prop->key()->AsLiteral()->value());
      EmitLoadStoreICSlot(slot);
      CallStoreIC();
      break;
    }
    case NAMED_SUPER_PROPERTY: {
      PushOperand(eax);
      VisitForStackValue(prop->obj()->AsSuperPropertyReference()->this_var());
      VisitForAccumulatorValue(
          prop->obj()->AsSuperPropertyReference()->home_object());
      // stack: value, this; eax: home_object
      // Rearrange in place so the stack matches what
      // EmitNamedSuperPropertyStore expects: this, home_object; value in eax.
      Register scratch = ecx;
      Register scratch2 = edx;
      __ mov(scratch, result_register());               // home_object
      __ mov(eax, MemOperand(esp, kPointerSize));       // value
      __ mov(scratch2, MemOperand(esp, 0));             // this
      __ mov(MemOperand(esp, kPointerSize), scratch2);  // this
      __ mov(MemOperand(esp, 0), scratch);              // home_object
      // stack: this, home_object. eax: value
      EmitNamedSuperPropertyStore(prop);
      break;
    }
    case KEYED_SUPER_PROPERTY: {
      PushOperand(eax);
      VisitForStackValue(prop->obj()->AsSuperPropertyReference()->this_var());
      VisitForStackValue(
          prop->obj()->AsSuperPropertyReference()->home_object());
      VisitForAccumulatorValue(prop->key());
      // Rotate the three stack slots so the layout becomes
      // this, home_object, key with the value back in eax.
      Register scratch = ecx;
      Register scratch2 = edx;
      __ mov(scratch2, MemOperand(esp, 2 * kPointerSize));  // value
      // stack: value, this, home_object; eax: key, edx: value
      __ mov(scratch, MemOperand(esp, kPointerSize));  // this
      __ mov(MemOperand(esp, 2 * kPointerSize), scratch);
      __ mov(scratch, MemOperand(esp, 0));  // home_object
      __ mov(MemOperand(esp, kPointerSize), scratch);
      __ mov(MemOperand(esp, 0), eax);
      __ mov(eax, scratch2);
      // stack: this, home_object, key; eax: value.
      EmitKeyedSuperPropertyStore(prop);
      break;
    }
    case KEYED_PROPERTY: {
      PushOperand(eax);  // Preserve value.
      VisitForStackValue(prop->obj());
      VisitForAccumulatorValue(prop->key());
      __ Move(StoreDescriptor::NameRegister(), eax);
      PopOperand(StoreDescriptor::ReceiverRegister());  // Receiver.
      PopOperand(StoreDescriptor::ValueRegister());     // Restore value.
      EmitLoadStoreICSlot(slot);
      Handle<Code> ic =
          CodeFactory::KeyedStoreIC(isolate(), language_mode()).code();
      CallIC(ic);
      break;
    }
  }
  context()->Plug(eax);
}
2082
2083
// Stores eax into the given stack or context slot. For context slots a write
// barrier is emitted; expects ecx to hold the context object (as set up by
// VarOperand). Clobbers edx and ebx on the context-slot path.
void FullCodeGenerator::EmitStoreToStackLocalOrContextSlot(
    Variable* var, MemOperand location) {
  __ mov(location, eax);
  if (var->IsContextSlot()) {
    // Record-write needs the value in a scratch register; keep eax intact.
    __ mov(edx, eax);
    int offset = Context::SlotOffset(var->index());
    __ RecordWriteContextSlot(ecx, offset, edx, ebx, kDontSaveFPRegs);
  }
}
2093
2094
// Stores the value in eax into a variable, honoring the variable's mode and
// whether this is an initializing (Token::INIT) or ordinary assignment:
// globals go through the StoreIC, let/const get TDZ hole checks, legacy
// consts throw in strict mode and are silently ignored in sloppy mode.
void FullCodeGenerator::EmitVariableAssignment(Variable* var, Token::Value op,
                                               FeedbackVectorSlot slot) {
  if (var->IsUnallocated()) {
    // Global var, const, or let.
    __ mov(StoreDescriptor::NameRegister(), var->name());
    // Receiver is the global object, loaded from the native context.
    __ mov(StoreDescriptor::ReceiverRegister(), NativeContextOperand());
    __ mov(StoreDescriptor::ReceiverRegister(),
           ContextOperand(StoreDescriptor::ReceiverRegister(),
                          Context::EXTENSION_INDEX));
    EmitLoadStoreICSlot(slot);
    CallStoreIC();

  } else if (var->mode() == LET && op != Token::INIT) {
    // Non-initializing assignment to let variable needs a write barrier.
    DCHECK(!var->IsLookupSlot());
    DCHECK(var->IsStackAllocated() || var->IsContextSlot());
    Label assign;
    MemOperand location = VarOperand(var, ecx);
    __ mov(edx, location);
    // TDZ check: assigning before initialization throws a ReferenceError.
    __ cmp(edx, isolate()->factory()->the_hole_value());
    __ j(not_equal, &assign, Label::kNear);
    __ push(Immediate(var->name()));
    __ CallRuntime(Runtime::kThrowReferenceError);
    __ bind(&assign);
    EmitStoreToStackLocalOrContextSlot(var, location);

  } else if (var->mode() == CONST && op != Token::INIT) {
    // Assignment to const variable needs a write barrier.
    DCHECK(!var->IsLookupSlot());
    DCHECK(var->IsStackAllocated() || var->IsContextSlot());
    Label const_error;
    MemOperand location = VarOperand(var, ecx);
    __ mov(edx, location);
    // Still in the TDZ => ReferenceError; otherwise assigning to a const is
    // always an error.
    __ cmp(edx, isolate()->factory()->the_hole_value());
    __ j(not_equal, &const_error, Label::kNear);
    __ push(Immediate(var->name()));
    __ CallRuntime(Runtime::kThrowReferenceError);
    __ bind(&const_error);
    __ CallRuntime(Runtime::kThrowConstAssignError);

  } else if (var->is_this() && var->mode() == CONST && op == Token::INIT) {
    // Initializing assignment to const {this} needs a write barrier.
    DCHECK(var->IsStackAllocated() || var->IsContextSlot());
    Label uninitialized_this;
    MemOperand location = VarOperand(var, ecx);
    __ mov(edx, location);
    // `this` may only be initialized once (e.g. derived-constructor super()).
    __ cmp(edx, isolate()->factory()->the_hole_value());
    __ j(equal, &uninitialized_this);
    __ push(Immediate(var->name()));
    __ CallRuntime(Runtime::kThrowReferenceError);
    __ bind(&uninitialized_this);
    EmitStoreToStackLocalOrContextSlot(var, location);

  } else if (!var->is_const_mode() || op == Token::INIT) {
    if (var->IsLookupSlot()) {
      // Assignment to var.
      __ Push(Immediate(var->name()));
      __ Push(eax);
      __ CallRuntime(is_strict(language_mode())
                         ? Runtime::kStoreLookupSlot_Strict
                         : Runtime::kStoreLookupSlot_Sloppy);
    } else {
      // Assignment to var or initializing assignment to let/const in harmony
      // mode.
      DCHECK(var->IsStackAllocated() || var->IsContextSlot());
      MemOperand location = VarOperand(var, ecx);
      if (FLAG_debug_code && var->mode() == LET && op == Token::INIT) {
        // Check for an uninitialized let binding.
        __ mov(edx, location);
        __ cmp(edx, isolate()->factory()->the_hole_value());
        __ Check(equal, kLetBindingReInitialization);
      }
      EmitStoreToStackLocalOrContextSlot(var, location);
    }

  } else {
    DCHECK(var->mode() == CONST_LEGACY && op != Token::INIT);
    if (is_strict(language_mode())) {
      __ CallRuntime(Runtime::kThrowConstAssignError);
    }
    // Silently ignore store in sloppy mode.
  }
}
2178
2179
// Completes a named-property assignment (`obj.name = value`) via the StoreIC.
void FullCodeGenerator::EmitNamedPropertyAssignment(Assignment* expr) {
  // Assignment to a property, using a named store IC.
  // eax : value
  // esp[0] : receiver
  Property* prop = expr->target()->AsProperty();
  DCHECK(prop != NULL);
  DCHECK(prop->key()->IsLiteral());

  __ mov(StoreDescriptor::NameRegister(), prop->key()->AsLiteral()->value());
  PopOperand(StoreDescriptor::ReceiverRegister());
  EmitLoadStoreICSlot(expr->AssignmentSlot());
  CallStoreIC();
  // Deopt point after the store; the assigned value remains in eax.
  PrepareForBailoutForId(expr->AssignmentId(), BailoutState::TOS_REGISTER);
  context()->Plug(eax);
}
2195
2196
// Stores eax into a named property of the super object via the runtime
// (strict or sloppy variant, depending on language mode).
void FullCodeGenerator::EmitNamedSuperPropertyStore(Property* prop) {
  // Assignment to named property of super.
  // eax : value
  // stack : receiver ('this'), home_object
  DCHECK(prop != NULL);
  Literal* key = prop->key()->AsLiteral();
  DCHECK(key != NULL);

  // Runtime args (from the stack): receiver, home_object, name, value.
  PushOperand(key->value());
  PushOperand(eax);
  CallRuntimeWithOperands(is_strict(language_mode())
                              ? Runtime::kStoreToSuper_Strict
                              : Runtime::kStoreToSuper_Sloppy);
}
2211
2212
// Stores eax into a keyed property of the super object via the runtime
// (strict or sloppy variant, depending on language mode).
void FullCodeGenerator::EmitKeyedSuperPropertyStore(Property* prop) {
  // Assignment to named property of super.
  // eax : value
  // stack : receiver ('this'), home_object, key

  // Runtime args (from the stack): receiver, home_object, key, value.
  PushOperand(eax);
  CallRuntimeWithOperands(is_strict(language_mode())
                              ? Runtime::kStoreKeyedToSuper_Strict
                              : Runtime::kStoreKeyedToSuper_Sloppy);
}
2223
2224
// Completes a keyed-property assignment (`obj[key] = value`) via the
// KeyedStoreIC.
void FullCodeGenerator::EmitKeyedPropertyAssignment(Assignment* expr) {
  // Assignment to a property, using a keyed store IC.
  // eax : value
  // esp[0] : key
  // esp[kPointerSize] : receiver

  PopOperand(StoreDescriptor::NameRegister());  // Key.
  PopOperand(StoreDescriptor::ReceiverRegister());
  DCHECK(StoreDescriptor::ValueRegister().is(eax));
  Handle<Code> ic =
      CodeFactory::KeyedStoreIC(isolate(), language_mode()).code();
  EmitLoadStoreICSlot(expr->AssignmentSlot());
  CallIC(ic);
  // Deopt point after the store; the assigned value remains in eax.
  PrepareForBailoutForId(expr->AssignmentId(), BailoutState::TOS_REGISTER);
  context()->Plug(eax);
}
2241
2242
// Emits a call to an inline cache stub, tagging the call site with the AST id
// so type feedback / deopt can map it back, and bumps the IC counter.
void FullCodeGenerator::CallIC(Handle<Code> code,
                               TypeFeedbackId ast_id) {
  ic_total_count_++;
  __ call(code, RelocInfo::CODE_TARGET, ast_id);
}
2248
2249
// Code common for calls using the IC.
// Sets up the stack as [target function, receiver] for a call of the form
// `f(...)` or `obj.name(...)`, then delegates to EmitCall. For a plain
// variable callee the receiver is undefined (patched in the callee's prologue
// for sloppy-mode methods).
void FullCodeGenerator::EmitCallWithLoadIC(Call* expr) {
  Expression* callee = expr->expression();

  // Get the target function.
  ConvertReceiverMode convert_mode;
  if (callee->IsVariableProxy()) {
    { StackValueContext context(this);
      EmitVariableLoad(callee->AsVariableProxy());
      PrepareForBailout(callee, BailoutState::NO_REGISTERS);
    }
    // Push undefined as receiver. This is patched in the method prologue if it
    // is a sloppy mode method.
    PushOperand(isolate()->factory()->undefined_value());
    convert_mode = ConvertReceiverMode::kNullOrUndefined;
  } else {
    // Load the function from the receiver.
    DCHECK(callee->IsProperty());
    DCHECK(!callee->AsProperty()->IsSuperAccess());
    __ mov(LoadDescriptor::ReceiverRegister(), Operand(esp, 0));
    EmitNamedPropertyLoad(callee->AsProperty());
    PrepareForBailoutForId(callee->AsProperty()->LoadId(),
                           BailoutState::TOS_REGISTER);
    // Push the target function under the receiver.
    PushOperand(Operand(esp, 0));
    __ mov(Operand(esp, kPointerSize), eax);
    convert_mode = ConvertReceiverMode::kNotNullOrUndefined;
  }

  EmitCall(expr, convert_mode);
}
2281
2282
// Sets up and performs a call of the form `super.name(...)`: loads the
// method from the super object via Runtime::kLoadFromSuper and arranges the
// stack as [target function, receiver] before delegating to EmitCall.
void FullCodeGenerator::EmitSuperCallWithLoadIC(Call* expr) {
  SetExpressionPosition(expr);
  Expression* callee = expr->expression();
  DCHECK(callee->IsProperty());
  Property* prop = callee->AsProperty();
  DCHECK(prop->IsSuperAccess());

  Literal* key = prop->key()->AsLiteral();
  DCHECK(!key->value()->IsSmi());
  // Load the function from the receiver.
  SuperPropertyReference* super_ref = prop->obj()->AsSuperPropertyReference();
  VisitForStackValue(super_ref->home_object());
  VisitForAccumulatorValue(super_ref->this_var());
  // Duplicate `this` (receiver for the eventual call) and re-push
  // home_object/key as arguments for kLoadFromSuper.
  PushOperand(eax);
  PushOperand(eax);
  PushOperand(Operand(esp, kPointerSize * 2));
  PushOperand(key->value());
  // Stack here:
  //  - home_object
  //  - this (receiver)
  //  - this (receiver) <-- LoadFromSuper will pop here and below.
  //  - home_object
  //  - key
  CallRuntimeWithOperands(Runtime::kLoadFromSuper);
  PrepareForBailoutForId(prop->LoadId(), BailoutState::TOS_REGISTER);

  // Replace home_object with target function.
  __ mov(Operand(esp, kPointerSize), eax);

  // Stack here:
  // - target function
  // - this (receiver)
  EmitCall(expr);
}
2317
2318
// Code common for calls using the IC.
// Sets up and performs a call of the form `obj[key](...)`: loads the callee
// through the KeyedLoadIC, then arranges the stack as
// [target function, receiver] before delegating to EmitCall.
void FullCodeGenerator::EmitKeyedCallWithLoadIC(Call* expr,
                                                Expression* key) {
  // Load the key.
  VisitForAccumulatorValue(key);

  Expression* callee = expr->expression();

  // Load the function from the receiver.
  DCHECK(callee->IsProperty());
  __ mov(LoadDescriptor::ReceiverRegister(), Operand(esp, 0));
  __ mov(LoadDescriptor::NameRegister(), eax);
  EmitKeyedPropertyLoad(callee->AsProperty());
  PrepareForBailoutForId(callee->AsProperty()->LoadId(),
                         BailoutState::TOS_REGISTER);

  // Push the target function under the receiver.
  PushOperand(Operand(esp, 0));
  __ mov(Operand(esp, kPointerSize), eax);

  EmitCall(expr, ConvertReceiverMode::kNotNullOrUndefined);
}
2341
2342
// Sets up and performs a call of the form `super[key](...)`: loads the
// method via Runtime::kLoadKeyedFromSuper and arranges the stack as
// [target function, receiver] before delegating to EmitCall.
void FullCodeGenerator::EmitKeyedSuperCallWithLoadIC(Call* expr) {
  Expression* callee = expr->expression();
  DCHECK(callee->IsProperty());
  Property* prop = callee->AsProperty();
  DCHECK(prop->IsSuperAccess());

  SetExpressionPosition(prop);
  // Load the function from the receiver.
  SuperPropertyReference* super_ref = prop->obj()->AsSuperPropertyReference();
  VisitForStackValue(super_ref->home_object());
  VisitForAccumulatorValue(super_ref->this_var());
  // Duplicate `this` (receiver for the eventual call) and re-push
  // home_object, then evaluate the key, for kLoadKeyedFromSuper.
  PushOperand(eax);
  PushOperand(eax);
  PushOperand(Operand(esp, kPointerSize * 2));
  VisitForStackValue(prop->key());
  // Stack here:
  //  - home_object
  //  - this (receiver)
  //  - this (receiver) <-- LoadKeyedFromSuper will pop here and below.
  //  - home_object
  //  - key
  CallRuntimeWithOperands(Runtime::kLoadKeyedFromSuper);
  PrepareForBailoutForId(prop->LoadId(), BailoutState::TOS_REGISTER);

  // Replace home_object with target function.
  __ mov(Operand(esp, kPointerSize), eax);

  // Stack here:
  // - target function
  // - this (receiver)
  EmitCall(expr);
}
2375
2376
2377void FullCodeGenerator::EmitCall(Call* expr, ConvertReceiverMode mode) {
2378 // Load the arguments.
2379 ZoneList<Expression*>* args = expr->arguments();
2380 int arg_count = args->length();
2381 for (int i = 0; i < arg_count; i++) {
2382 VisitForStackValue(args->at(i));
2383 }
2384
Ben Murdochc5610432016-08-08 18:44:38 +01002385 PrepareForBailoutForId(expr->CallId(), BailoutState::NO_REGISTERS);
Ben Murdochda12d292016-06-02 14:46:10 +01002386 SetCallPosition(expr, expr->tail_call_mode());
Ben Murdoch097c5b22016-05-18 11:27:45 +01002387 if (expr->tail_call_mode() == TailCallMode::kAllow) {
2388 if (FLAG_trace) {
2389 __ CallRuntime(Runtime::kTraceTailCall);
2390 }
2391 // Update profiling counters before the tail call since we will
2392 // not return to this function.
2393 EmitProfilingCounterHandlingForReturnSequence(true);
2394 }
2395 Handle<Code> ic =
2396 CodeFactory::CallIC(isolate(), arg_count, mode, expr->tail_call_mode())
2397 .code();
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00002398 __ Move(edx, Immediate(SmiFromSlot(expr->CallFeedbackICSlot())));
2399 __ mov(edi, Operand(esp, (arg_count + 1) * kPointerSize));
2400 // Don't assign a type feedback id to the IC, since type feedback is provided
2401 // by the vector above.
2402 CallIC(ic);
Ben Murdoch097c5b22016-05-18 11:27:45 +01002403 OperandStackDepthDecrement(arg_count + 1);
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00002404
2405 RecordJSReturnSite(expr);
Ben Murdochc5610432016-08-08 18:44:38 +01002406 RestoreContext();
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00002407 context()->DropAndPlug(1, eax);
2408}
2409
// Emits the Runtime::kResolvePossiblyDirectEval call that resolves the
// actual callee for a (possibly direct) call to eval.  On entry a copy of
// the callee is on top of the stack; the runtime call leaves the resolved
// function in eax for the caller to use.
void FullCodeGenerator::EmitResolvePossiblyDirectEval(Call* expr) {
  int arg_count = expr->arguments()->length();
  // Push copy of the first argument or undefined if it doesn't exist.
  if (arg_count > 0) {
    __ push(Operand(esp, arg_count * kPointerSize));
  } else {
    __ push(Immediate(isolate()->factory()->undefined_value()));
  }

  // Push the enclosing function.
  __ push(Operand(ebp, JavaScriptFrameConstants::kFunctionOffset));

  // Push the language mode.
  __ push(Immediate(Smi::FromInt(language_mode())));

  // Push the start position of the scope the calls resides in.
  __ push(Immediate(Smi::FromInt(scope()->start_position())));

  // Push the source position of the eval call.
  __ push(Immediate(Smi::FromInt(expr->position())));

  // Do the runtime call.
  __ CallRuntime(Runtime::kResolvePossiblyDirectEval);
}
2434
2435
// See http://www.ecma-international.org/ecma-262/6.0/#sec-function-calls.
// Pushes the callee and an appropriate receiver onto the stack for a call
// whose callee is a variable proxy.  For lookup slots (variables possibly
// shadowed by eval-introduced bindings) the callee/receiver pair comes from
// Runtime::kLoadLookupSlotForCall; otherwise the receiver is undefined.
void FullCodeGenerator::PushCalleeAndWithBaseObject(Call* expr) {
  VariableProxy* callee = expr->expression()->AsVariableProxy();
  if (callee->var()->IsLookupSlot()) {
    Label slow, done;
    SetExpressionPosition(callee);
    // Generate code for loading from variables potentially shadowed by
    // eval-introduced variables.
    EmitDynamicLookupFastCase(callee, NOT_INSIDE_TYPEOF, &slow, &done);

    __ bind(&slow);
    // Call the runtime to find the function to call (returned in eax) and
    // the object holding it (returned in edx).
    __ Push(callee->name());
    __ CallRuntime(Runtime::kLoadLookupSlotForCall);
    PushOperand(eax);  // Function.
    PushOperand(edx);  // Receiver.
    PrepareForBailoutForId(expr->LookupId(), BailoutState::NO_REGISTERS);

    // If fast case code has been generated, emit code to push the function
    // and receiver and have the slow path jump around this code.
    if (done.is_linked()) {
      Label call;
      __ jmp(&call, Label::kNear);
      __ bind(&done);
      // Push function.
      __ push(eax);
      // The receiver is implicitly the global receiver. Indicate this by
      // passing undefined to the call function stub.
      __ push(Immediate(isolate()->factory()->undefined_value()));
      __ bind(&call);
    }
  } else {
    VisitForStackValue(callee);
    // refEnv.WithBaseObject()
    PushOperand(isolate()->factory()->undefined_value());
  }
}
2474
2475
// Emits a call whose callee might be the eval function.  The callee is
// resolved at runtime (Runtime_ResolvePossiblyDirectEval) before the call is
// made with the Call builtin; the result lands in eax.
void FullCodeGenerator::EmitPossiblyEvalCall(Call* expr) {
  // In a call to eval, we first call Runtime_ResolvePossiblyDirectEval
  // to resolve the function we need to call. Then we call the resolved
  // function using the given arguments.
  ZoneList<Expression*>* args = expr->arguments();
  int arg_count = args->length();

  PushCalleeAndWithBaseObject(expr);

  // Push the arguments.
  for (int i = 0; i < arg_count; i++) {
    VisitForStackValue(args->at(i));
  }

  // Push a copy of the function (found below the arguments) and
  // resolve eval.
  __ push(Operand(esp, (arg_count + 1) * kPointerSize));
  EmitResolvePossiblyDirectEval(expr);

  // Touch up the stack with the resolved function.
  __ mov(Operand(esp, (arg_count + 1) * kPointerSize), eax);

  PrepareForBailoutForId(expr->EvalId(), BailoutState::NO_REGISTERS);

  SetCallPosition(expr);
  // edi: function to call; eax: argument count.
  __ mov(edi, Operand(esp, (arg_count + 1) * kPointerSize));
  __ Set(eax, arg_count);
  __ Call(isolate()->builtins()->Call(ConvertReceiverMode::kAny,
                                      expr->tail_call_mode()),
          RelocInfo::CODE_TARGET);
  OperandStackDepthDecrement(arg_count + 1);
  RecordJSReturnSite(expr);
  RestoreContext();
  // Drop the function left on the stack and plug the result (eax).
  context()->DropAndPlug(1, eax);
}
2511
2512
// Emits code for a 'new' expression: evaluates the constructor and the
// arguments, records call-site type feedback, and invokes the construct
// stub.  The new object is left in eax.
void FullCodeGenerator::VisitCallNew(CallNew* expr) {
  Comment cmnt(masm_, "[ CallNew");
  // According to ECMA-262, section 11.2.2, page 44, the function
  // expression in new calls must be evaluated before the
  // arguments.

  // Push constructor on the stack. If it's not a function it's used as
  // receiver for CALL_NON_FUNCTION, otherwise the value on the stack is
  // ignored.
  DCHECK(!expr->expression()->IsSuperPropertyReference());
  VisitForStackValue(expr->expression());

  // Push the arguments ("left-to-right") on the stack.
  ZoneList<Expression*>* args = expr->arguments();
  int arg_count = args->length();
  for (int i = 0; i < arg_count; i++) {
    VisitForStackValue(args->at(i));
  }

  // Call the construct call builtin that handles allocation and
  // constructor invocation.
  SetConstructCallPosition(expr);

  // Load function and argument count into edi and eax.
  __ Move(eax, Immediate(arg_count));
  __ mov(edi, Operand(esp, arg_count * kPointerSize));

  // Record call targets in unoptimized code.
  // ebx: type feedback vector; edx: feedback slot (as a Smi).
  __ EmitLoadTypeFeedbackVector(ebx);
  __ mov(edx, Immediate(SmiFromSlot(expr->CallNewFeedbackSlot())));

  CallConstructStub stub(isolate());
  __ call(stub.GetCode(), RelocInfo::CODE_TARGET);
  OperandStackDepthDecrement(arg_count + 1);
  // The new object is live in eax at this bailout point.
  PrepareForBailoutForId(expr->ReturnId(), BailoutState::TOS_REGISTER);
  RestoreContext();
  context()->Plug(eax);
}
2551
2552
// Emits code for a super(...) constructor call: loads the super constructor
// from the prototype of the current function's map, evaluates the arguments
// and new.target, and invokes the Construct builtin.  The result is left in
// eax.
void FullCodeGenerator::EmitSuperConstructorCall(Call* expr) {
  SuperCallReference* super_call_ref =
      expr->expression()->AsSuperCallReference();
  DCHECK_NOT_NULL(super_call_ref);

  // Push the super constructor target on the stack (may be null,
  // but the Construct builtin can deal with that properly).
  VisitForAccumulatorValue(super_call_ref->this_function_var());
  __ AssertFunction(result_register());
  // The super constructor is the prototype of the current function's map.
  __ mov(result_register(),
         FieldOperand(result_register(), HeapObject::kMapOffset));
  PushOperand(FieldOperand(result_register(), Map::kPrototypeOffset));

  // Push the arguments ("left-to-right") on the stack.
  ZoneList<Expression*>* args = expr->arguments();
  int arg_count = args->length();
  for (int i = 0; i < arg_count; i++) {
    VisitForStackValue(args->at(i));
  }

  // Call the construct call builtin that handles allocation and
  // constructor invocation.
  SetConstructCallPosition(expr);

  // Load new target into edx.
  VisitForAccumulatorValue(super_call_ref->new_target_var());
  __ mov(edx, result_register());

  // Load function and argument count into edi and eax.
  __ Move(eax, Immediate(arg_count));
  __ mov(edi, Operand(esp, arg_count * kPointerSize));

  __ Call(isolate()->builtins()->Construct(), RelocInfo::CODE_TARGET);
  OperandStackDepthDecrement(arg_count + 1);

  RecordJSReturnSite(expr);
  RestoreContext();
  context()->Plug(eax);
}
2592
2593
// Emits the %_IsSmi intrinsic: tests whether the single argument is a Smi
// (tagged small integer) and plugs the boolean outcome into the current
// test/value context.
void FullCodeGenerator::EmitIsSmi(CallRuntime* expr) {
  ZoneList<Expression*>* args = expr->arguments();
  DCHECK(args->length() == 1);

  VisitForAccumulatorValue(args->at(0));

  Label materialize_true, materialize_false;
  Label* if_true = NULL;
  Label* if_false = NULL;
  Label* fall_through = NULL;
  context()->PrepareTest(&materialize_true, &materialize_false,
                         &if_true, &if_false, &fall_through);

  PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
  // A Smi has the low tag bit clear, so the masked test sets ZF for Smis.
  __ test(eax, Immediate(kSmiTagMask));
  Split(zero, if_true, if_false, fall_through);

  context()->Plug(if_true, if_false);
}
2613
2614
// Emits the %_IsJSReceiver intrinsic: tests whether the single argument is
// a JSReceiver (instance type >= FIRST_JS_RECEIVER_TYPE) and plugs the
// boolean outcome into the current context.
void FullCodeGenerator::EmitIsJSReceiver(CallRuntime* expr) {
  ZoneList<Expression*>* args = expr->arguments();
  DCHECK(args->length() == 1);

  VisitForAccumulatorValue(args->at(0));

  Label materialize_true, materialize_false;
  Label* if_true = NULL;
  Label* if_false = NULL;
  Label* fall_through = NULL;
  context()->PrepareTest(&materialize_true, &materialize_false,
                         &if_true, &if_false, &fall_through);

  // Smis are not receivers; otherwise compare the instance type (scratch
  // register ebx) against FIRST_JS_RECEIVER_TYPE.
  __ JumpIfSmi(eax, if_false);
  __ CmpObjectType(eax, FIRST_JS_RECEIVER_TYPE, ebx);
  PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
  Split(above_equal, if_true, if_false, fall_through);

  context()->Plug(if_true, if_false);
}
2635
2636
// Emits the %_IsArray intrinsic: tests whether the single argument is a
// JSArray and plugs the boolean outcome into the current context.
void FullCodeGenerator::EmitIsArray(CallRuntime* expr) {
  ZoneList<Expression*>* args = expr->arguments();
  DCHECK(args->length() == 1);

  VisitForAccumulatorValue(args->at(0));

  Label materialize_true, materialize_false;
  Label* if_true = NULL;
  Label* if_false = NULL;
  Label* fall_through = NULL;
  context()->PrepareTest(&materialize_true, &materialize_false,
                         &if_true, &if_false, &fall_through);

  // Smis are not arrays; otherwise compare the instance type (scratch ebx)
  // for equality with JS_ARRAY_TYPE.
  __ JumpIfSmi(eax, if_false);
  __ CmpObjectType(eax, JS_ARRAY_TYPE, ebx);
  PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
  Split(equal, if_true, if_false, fall_through);

  context()->Plug(if_true, if_false);
}
2657
2658
// Emits the %_IsTypedArray intrinsic: tests whether the single argument is
// a JSTypedArray and plugs the boolean outcome into the current context.
void FullCodeGenerator::EmitIsTypedArray(CallRuntime* expr) {
  ZoneList<Expression*>* args = expr->arguments();
  DCHECK(args->length() == 1);

  VisitForAccumulatorValue(args->at(0));

  Label materialize_true, materialize_false;
  Label* if_true = NULL;
  Label* if_false = NULL;
  Label* fall_through = NULL;
  context()->PrepareTest(&materialize_true, &materialize_false, &if_true,
                         &if_false, &fall_through);

  // Smis are not typed arrays; otherwise compare the instance type
  // (scratch ebx) for equality with JS_TYPED_ARRAY_TYPE.
  __ JumpIfSmi(eax, if_false);
  __ CmpObjectType(eax, JS_TYPED_ARRAY_TYPE, ebx);
  PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
  Split(equal, if_true, if_false, fall_through);

  context()->Plug(if_true, if_false);
}
2679
2680
// Emits the %_IsRegExp intrinsic: tests whether the single argument is a
// JSRegExp and plugs the boolean outcome into the current context.
void FullCodeGenerator::EmitIsRegExp(CallRuntime* expr) {
  ZoneList<Expression*>* args = expr->arguments();
  DCHECK(args->length() == 1);

  VisitForAccumulatorValue(args->at(0));

  Label materialize_true, materialize_false;
  Label* if_true = NULL;
  Label* if_false = NULL;
  Label* fall_through = NULL;
  context()->PrepareTest(&materialize_true, &materialize_false,
                         &if_true, &if_false, &fall_through);

  // Smis are not regexps; otherwise compare the instance type (scratch ebx)
  // for equality with JS_REGEXP_TYPE.
  __ JumpIfSmi(eax, if_false);
  __ CmpObjectType(eax, JS_REGEXP_TYPE, ebx);
  PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
  Split(equal, if_true, if_false, fall_through);

  context()->Plug(if_true, if_false);
}
2701
2702
// Emits the %_IsJSProxy intrinsic: tests whether the single argument is a
// JSProxy and plugs the boolean outcome into the current context.
void FullCodeGenerator::EmitIsJSProxy(CallRuntime* expr) {
  ZoneList<Expression*>* args = expr->arguments();
  DCHECK(args->length() == 1);

  VisitForAccumulatorValue(args->at(0));

  Label materialize_true, materialize_false;
  Label* if_true = NULL;
  Label* if_false = NULL;
  Label* fall_through = NULL;
  context()->PrepareTest(&materialize_true, &materialize_false, &if_true,
                         &if_false, &fall_through);

  // Smis are not proxies; otherwise compare the instance type (scratch ebx)
  // for equality with JS_PROXY_TYPE.
  __ JumpIfSmi(eax, if_false);
  __ CmpObjectType(eax, JS_PROXY_TYPE, ebx);
  PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
  Split(equal, if_true, if_false, fall_through);

  context()->Plug(if_true, if_false);
}
2723
2724
// Emits the %_ClassOf intrinsic: computes the class-name string of the
// single argument (null for non-receivers, 'Function' for function kinds,
// the constructor's instance class name otherwise, and 'Object' when the
// constructor is not a JS function).  The result string is left in eax.
void FullCodeGenerator::EmitClassOf(CallRuntime* expr) {
  ZoneList<Expression*>* args = expr->arguments();
  DCHECK(args->length() == 1);
  Label done, null, function, non_function_constructor;

  VisitForAccumulatorValue(args->at(0));

  // If the object is not a JSReceiver, we return null.
  __ JumpIfSmi(eax, &null, Label::kNear);
  STATIC_ASSERT(LAST_JS_RECEIVER_TYPE == LAST_TYPE);
  // Note: CmpObjectType clobbers eax with the map of the object.
  __ CmpObjectType(eax, FIRST_JS_RECEIVER_TYPE, eax);
  __ j(below, &null, Label::kNear);

  // Return 'Function' for JSFunction and JSBoundFunction objects.
  __ CmpInstanceType(eax, FIRST_FUNCTION_TYPE);
  STATIC_ASSERT(LAST_FUNCTION_TYPE == LAST_TYPE);
  __ j(above_equal, &function, Label::kNear);

  // Check if the constructor in the map is a JS function.
  __ GetMapConstructor(eax, eax, ebx);
  __ CmpInstanceType(ebx, JS_FUNCTION_TYPE);
  __ j(not_equal, &non_function_constructor, Label::kNear);

  // eax now contains the constructor function. Grab the
  // instance class name from there.
  __ mov(eax, FieldOperand(eax, JSFunction::kSharedFunctionInfoOffset));
  __ mov(eax, FieldOperand(eax, SharedFunctionInfo::kInstanceClassNameOffset));
  __ jmp(&done, Label::kNear);

  // Non-JS objects have class null.
  __ bind(&null);
  __ mov(eax, isolate()->factory()->null_value());
  __ jmp(&done, Label::kNear);

  // Functions have class 'Function'.
  __ bind(&function);
  __ mov(eax, isolate()->factory()->Function_string());
  __ jmp(&done, Label::kNear);

  // Objects with a non-function constructor have class 'Object'.
  __ bind(&non_function_constructor);
  __ mov(eax, isolate()->factory()->Object_string());

  // All done.
  __ bind(&done);

  context()->Plug(eax);
}
2773
2774
// Emits the %_ValueOf intrinsic: if the argument is a JSValue wrapper, the
// wrapped primitive is extracted; otherwise the argument itself is returned
// unchanged.  The result is left in eax.
void FullCodeGenerator::EmitValueOf(CallRuntime* expr) {
  ZoneList<Expression*>* args = expr->arguments();
  DCHECK(args->length() == 1);

  VisitForAccumulatorValue(args->at(0));  // Load the object.

  Label done;
  // If the object is a smi return the object.
  __ JumpIfSmi(eax, &done, Label::kNear);
  // If the object is not a value type, return the object.
  __ CmpObjectType(eax, JS_VALUE_TYPE, ebx);
  __ j(not_equal, &done, Label::kNear);
  // Unwrap the JSValue: load the boxed primitive.
  __ mov(eax, FieldOperand(eax, JSValue::kValueOffset));

  __ bind(&done);
  context()->Plug(eax);
}
2792
2793
// Emits the %_OneByteSeqStringSetChar intrinsic: stores a character code
// (Smi) at a given index (Smi) into a sequential one-byte string.  Arguments
// are (index, value, string); the string is returned in eax.
void FullCodeGenerator::EmitOneByteSeqStringSetChar(CallRuntime* expr) {
  ZoneList<Expression*>* args = expr->arguments();
  DCHECK_EQ(3, args->length());

  Register string = eax;
  Register index = ebx;
  Register value = ecx;

  VisitForStackValue(args->at(0));  // index
  VisitForStackValue(args->at(1));  // value
  VisitForAccumulatorValue(args->at(2));  // string

  PopOperand(value);
  PopOperand(index);

  if (FLAG_debug_code) {
    // Both index and value must arrive as Smis.
    __ test(value, Immediate(kSmiTagMask));
    __ Check(zero, kNonSmiValue);
    __ test(index, Immediate(kSmiTagMask));
    __ Check(zero, kNonSmiValue);
  }

  __ SmiUntag(value);
  __ SmiUntag(index);

  if (FLAG_debug_code) {
    static const uint32_t one_byte_seq_type = kSeqStringTag | kOneByteStringTag;
    __ EmitSeqStringSetCharCheck(string, index, value, one_byte_seq_type);
  }

  // One byte per character: scale factor times_1.
  __ mov_b(FieldOperand(string, index, times_1, SeqOneByteString::kHeaderSize),
           value);
  context()->Plug(string);
}
2828
2829
// Emits the %_TwoByteSeqStringSetChar intrinsic: stores a character code
// (Smi) at a given index (Smi) into a sequential two-byte string.  Arguments
// are (index, value, string); the string is returned in eax.
void FullCodeGenerator::EmitTwoByteSeqStringSetChar(CallRuntime* expr) {
  ZoneList<Expression*>* args = expr->arguments();
  DCHECK_EQ(3, args->length());

  Register string = eax;
  Register index = ebx;
  Register value = ecx;

  VisitForStackValue(args->at(0));  // index
  VisitForStackValue(args->at(1));  // value
  VisitForAccumulatorValue(args->at(2));  // string
  PopOperand(value);
  PopOperand(index);

  if (FLAG_debug_code) {
    // Both index and value must arrive as Smis; the check helper needs the
    // untagged index, so untag/retag around it.
    __ test(value, Immediate(kSmiTagMask));
    __ Check(zero, kNonSmiValue);
    __ test(index, Immediate(kSmiTagMask));
    __ Check(zero, kNonSmiValue);
    __ SmiUntag(index);
    static const uint32_t two_byte_seq_type = kSeqStringTag | kTwoByteStringTag;
    __ EmitSeqStringSetCharCheck(string, index, value, two_byte_seq_type);
    __ SmiTag(index);
  }

  __ SmiUntag(value);
  // No need to untag a smi for two-byte addressing.
  __ mov_w(FieldOperand(string, index, times_1, SeqTwoByteString::kHeaderSize),
           value);
  context()->Plug(string);
}
2861
2862
// Emits the %_StringCharFromCode intrinsic: converts a character code in
// eax to a one-character string using the StringCharFromCodeGenerator fast
// path (with a slow path fallback).  The result string is left in ebx.
void FullCodeGenerator::EmitStringCharFromCode(CallRuntime* expr) {
  ZoneList<Expression*>* args = expr->arguments();
  DCHECK(args->length() == 1);

  VisitForAccumulatorValue(args->at(0));

  Label done;
  StringCharFromCodeGenerator generator(eax, ebx);
  generator.GenerateFast(masm_);
  __ jmp(&done);

  // Slow path (e.g. non-trivial code values) is handled out of line.
  NopRuntimeCallHelper call_helper;
  generator.GenerateSlow(masm_, call_helper);

  __ bind(&done);
  context()->Plug(ebx);
}
2880
2881
// Emits the %_StringCharCodeAt intrinsic: loads the character code at a
// given index of a string (arguments: string, index).  Returns NaN for an
// out-of-range index and undefined when the index needs conversion; the
// result is left in edx.
void FullCodeGenerator::EmitStringCharCodeAt(CallRuntime* expr) {
  ZoneList<Expression*>* args = expr->arguments();
  DCHECK(args->length() == 2);

  VisitForStackValue(args->at(0));
  VisitForAccumulatorValue(args->at(1));

  Register object = ebx;
  Register index = eax;
  Register result = edx;

  PopOperand(object);

  Label need_conversion;
  Label index_out_of_range;
  Label done;
  StringCharCodeAtGenerator generator(object,
                                      index,
                                      result,
                                      &need_conversion,
                                      &need_conversion,
                                      &index_out_of_range,
                                      STRING_INDEX_IS_NUMBER);
  generator.GenerateFast(masm_);
  __ jmp(&done);

  __ bind(&index_out_of_range);
  // When the index is out of range, the spec requires us to return
  // NaN.
  __ Move(result, Immediate(isolate()->factory()->nan_value()));
  __ jmp(&done);

  __ bind(&need_conversion);
  // Move the undefined value into the result register, which will
  // trigger conversion.
  __ Move(result, Immediate(isolate()->factory()->undefined_value()));
  __ jmp(&done);

  NopRuntimeCallHelper call_helper;
  generator.GenerateSlow(masm_, NOT_PART_OF_IC_HANDLER, call_helper);

  __ bind(&done);
  context()->Plug(result);
}
2926
2927
// Emits the %_StringCharAt intrinsic: loads the one-character string at a
// given index of a string (arguments: string, index).  Returns the empty
// string for an out-of-range index and Smi zero when the index needs
// conversion; the result is left in eax.
void FullCodeGenerator::EmitStringCharAt(CallRuntime* expr) {
  ZoneList<Expression*>* args = expr->arguments();
  DCHECK(args->length() == 2);

  VisitForStackValue(args->at(0));
  VisitForAccumulatorValue(args->at(1));

  Register object = ebx;
  Register index = eax;
  Register scratch = edx;
  Register result = eax;

  PopOperand(object);

  Label need_conversion;
  Label index_out_of_range;
  Label done;
  StringCharAtGenerator generator(object,
                                  index,
                                  scratch,
                                  result,
                                  &need_conversion,
                                  &need_conversion,
                                  &index_out_of_range,
                                  STRING_INDEX_IS_NUMBER);
  generator.GenerateFast(masm_);
  __ jmp(&done);

  __ bind(&index_out_of_range);
  // When the index is out of range, the spec requires us to return
  // the empty string.
  __ Move(result, Immediate(isolate()->factory()->empty_string()));
  __ jmp(&done);

  __ bind(&need_conversion);
  // Move smi zero into the result register, which will trigger
  // conversion.
  __ Move(result, Immediate(Smi::FromInt(0)));
  __ jmp(&done);

  NopRuntimeCallHelper call_helper;
  generator.GenerateSlow(masm_, NOT_PART_OF_IC_HANDLER, call_helper);

  __ bind(&done);
  context()->Plug(result);
}
2974
2975
// Emits the %_Call intrinsic: arguments are (target, receiver, args...).
// All of them are pushed, then the Call builtin is invoked with the target
// in edi.  The result is left in eax.
void FullCodeGenerator::EmitCall(CallRuntime* expr) {
  ZoneList<Expression*>* args = expr->arguments();
  DCHECK_LE(2, args->length());
  // Push target, receiver and arguments onto the stack.
  for (Expression* const arg : *args) {
    VisitForStackValue(arg);
  }
  PrepareForBailoutForId(expr->CallId(), BailoutState::NO_REGISTERS);
  // Move target to edi.
  int const argc = args->length() - 2;
  __ mov(edi, Operand(esp, (argc + 1) * kPointerSize));
  // Call the target.
  __ mov(eax, Immediate(argc));
  __ Call(isolate()->builtins()->Call(), RelocInfo::CODE_TARGET);
  OperandStackDepthDecrement(argc + 1);
  RestoreContext();
  // Discard the function left on TOS.
  context()->DropAndPlug(1, eax);
}
2995
2996
// Emits the %_HasCachedArrayIndex intrinsic: tests whether the argument
// string's hash field contains a cached array index, plugging the boolean
// outcome into the current context.
void FullCodeGenerator::EmitHasCachedArrayIndex(CallRuntime* expr) {
  ZoneList<Expression*>* args = expr->arguments();
  DCHECK(args->length() == 1);

  VisitForAccumulatorValue(args->at(0));

  __ AssertString(eax);

  Label materialize_true, materialize_false;
  Label* if_true = NULL;
  Label* if_false = NULL;
  Label* fall_through = NULL;
  context()->PrepareTest(&materialize_true, &materialize_false,
                         &if_true, &if_false, &fall_through);

  // The mask bits are clear exactly when a cached index is present.
  __ test(FieldOperand(eax, String::kHashFieldOffset),
          Immediate(String::kContainsCachedArrayIndexMask));
  PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
  Split(zero, if_true, if_false, fall_through);

  context()->Plug(if_true, if_false);
}
3019
3020
// Emits the %_GetCachedArrayIndex intrinsic: extracts the array index
// cached in the argument string's hash field and leaves it in eax.
void FullCodeGenerator::EmitGetCachedArrayIndex(CallRuntime* expr) {
  ZoneList<Expression*>* args = expr->arguments();
  DCHECK(args->length() == 1);
  VisitForAccumulatorValue(args->at(0));

  __ AssertString(eax);

  // Decode the index bits out of the hash field.
  __ mov(eax, FieldOperand(eax, String::kHashFieldOffset));
  __ IndexFromHash(eax, eax);

  context()->Plug(eax);
}
3033
3034
// Emits the %_GetSuperConstructor intrinsic: loads the prototype of the
// argument function's map (its super constructor) into eax.
void FullCodeGenerator::EmitGetSuperConstructor(CallRuntime* expr) {
  ZoneList<Expression*>* args = expr->arguments();
  DCHECK_EQ(1, args->length());
  VisitForAccumulatorValue(args->at(0));
  __ AssertFunction(eax);
  __ mov(eax, FieldOperand(eax, HeapObject::kMapOffset));
  __ mov(eax, FieldOperand(eax, Map::kPrototypeOffset));
  context()->Plug(eax);
}
3044
// Emits the %_DebugIsActive intrinsic: reads the isolate's debug-is-active
// flag byte and returns it as a Smi in eax.
void FullCodeGenerator::EmitDebugIsActive(CallRuntime* expr) {
  DCHECK(expr->arguments()->length() == 0);
  ExternalReference debug_is_active =
      ExternalReference::debug_is_active_address(isolate());
  // Zero-extended byte load, then tag as a Smi for the JS-visible result.
  __ movzx_b(eax, Operand::StaticVariable(debug_is_active));
  __ SmiTag(eax);
  context()->Plug(eax);
}
3053
3054
// Emits the %_CreateIterResultObject intrinsic: allocates a JSIteratorResult
// with the given (value, done) arguments, falling back to the runtime when
// inline allocation fails.  The result object is left in eax.
void FullCodeGenerator::EmitCreateIterResultObject(CallRuntime* expr) {
  ZoneList<Expression*>* args = expr->arguments();
  DCHECK_EQ(2, args->length());
  VisitForStackValue(args->at(0));
  VisitForStackValue(args->at(1));

  Label runtime, done;

  __ Allocate(JSIteratorResult::kSize, eax, ecx, edx, &runtime,
              NO_ALLOCATION_FLAGS);
  // Initialize map and the (empty) properties/elements backing stores.
  __ mov(ebx, NativeContextOperand());
  __ mov(ebx, ContextOperand(ebx, Context::ITERATOR_RESULT_MAP_INDEX));
  __ mov(FieldOperand(eax, HeapObject::kMapOffset), ebx);
  __ mov(FieldOperand(eax, JSObject::kPropertiesOffset),
         isolate()->factory()->empty_fixed_array());
  __ mov(FieldOperand(eax, JSObject::kElementsOffset),
         isolate()->factory()->empty_fixed_array());
  // Pop the two pushed arguments straight into the result's fields.
  __ pop(FieldOperand(eax, JSIteratorResult::kDoneOffset));
  __ pop(FieldOperand(eax, JSIteratorResult::kValueOffset));
  STATIC_ASSERT(JSIteratorResult::kSize == 5 * kPointerSize);
  __ jmp(&done, Label::kNear);

  // Allocation failed: let the runtime build the object (it consumes the
  // two operands pushed above).
  __ bind(&runtime);
  CallRuntimeWithOperands(Runtime::kCreateIterResultObject);

  __ bind(&done);
  context()->Plug(eax);
}
3083
3084
// Pushes the JS runtime function (looked up by context index) and an
// undefined receiver onto the stack, in preparation for
// EmitCallJSRuntimeFunction.
void FullCodeGenerator::EmitLoadJSRuntimeFunction(CallRuntime* expr) {
  // Push function.
  __ LoadGlobalFunction(expr->context_index(), eax);
  PushOperand(eax);

  // Push undefined as receiver.
  PushOperand(isolate()->factory()->undefined_value());
}
3093
3094
// Calls a JS runtime function whose target and receiver were previously
// pushed by EmitLoadJSRuntimeFunction, with the arguments already on the
// stack.  The result is left in eax.
void FullCodeGenerator::EmitCallJSRuntimeFunction(CallRuntime* expr) {
  ZoneList<Expression*>* args = expr->arguments();
  int arg_count = args->length();

  SetCallPosition(expr);
  // edi: the function (below the arguments); eax: argument count.
  __ mov(edi, Operand(esp, (arg_count + 1) * kPointerSize));
  __ Set(eax, arg_count);
  __ Call(isolate()->builtins()->Call(ConvertReceiverMode::kNullOrUndefined),
          RelocInfo::CODE_TARGET);
  OperandStackDepthDecrement(arg_count + 1);
  RestoreContext();
}
3107
3108
// Emits code for the unary operators delete, void, !, and typeof, plugging
// each result into the current expression context.
void FullCodeGenerator::VisitUnaryOperation(UnaryOperation* expr) {
  switch (expr->op()) {
    case Token::DELETE: {
      Comment cmnt(masm_, "[ UnaryOperation (DELETE)");
      Property* property = expr->expression()->AsProperty();
      VariableProxy* proxy = expr->expression()->AsVariableProxy();

      if (property != NULL) {
        // delete obj[key] / delete obj.key: defer to the runtime, picking
        // the strict or sloppy variant by the current language mode.
        VisitForStackValue(property->obj());
        VisitForStackValue(property->key());
        CallRuntimeWithOperands(is_strict(language_mode())
                                    ? Runtime::kDeleteProperty_Strict
                                    : Runtime::kDeleteProperty_Sloppy);
        context()->Plug(eax);
      } else if (proxy != NULL) {
        Variable* var = proxy->var();
        // Delete of an unqualified identifier is disallowed in strict mode but
        // "delete this" is allowed.
        bool is_this = var->HasThisName(isolate());
        DCHECK(is_sloppy(language_mode()) || is_this);
        if (var->IsUnallocatedOrGlobalSlot()) {
          // Global variable: delete from the global object's extension.
          __ mov(eax, NativeContextOperand());
          __ push(ContextOperand(eax, Context::EXTENSION_INDEX));
          __ push(Immediate(var->name()));
          __ CallRuntime(Runtime::kDeleteProperty_Sloppy);
          context()->Plug(eax);
        } else if (var->IsStackAllocated() || var->IsContextSlot()) {
          // Result of deleting non-global variables is false. 'this' is
          // not really a variable, though we implement it as one. The
          // subexpression does not have side effects.
          context()->Plug(is_this);
        } else {
          // Non-global variable. Call the runtime to try to delete from the
          // context where the variable was introduced.
          __ Push(var->name());
          __ CallRuntime(Runtime::kDeleteLookupSlot);
          context()->Plug(eax);
        }
      } else {
        // Result of deleting non-property, non-variable reference is true.
        // The subexpression may have side effects.
        VisitForEffect(expr->expression());
        context()->Plug(true);
      }
      break;
    }

    case Token::VOID: {
      // void <expr>: evaluate for side effects, result is undefined.
      Comment cmnt(masm_, "[ UnaryOperation (VOID)");
      VisitForEffect(expr->expression());
      context()->Plug(isolate()->factory()->undefined_value());
      break;
    }

    case Token::NOT: {
      Comment cmnt(masm_, "[ UnaryOperation (NOT)");
      if (context()->IsEffect()) {
        // Unary NOT has no side effects so it's only necessary to visit the
        // subexpression.  Match the optimizing compiler by not branching.
        VisitForEffect(expr->expression());
      } else if (context()->IsTest()) {
        const TestContext* test = TestContext::cast(context());
        // The labels are swapped for the recursive call.
        VisitForControl(expr->expression(),
                        test->false_label(),
                        test->true_label(),
                        test->fall_through());
        context()->Plug(test->true_label(), test->false_label());
      } else {
        // We handle value contexts explicitly rather than simply visiting
        // for control and plugging the control flow into the context,
        // because we need to prepare a pair of extra administrative AST ids
        // for the optimizing compiler.
        DCHECK(context()->IsAccumulatorValue() || context()->IsStackValue());
        Label materialize_true, materialize_false, done;
        // Note the inverted labels: control reaching materialize_true means
        // the subexpression was false, so NOT yields true (and vice versa).
        VisitForControl(expr->expression(),
                        &materialize_false,
                        &materialize_true,
                        &materialize_true);
        if (!context()->IsAccumulatorValue()) OperandStackDepthIncrement(1);
        __ bind(&materialize_true);
        PrepareForBailoutForId(expr->MaterializeTrueId(),
                               BailoutState::NO_REGISTERS);
        if (context()->IsAccumulatorValue()) {
          __ mov(eax, isolate()->factory()->true_value());
        } else {
          __ Push(isolate()->factory()->true_value());
        }
        __ jmp(&done, Label::kNear);
        __ bind(&materialize_false);
        PrepareForBailoutForId(expr->MaterializeFalseId(),
                               BailoutState::NO_REGISTERS);
        if (context()->IsAccumulatorValue()) {
          __ mov(eax, isolate()->factory()->false_value());
        } else {
          __ Push(isolate()->factory()->false_value());
        }
        __ bind(&done);
      }
      break;
    }

    case Token::TYPEOF: {
      Comment cmnt(masm_, "[ UnaryOperation (TYPEOF)");
      {
        // Evaluate the operand into the accumulator without triggering a
        // reference error for unresolvable references.
        AccumulatorValueContext context(this);
        VisitForTypeofValue(expr->expression());
      }
      // TypeofStub takes its input in ebx and returns the type string.
      __ mov(ebx, eax);
      TypeofStub typeof_stub(isolate());
      __ CallStub(&typeof_stub);
      context()->Plug(eax);
      break;
    }

    default:
      UNREACHABLE();
  }
}
3228
3229
3230void FullCodeGenerator::VisitCountOperation(CountOperation* expr) {
3231 DCHECK(expr->expression()->IsValidReferenceExpressionOrThis());
3232
3233 Comment cmnt(masm_, "[ CountOperation");
3234
3235 Property* prop = expr->expression()->AsProperty();
3236 LhsKind assign_type = Property::GetAssignType(prop);
3237
3238 // Evaluate expression and get value.
3239 if (assign_type == VARIABLE) {
3240 DCHECK(expr->expression()->AsVariableProxy()->var() != NULL);
3241 AccumulatorValueContext context(this);
3242 EmitVariableLoad(expr->expression()->AsVariableProxy());
3243 } else {
3244 // Reserve space for result of postfix operation.
3245 if (expr->is_postfix() && !context()->IsEffect()) {
Ben Murdoch097c5b22016-05-18 11:27:45 +01003246 PushOperand(Smi::FromInt(0));
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00003247 }
3248 switch (assign_type) {
3249 case NAMED_PROPERTY: {
3250 // Put the object both on the stack and in the register.
3251 VisitForStackValue(prop->obj());
3252 __ mov(LoadDescriptor::ReceiverRegister(), Operand(esp, 0));
3253 EmitNamedPropertyLoad(prop);
3254 break;
3255 }
3256
3257 case NAMED_SUPER_PROPERTY: {
3258 VisitForStackValue(prop->obj()->AsSuperPropertyReference()->this_var());
3259 VisitForAccumulatorValue(
3260 prop->obj()->AsSuperPropertyReference()->home_object());
Ben Murdoch097c5b22016-05-18 11:27:45 +01003261 PushOperand(result_register());
3262 PushOperand(MemOperand(esp, kPointerSize));
3263 PushOperand(result_register());
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00003264 EmitNamedSuperPropertyLoad(prop);
3265 break;
3266 }
3267
3268 case KEYED_SUPER_PROPERTY: {
3269 VisitForStackValue(prop->obj()->AsSuperPropertyReference()->this_var());
3270 VisitForStackValue(
3271 prop->obj()->AsSuperPropertyReference()->home_object());
3272 VisitForAccumulatorValue(prop->key());
Ben Murdoch097c5b22016-05-18 11:27:45 +01003273 PushOperand(result_register());
3274 PushOperand(MemOperand(esp, 2 * kPointerSize));
3275 PushOperand(MemOperand(esp, 2 * kPointerSize));
3276 PushOperand(result_register());
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00003277 EmitKeyedSuperPropertyLoad(prop);
3278 break;
3279 }
3280
3281 case KEYED_PROPERTY: {
3282 VisitForStackValue(prop->obj());
3283 VisitForStackValue(prop->key());
3284 __ mov(LoadDescriptor::ReceiverRegister(),
3285 Operand(esp, kPointerSize)); // Object.
3286 __ mov(LoadDescriptor::NameRegister(), Operand(esp, 0)); // Key.
3287 EmitKeyedPropertyLoad(prop);
3288 break;
3289 }
3290
3291 case VARIABLE:
3292 UNREACHABLE();
3293 }
3294 }
3295
3296 // We need a second deoptimization point after loading the value
3297 // in case evaluating the property load my have a side effect.
3298 if (assign_type == VARIABLE) {
Ben Murdochc5610432016-08-08 18:44:38 +01003299 PrepareForBailout(expr->expression(), BailoutState::TOS_REGISTER);
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00003300 } else {
Ben Murdochc5610432016-08-08 18:44:38 +01003301 PrepareForBailoutForId(prop->LoadId(), BailoutState::TOS_REGISTER);
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00003302 }
3303
3304 // Inline smi case if we are in a loop.
3305 Label done, stub_call;
3306 JumpPatchSite patch_site(masm_);
3307 if (ShouldInlineSmiCase(expr->op())) {
3308 Label slow;
3309 patch_site.EmitJumpIfNotSmi(eax, &slow, Label::kNear);
3310
3311 // Save result for postfix expressions.
3312 if (expr->is_postfix()) {
3313 if (!context()->IsEffect()) {
3314 // Save the result on the stack. If we have a named or keyed property
3315 // we store the result under the receiver that is currently on top
3316 // of the stack.
3317 switch (assign_type) {
3318 case VARIABLE:
3319 __ push(eax);
3320 break;
3321 case NAMED_PROPERTY:
3322 __ mov(Operand(esp, kPointerSize), eax);
3323 break;
3324 case NAMED_SUPER_PROPERTY:
3325 __ mov(Operand(esp, 2 * kPointerSize), eax);
3326 break;
3327 case KEYED_PROPERTY:
3328 __ mov(Operand(esp, 2 * kPointerSize), eax);
3329 break;
3330 case KEYED_SUPER_PROPERTY:
3331 __ mov(Operand(esp, 3 * kPointerSize), eax);
3332 break;
3333 }
3334 }
3335 }
3336
3337 if (expr->op() == Token::INC) {
3338 __ add(eax, Immediate(Smi::FromInt(1)));
3339 } else {
3340 __ sub(eax, Immediate(Smi::FromInt(1)));
3341 }
3342 __ j(no_overflow, &done, Label::kNear);
3343 // Call stub. Undo operation first.
3344 if (expr->op() == Token::INC) {
3345 __ sub(eax, Immediate(Smi::FromInt(1)));
3346 } else {
3347 __ add(eax, Immediate(Smi::FromInt(1)));
3348 }
3349 __ jmp(&stub_call, Label::kNear);
3350 __ bind(&slow);
3351 }
Ben Murdochda12d292016-06-02 14:46:10 +01003352
3353 // Convert old value into a number.
3354 ToNumberStub convert_stub(isolate());
3355 __ CallStub(&convert_stub);
Ben Murdochc5610432016-08-08 18:44:38 +01003356 PrepareForBailoutForId(expr->ToNumberId(), BailoutState::TOS_REGISTER);
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00003357
3358 // Save result for postfix expressions.
3359 if (expr->is_postfix()) {
3360 if (!context()->IsEffect()) {
3361 // Save the result on the stack. If we have a named or keyed property
3362 // we store the result under the receiver that is currently on top
3363 // of the stack.
3364 switch (assign_type) {
3365 case VARIABLE:
Ben Murdoch097c5b22016-05-18 11:27:45 +01003366 PushOperand(eax);
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00003367 break;
3368 case NAMED_PROPERTY:
3369 __ mov(Operand(esp, kPointerSize), eax);
3370 break;
3371 case NAMED_SUPER_PROPERTY:
3372 __ mov(Operand(esp, 2 * kPointerSize), eax);
3373 break;
3374 case KEYED_PROPERTY:
3375 __ mov(Operand(esp, 2 * kPointerSize), eax);
3376 break;
3377 case KEYED_SUPER_PROPERTY:
3378 __ mov(Operand(esp, 3 * kPointerSize), eax);
3379 break;
3380 }
3381 }
3382 }
3383
3384 SetExpressionPosition(expr);
3385
3386 // Call stub for +1/-1.
3387 __ bind(&stub_call);
3388 __ mov(edx, eax);
3389 __ mov(eax, Immediate(Smi::FromInt(1)));
Ben Murdoch097c5b22016-05-18 11:27:45 +01003390 Handle<Code> code =
3391 CodeFactory::BinaryOpIC(isolate(), expr->binary_op()).code();
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00003392 CallIC(code, expr->CountBinOpFeedbackId());
3393 patch_site.EmitPatchInfo();
3394 __ bind(&done);
3395
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00003396 // Store the value returned in eax.
3397 switch (assign_type) {
3398 case VARIABLE:
3399 if (expr->is_postfix()) {
3400 // Perform the assignment as if via '='.
3401 { EffectContext context(this);
3402 EmitVariableAssignment(expr->expression()->AsVariableProxy()->var(),
3403 Token::ASSIGN, expr->CountSlot());
Ben Murdochc5610432016-08-08 18:44:38 +01003404 PrepareForBailoutForId(expr->AssignmentId(),
3405 BailoutState::TOS_REGISTER);
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00003406 context.Plug(eax);
3407 }
3408 // For all contexts except EffectContext We have the result on
3409 // top of the stack.
3410 if (!context()->IsEffect()) {
3411 context()->PlugTOS();
3412 }
3413 } else {
3414 // Perform the assignment as if via '='.
3415 EmitVariableAssignment(expr->expression()->AsVariableProxy()->var(),
3416 Token::ASSIGN, expr->CountSlot());
Ben Murdochc5610432016-08-08 18:44:38 +01003417 PrepareForBailoutForId(expr->AssignmentId(),
3418 BailoutState::TOS_REGISTER);
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00003419 context()->Plug(eax);
3420 }
3421 break;
3422 case NAMED_PROPERTY: {
3423 __ mov(StoreDescriptor::NameRegister(),
3424 prop->key()->AsLiteral()->value());
Ben Murdoch097c5b22016-05-18 11:27:45 +01003425 PopOperand(StoreDescriptor::ReceiverRegister());
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00003426 EmitLoadStoreICSlot(expr->CountSlot());
3427 CallStoreIC();
Ben Murdochc5610432016-08-08 18:44:38 +01003428 PrepareForBailoutForId(expr->AssignmentId(), BailoutState::TOS_REGISTER);
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00003429 if (expr->is_postfix()) {
3430 if (!context()->IsEffect()) {
3431 context()->PlugTOS();
3432 }
3433 } else {
3434 context()->Plug(eax);
3435 }
3436 break;
3437 }
3438 case NAMED_SUPER_PROPERTY: {
3439 EmitNamedSuperPropertyStore(prop);
3440 if (expr->is_postfix()) {
3441 if (!context()->IsEffect()) {
3442 context()->PlugTOS();
3443 }
3444 } else {
3445 context()->Plug(eax);
3446 }
3447 break;
3448 }
3449 case KEYED_SUPER_PROPERTY: {
3450 EmitKeyedSuperPropertyStore(prop);
3451 if (expr->is_postfix()) {
3452 if (!context()->IsEffect()) {
3453 context()->PlugTOS();
3454 }
3455 } else {
3456 context()->Plug(eax);
3457 }
3458 break;
3459 }
3460 case KEYED_PROPERTY: {
Ben Murdoch097c5b22016-05-18 11:27:45 +01003461 PopOperand(StoreDescriptor::NameRegister());
3462 PopOperand(StoreDescriptor::ReceiverRegister());
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00003463 Handle<Code> ic =
3464 CodeFactory::KeyedStoreIC(isolate(), language_mode()).code();
3465 EmitLoadStoreICSlot(expr->CountSlot());
3466 CallIC(ic);
Ben Murdochc5610432016-08-08 18:44:38 +01003467 PrepareForBailoutForId(expr->AssignmentId(), BailoutState::TOS_REGISTER);
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00003468 if (expr->is_postfix()) {
3469 // Result is on the stack
3470 if (!context()->IsEffect()) {
3471 context()->PlugTOS();
3472 }
3473 } else {
3474 context()->Plug(eax);
3475 }
3476 break;
3477 }
3478 }
3479}
3480
3481
// Emits the fast path for `typeof sub_expr == "<check>"` comparisons where
// the right-hand side is a string literal. Each known typeof string gets a
// direct map/instance-type test instead of materializing the typeof string;
// an unknown string is statically false. The outcome is split into the
// surrounding test context (if_true/if_false/fall_through).
void FullCodeGenerator::EmitLiteralCompareTypeof(Expression* expr,
                                                 Expression* sub_expr,
                                                 Handle<String> check) {
  Label materialize_true, materialize_false;
  Label* if_true = NULL;
  Label* if_false = NULL;
  Label* fall_through = NULL;
  context()->PrepareTest(&materialize_true, &materialize_false,
                         &if_true, &if_false, &fall_through);

  { AccumulatorValueContext context(this);
    VisitForTypeofValue(sub_expr);
  }
  PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);

  Factory* factory = isolate()->factory();
  if (String::Equals(check, factory->number_string())) {
    // Smis and heap numbers are "number".
    __ JumpIfSmi(eax, if_true);
    __ cmp(FieldOperand(eax, HeapObject::kMapOffset),
           isolate()->factory()->heap_number_map());
    Split(equal, if_true, if_false, fall_through);
  } else if (String::Equals(check, factory->string_string())) {
    __ JumpIfSmi(eax, if_false);
    __ CmpObjectType(eax, FIRST_NONSTRING_TYPE, edx);
    Split(below, if_true, if_false, fall_through);
  } else if (String::Equals(check, factory->symbol_string())) {
    __ JumpIfSmi(eax, if_false);
    __ CmpObjectType(eax, SYMBOL_TYPE, edx);
    Split(equal, if_true, if_false, fall_through);
  } else if (String::Equals(check, factory->boolean_string())) {
    __ cmp(eax, isolate()->factory()->true_value());
    __ j(equal, if_true);
    __ cmp(eax, isolate()->factory()->false_value());
    Split(equal, if_true, if_false, fall_through);
  } else if (String::Equals(check, factory->undefined_string())) {
    // null is excluded explicitly even though it is not undetectable.
    __ cmp(eax, isolate()->factory()->null_value());
    __ j(equal, if_false);
    __ JumpIfSmi(eax, if_false);
    // Check for undetectable objects => true.
    __ mov(edx, FieldOperand(eax, HeapObject::kMapOffset));
    __ test_b(FieldOperand(edx, Map::kBitFieldOffset),
              Immediate(1 << Map::kIsUndetectable));
    Split(not_zero, if_true, if_false, fall_through);
  } else if (String::Equals(check, factory->function_string())) {
    __ JumpIfSmi(eax, if_false);
    // Check for callable and not undetectable objects => true.
    __ mov(edx, FieldOperand(eax, HeapObject::kMapOffset));
    __ movzx_b(ecx, FieldOperand(edx, Map::kBitFieldOffset));
    __ and_(ecx, (1 << Map::kIsCallable) | (1 << Map::kIsUndetectable));
    __ cmp(ecx, 1 << Map::kIsCallable);
    Split(equal, if_true, if_false, fall_through);
  } else if (String::Equals(check, factory->object_string())) {
    __ JumpIfSmi(eax, if_false);
    __ cmp(eax, isolate()->factory()->null_value());
    __ j(equal, if_true);
    STATIC_ASSERT(LAST_JS_RECEIVER_TYPE == LAST_TYPE);
    __ CmpObjectType(eax, FIRST_JS_RECEIVER_TYPE, edx);
    __ j(below, if_false);
    // Check for callable or undetectable objects => false.
    __ test_b(FieldOperand(edx, Map::kBitFieldOffset),
              Immediate((1 << Map::kIsCallable) | (1 << Map::kIsUndetectable)));
    Split(zero, if_true, if_false, fall_through);
// clang-format off
// The macro expands to additional `} else if` arms of the chain above, one
// per SIMD128 type, each testing against that type's map.
#define SIMD128_TYPE(TYPE, Type, type, lane_count, lane_type) \
  } else if (String::Equals(check, factory->type##_string())) { \
    __ JumpIfSmi(eax, if_false); \
    __ cmp(FieldOperand(eax, HeapObject::kMapOffset), \
           isolate()->factory()->type##_map()); \
    Split(equal, if_true, if_false, fall_through);
  SIMD128_TYPES(SIMD128_TYPE)
#undef SIMD128_TYPE
  // clang-format on
  } else {
    // Unknown typeof string: statically false.
    if (if_false != fall_through) __ jmp(if_false);
  }
  context()->Plug(if_true, if_false);
}
3559
3560
// Emits code for comparison expressions. `in` and `instanceof` go through
// dedicated stubs; all other operators try an inlined smi compare (patchable
// via JumpPatchSite) before falling back to the CompareIC.
void FullCodeGenerator::VisitCompareOperation(CompareOperation* expr) {
  Comment cmnt(masm_, "[ CompareOperation");

  // First we try a fast inlined version of the compare when one of
  // the operands is a literal.
  if (TryLiteralCompare(expr)) return;

  // Always perform the comparison for its control flow. Pack the result
  // into the expression's context after the comparison is performed.
  Label materialize_true, materialize_false;
  Label* if_true = NULL;
  Label* if_false = NULL;
  Label* fall_through = NULL;
  context()->PrepareTest(&materialize_true, &materialize_false,
                         &if_true, &if_false, &fall_through);

  Token::Value op = expr->op();
  VisitForStackValue(expr->left());
  switch (op) {
    case Token::IN:
      VisitForStackValue(expr->right());
      SetExpressionPosition(expr);
      EmitHasProperty();
      PrepareForBailoutBeforeSplit(expr, false, NULL, NULL);
      __ cmp(eax, isolate()->factory()->true_value());
      Split(equal, if_true, if_false, fall_through);
      break;

    case Token::INSTANCEOF: {
      VisitForAccumulatorValue(expr->right());
      SetExpressionPosition(expr);
      PopOperand(edx);  // Left operand into edx; right is already in eax.
      InstanceOfStub stub(isolate());
      __ CallStub(&stub);
      PrepareForBailoutBeforeSplit(expr, false, NULL, NULL);
      __ cmp(eax, isolate()->factory()->true_value());
      Split(equal, if_true, if_false, fall_through);
      break;
    }

    default: {
      VisitForAccumulatorValue(expr->right());
      SetExpressionPosition(expr);
      Condition cc = CompareIC::ComputeCondition(op);
      PopOperand(edx);  // Left operand into edx; right is in eax.

      bool inline_smi_code = ShouldInlineSmiCase(op);
      JumpPatchSite patch_site(masm_);
      if (inline_smi_code) {
        // If both operands are smis, compare them directly and skip the IC.
        Label slow_case;
        __ mov(ecx, edx);
        __ or_(ecx, eax);
        patch_site.EmitJumpIfNotSmi(ecx, &slow_case, Label::kNear);
        __ cmp(edx, eax);
        Split(cc, if_true, if_false, NULL);
        __ bind(&slow_case);
      }

      Handle<Code> ic = CodeFactory::CompareIC(isolate(), op).code();
      CallIC(ic, expr->CompareOperationFeedbackId());
      patch_site.EmitPatchInfo();

      PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
      __ test(eax, eax);
      Split(cc, if_true, if_false, fall_through);
    }
  }

  // Convert the result of the comparison into one expected for this
  // expression's context.
  context()->Plug(if_true, if_false);
}
3633
3634
3635void FullCodeGenerator::EmitLiteralCompareNil(CompareOperation* expr,
3636 Expression* sub_expr,
3637 NilValue nil) {
3638 Label materialize_true, materialize_false;
3639 Label* if_true = NULL;
3640 Label* if_false = NULL;
3641 Label* fall_through = NULL;
3642 context()->PrepareTest(&materialize_true, &materialize_false,
3643 &if_true, &if_false, &fall_through);
3644
3645 VisitForAccumulatorValue(sub_expr);
3646 PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
3647
3648 Handle<Object> nil_value = nil == kNullValue
3649 ? isolate()->factory()->null_value()
3650 : isolate()->factory()->undefined_value();
3651 if (expr->op() == Token::EQ_STRICT) {
3652 __ cmp(eax, nil_value);
3653 Split(equal, if_true, if_false, fall_through);
3654 } else {
Ben Murdochda12d292016-06-02 14:46:10 +01003655 __ JumpIfSmi(eax, if_false);
3656 __ mov(eax, FieldOperand(eax, HeapObject::kMapOffset));
3657 __ test_b(FieldOperand(eax, Map::kBitFieldOffset),
3658 Immediate(1 << Map::kIsUndetectable));
3659 Split(not_zero, if_true, if_false, fall_through);
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00003660 }
3661 context()->Plug(if_true, if_false);
3662}
3663
3664
// The register holding the expression result (the accumulator) on ia32.
Register FullCodeGenerator::result_register() {
  return eax;
}
3668
3669
// The register holding the current JavaScript context on ia32.
Register FullCodeGenerator::context_register() {
  return esi;
}
3673
// Loads the stack-frame slot at ebp + frame_offset into |value|.
void FullCodeGenerator::LoadFromFrameField(int frame_offset, Register value) {
  // Frame slots must be pointer-aligned.
  DCHECK_EQ(POINTER_SIZE_ALIGN(frame_offset), frame_offset);
  __ mov(value, Operand(ebp, frame_offset));
}
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00003678
// Stores |value| into the stack-frame slot at ebp + frame_offset.
void FullCodeGenerator::StoreToFrameField(int frame_offset, Register value) {
  // Frame slots must be pointer-aligned.
  DCHECK_EQ(POINTER_SIZE_ALIGN(frame_offset), frame_offset);
  __ mov(Operand(ebp, frame_offset), value);
}
3683
3684
// Loads slot |context_index| of the current context (esi) into |dst|.
void FullCodeGenerator::LoadContextField(Register dst, int context_index) {
  __ mov(dst, ContextOperand(esi, context_index));
}
3688
3689
// Pushes the closure to be used for context allocation. Which closure that is
// depends on the kind of scope enclosing the code being generated.
void FullCodeGenerator::PushFunctionArgumentForContextAllocation() {
  Scope* closure_scope = scope()->ClosureScope();
  if (closure_scope->is_script_scope() ||
      closure_scope->is_module_scope()) {
    // Contexts nested in the native context have a canonical empty function
    // as their closure, not the anonymous closure containing the global
    // code.
    __ mov(eax, NativeContextOperand());
    PushOperand(ContextOperand(eax, Context::CLOSURE_INDEX));
  } else if (closure_scope->is_eval_scope()) {
    // Contexts nested inside eval code have the same closure as the context
    // calling eval, not the anonymous closure containing the eval code.
    // Fetch it from the context.
    PushOperand(ContextOperand(esi, Context::CLOSURE_INDEX));
  } else {
    // Function scope: the closure is the function in the current frame.
    DCHECK(closure_scope->is_function_scope());
    PushOperand(Operand(ebp, JavaScriptFrameConstants::kFunctionOffset));
  }
}
3709
3710
3711// ----------------------------------------------------------------------------
3712// Non-local control flow support.
3713
// On entry to a finally block: saves the isolate's pending message on the
// operand stack (so ExitFinallyBlock can restore it) and clears it.
void FullCodeGenerator::EnterFinallyBlock() {
  // Store pending message while executing finally block.
  ExternalReference pending_message_obj =
      ExternalReference::address_of_pending_message_obj(isolate());
  __ mov(edx, Operand::StaticVariable(pending_message_obj));
  PushOperand(edx);

  ClearPendingMessage();
}
3723
3724
// On exit from a finally block: restores the pending message that
// EnterFinallyBlock saved on the operand stack.
void FullCodeGenerator::ExitFinallyBlock() {
  // edx is used as scratch; it must not alias the accumulator.
  DCHECK(!result_register().is(edx));
  // Restore pending message from stack.
  PopOperand(edx);
  ExternalReference pending_message_obj =
      ExternalReference::address_of_pending_message_obj(isolate());
  __ mov(Operand::StaticVariable(pending_message_obj), edx);
}
3733
3734
// Resets the isolate's pending message slot to the hole value.
void FullCodeGenerator::ClearPendingMessage() {
  // edx is used as scratch; it must not alias the accumulator.
  DCHECK(!result_register().is(edx));
  ExternalReference pending_message_obj =
      ExternalReference::address_of_pending_message_obj(isolate());
  __ mov(edx, Immediate(isolate()->factory()->the_hole_value()));
  __ mov(Operand::StaticVariable(pending_message_obj), edx);
}
3742
3743
Ben Murdoch097c5b22016-05-18 11:27:45 +01003744void FullCodeGenerator::DeferredCommands::EmitCommands() {
3745 DCHECK(!result_register().is(edx));
3746 __ Pop(result_register()); // Restore the accumulator.
3747 __ Pop(edx); // Get the token.
3748 for (DeferredCommand cmd : commands_) {
3749 Label skip;
3750 __ cmp(edx, Immediate(Smi::FromInt(cmd.token)));
3751 __ j(not_equal, &skip);
3752 switch (cmd.command) {
3753 case kReturn:
3754 codegen_->EmitUnwindAndReturn();
3755 break;
3756 case kThrow:
3757 __ Push(result_register());
3758 __ CallRuntime(Runtime::kReThrow);
3759 break;
3760 case kContinue:
3761 codegen_->EmitContinue(cmd.target);
3762 break;
3763 case kBreak:
3764 codegen_->EmitBreak(cmd.target);
3765 break;
3766 }
3767 __ bind(&skip);
3768 }
3769}
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00003770
3771#undef __
3772
3773
// Byte patterns used by BackEdgeTable::PatchAt/GetBackEdgeState below to flip
// the back-edge check between "jns ok" (interrupt check active) and a
// two-byte nop (fall through into the on-stack-replacement call).
static const byte kJnsInstruction = 0x79;  // Opcode of short "jns rel8".
static const byte kJnsOffset = 0x11;       // Displacement byte of that jump.
static const byte kNopByteOne = 0x66;      // Operand-size prefix of 2-byte nop.
static const byte kNopByteTwo = 0x90;      // nop opcode.
#ifdef DEBUG
static const byte kCallInstruction = 0xe8;  // Opcode of "call rel32".
#endif
3781
3782
// Patches the back-edge sequence at |pc| in |unoptimized_code| to the given
// state: INTERRUPT re-enables the "jns" skip over the interrupt-stub call;
// ON_STACK_REPLACEMENT overwrites the jump with a two-byte nop so execution
// always falls into the (retargeted) call. The call target itself is then
// redirected to |replacement_code|.
void BackEdgeTable::PatchAt(Code* unoptimized_code,
                            Address pc,
                            BackEdgeState target_state,
                            Code* replacement_code) {
  // |pc| points just past the call's 32-bit displacement; work backwards to
  // locate the jump's opcode and offset bytes.
  Address call_target_address = pc - kIntSize;
  Address jns_instr_address = call_target_address - 3;
  Address jns_offset_address = call_target_address - 2;

  switch (target_state) {
    case INTERRUPT:
      // sub <profiling_counter>, <delta> ;; Not changed
      // jns ok
      // call <interrupt stub>
      // ok:
      *jns_instr_address = kJnsInstruction;
      *jns_offset_address = kJnsOffset;
      break;
    case ON_STACK_REPLACEMENT:
      // sub <profiling_counter>, <delta> ;; Not changed
      // nop
      // nop
      // call <on-stack replacement>
      // ok:
      *jns_instr_address = kNopByteOne;
      *jns_offset_address = kNopByteTwo;
      break;
  }

  // Retarget the call and tell the incremental marker about the new target.
  Assembler::set_target_address_at(unoptimized_code->GetIsolate(),
                                   call_target_address, unoptimized_code,
                                   replacement_code->entry());
  unoptimized_code->GetHeap()->incremental_marking()->RecordCodeTargetPatch(
      unoptimized_code, call_target_address, replacement_code);
}
3817
3818
// Inspects the patched bytes of the back-edge sequence at |pc| and reports
// its current state: INTERRUPT if the "jns" skip is present, otherwise
// ON_STACK_REPLACEMENT (the jump was replaced by a two-byte nop).
BackEdgeTable::BackEdgeState BackEdgeTable::GetBackEdgeState(
    Isolate* isolate,
    Code* unoptimized_code,
    Address pc) {
  // Mirror of the address arithmetic in PatchAt above.
  Address call_target_address = pc - kIntSize;
  Address jns_instr_address = call_target_address - 3;
  DCHECK_EQ(kCallInstruction, *(call_target_address - 1));

  if (*jns_instr_address == kJnsInstruction) {
    DCHECK_EQ(kJnsOffset, *(call_target_address - 2));
    DCHECK_EQ(isolate->builtins()->InterruptCheck()->entry(),
              Assembler::target_address_at(call_target_address,
                                           unoptimized_code));
    return INTERRUPT;
  }

  DCHECK_EQ(kNopByteOne, *jns_instr_address);
  DCHECK_EQ(kNopByteTwo, *(call_target_address - 2));

  DCHECK_EQ(
      isolate->builtins()->OnStackReplacement()->entry(),
      Assembler::target_address_at(call_target_address, unoptimized_code));
  return ON_STACK_REPLACEMENT;
}
3843
3844
3845} // namespace internal
3846} // namespace v8
3847
3848#endif // V8_TARGET_ARCH_IA32