blob: f1945c897cf2e2fed0849e69e9330e467446bcdf [file] [log] [blame]
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00001// Copyright 2012 the V8 project authors. All rights reserved.
2// Use of this source code is governed by a BSD-style license that can be
3// found in the LICENSE file.
4
5#if V8_TARGET_ARCH_IA32
6
7#include "src/ast/scopes.h"
8#include "src/code-factory.h"
9#include "src/code-stubs.h"
10#include "src/codegen.h"
11#include "src/debug/debug.h"
12#include "src/full-codegen/full-codegen.h"
13#include "src/ia32/frames-ia32.h"
14#include "src/ic/ic.h"
15#include "src/parsing/parser.h"
16
17namespace v8 {
18namespace internal {
19
Ben Murdoch097c5b22016-05-18 11:27:45 +010020#define __ ACCESS_MASM(masm())
Ben Murdoch4a90d5f2016-03-22 12:00:34 +000021
// Helper for emitting an inlined smi-check jump whose condition can later be
// patched (jc <-> jz, jnc <-> jnz — see the comment on EmitJump).  The
// location of the jump is recorded by EmitPatchInfo so the patching code can
// find it; presumably the IC system consumes this info — confirm in ic-ia32.
class JumpPatchSite BASE_EMBEDDED {
 public:
  explicit JumpPatchSite(MacroAssembler* masm) : masm_(masm) {
#ifdef DEBUG
    info_emitted_ = false;
#endif
  }

  ~JumpPatchSite() {
    // If a patchable jump was emitted, patch info must have been emitted too.
    DCHECK(patch_site_.is_bound() == info_emitted_);
  }

  // Branch to |target| when |reg| is not a smi (smi tag bit set -> not carry
  // after the test below).
  void EmitJumpIfNotSmi(Register reg,
                        Label* target,
                        Label::Distance distance = Label::kFar) {
    __ test(reg, Immediate(kSmiTagMask));
    EmitJump(not_carry, target, distance);  // Always taken before patched.
  }

  // Branch to |target| when |reg| is a smi.
  void EmitJumpIfSmi(Register reg,
                     Label* target,
                     Label::Distance distance = Label::kFar) {
    __ test(reg, Immediate(kSmiTagMask));
    EmitJump(carry, target, distance);  // Never taken before patched.
  }

  // Record the distance from the patch site in a 'test eax, imm8'
  // instruction (the delta must fit in a byte), or emit a nop when no
  // patchable jump was generated for this site.
  void EmitPatchInfo() {
    if (patch_site_.is_bound()) {
      int delta_to_patch_site = masm_->SizeOfCodeGeneratedSince(&patch_site_);
      DCHECK(is_uint8(delta_to_patch_site));
      __ test(eax, Immediate(delta_to_patch_site));
#ifdef DEBUG
      info_emitted_ = true;
#endif
    } else {
      __ nop();  // Signals no inlined code.
    }
  }

 private:
  // jc will be patched with jz, jnc will become jnz.
  void EmitJump(Condition cc, Label* target, Label::Distance distance) {
    DCHECK(!patch_site_.is_bound() && !info_emitted_);
    DCHECK(cc == carry || cc == not_carry);
    __ bind(&patch_site_);
    __ j(cc, target, distance);
  }

  MacroAssembler* masm() { return masm_; }
  MacroAssembler* masm_;
  Label patch_site_;       // Bound at the (single) patchable jump, if any.
#ifdef DEBUG
  bool info_emitted_;      // Tracks that EmitPatchInfo ran before destruction.
#endif
};
77
78
// Generate code for a JS function. On entry to the function the receiver
// and arguments have been pushed on the stack left to right, with the
// return address on top of them. The actual argument count matches the
// formal parameter count expected by the function.
//
// The live registers are:
//   o edi: the JS function object being called (i.e. ourselves)
//   o edx: the new target value
//   o esi: our context
//   o ebp: our caller's frame pointer
//   o esp: stack pointer (pointing to return address)
//
// The function builds a JS frame. Please see JavaScriptFrameConstants in
// frames-ia32.h for its layout.
void FullCodeGenerator::Generate() {
  CompilationInfo* info = info_;
  // Interrupt budget for back-edge/return profiling lives in a heap cell so
  // it can be decremented by generated code (see the profiling counter
  // helpers below).
  profiling_counter_ = isolate()->factory()->NewCell(
      Handle<Smi>(Smi::FromInt(FLAG_interrupt_budget), isolate()));
  SetFunctionPosition(literal());
  Comment cmnt(masm_, "[ function compiled by full code generator");

  ProfileEntryHookStub::MaybeCallEntryHook(masm_);

  // Debug-mode sanity check: functions that expect a JSReceiver receiver
  // must actually have been given one.
  if (FLAG_debug_code && info->ExpectsJSReceiverAsReceiver()) {
    int receiver_offset = (info->scope()->num_parameters() + 1) * kPointerSize;
    __ mov(ecx, Operand(esp, receiver_offset));
    __ AssertNotSmi(ecx);
    __ CmpObjectType(ecx, FIRST_JS_RECEIVER_TYPE, ecx);
    __ Assert(above_equal, kSloppyFunctionExpectsJSReceiverReceiver);
  }

  // Open a frame scope to indicate that there is a frame on the stack.  The
  // MANUAL indicates that the scope shouldn't actually generate code to set up
  // the frame (that is done below).
  FrameScope frame_scope(masm_, StackFrame::MANUAL);

  info->set_prologue_offset(masm_->pc_offset());
  __ Prologue(info->GeneratePreagedPrologue());

  { Comment cmnt(masm_, "[ Allocate locals");
    int locals_count = info->scope()->num_stack_slots();
    // Generators allocate locals, if any, in context slots.
    DCHECK(!IsGeneratorFunction(literal()->kind()) || locals_count == 0);
    OperandStackDepthIncrement(locals_count);
    if (locals_count == 1) {
      __ push(Immediate(isolate()->factory()->undefined_value()));
    } else if (locals_count > 1) {
      // For large frames, check against the real stack limit before pushing
      // so we throw a StackOverflow instead of faulting.
      if (locals_count >= 128) {
        Label ok;
        __ mov(ecx, esp);
        __ sub(ecx, Immediate(locals_count * kPointerSize));
        ExternalReference stack_limit =
            ExternalReference::address_of_real_stack_limit(isolate());
        __ cmp(ecx, Operand::StaticVariable(stack_limit));
        __ j(above_equal, &ok, Label::kNear);
        __ CallRuntime(Runtime::kThrowStackOverflow);
        __ bind(&ok);
      }
      __ mov(eax, Immediate(isolate()->factory()->undefined_value()));
      // Emit at most kMaxPushes straight-line pushes; beyond that, loop to
      // keep code size bounded.
      const int kMaxPushes = 32;
      if (locals_count >= kMaxPushes) {
        int loop_iterations = locals_count / kMaxPushes;
        __ mov(ecx, loop_iterations);
        Label loop_header;
        __ bind(&loop_header);
        // Do pushes.
        for (int i = 0; i < kMaxPushes; i++) {
          __ push(eax);
        }
        __ dec(ecx);
        __ j(not_zero, &loop_header, Label::kNear);
      }
      int remaining = locals_count % kMaxPushes;
      // Emit the remaining pushes.
      for (int i = 0; i < remaining; i++) {
        __ push(eax);
      }
    }
  }

  bool function_in_register = true;

  // Possibly allocate a local context.
  if (info->scope()->num_heap_slots() > 0) {
    Comment cmnt(masm_, "[ Allocate context");
    bool need_write_barrier = true;
    int slots = info->scope()->num_heap_slots() - Context::MIN_CONTEXT_SLOTS;
    // Argument to NewContext is the function, which is still in edi.
    if (info->scope()->is_script_scope()) {
      __ push(edi);
      __ Push(info->scope()->GetScopeInfo(info->isolate()));
      __ CallRuntime(Runtime::kNewScriptContext);
      PrepareForBailoutForId(BailoutId::ScriptContext(), TOS_REG);
      // The new target value is not used, clobbering is safe.
      DCHECK_NULL(info->scope()->new_target_var());
    } else {
      if (info->scope()->new_target_var() != nullptr) {
        __ push(edx);  // Preserve new target.
      }
      if (slots <= FastNewContextStub::kMaximumSlots) {
        FastNewContextStub stub(isolate(), slots);
        __ CallStub(&stub);
        // Result of FastNewContextStub is always in new space.
        need_write_barrier = false;
      } else {
        __ push(edi);
        __ CallRuntime(Runtime::kNewFunctionContext);
      }
      if (info->scope()->new_target_var() != nullptr) {
        __ pop(edx);  // Restore new target.
      }
    }
    function_in_register = false;
    // Context is returned in eax.  It replaces the context passed to us.
    // It's saved in the stack and kept live in esi.
    __ mov(esi, eax);
    __ mov(Operand(ebp, StandardFrameConstants::kContextOffset), eax);

    // Copy parameters into context if necessary.
    int num_parameters = info->scope()->num_parameters();
    int first_parameter = info->scope()->has_this_declaration() ? -1 : 0;
    for (int i = first_parameter; i < num_parameters; i++) {
      // Slot -1 is the receiver ('this'); 0..n-1 are the formal parameters.
      Variable* var = (i == -1) ? scope()->receiver() : scope()->parameter(i);
      if (var->IsContextSlot()) {
        int parameter_offset = StandardFrameConstants::kCallerSPOffset +
            (num_parameters - 1 - i) * kPointerSize;
        // Load parameter from stack.
        __ mov(eax, Operand(ebp, parameter_offset));
        // Store it in the context.
        int context_offset = Context::SlotOffset(var->index());
        __ mov(Operand(esi, context_offset), eax);
        // Update the write barrier. This clobbers eax and ebx.
        if (need_write_barrier) {
          __ RecordWriteContextSlot(esi,
                                    context_offset,
                                    eax,
                                    ebx,
                                    kDontSaveFPRegs);
        } else if (FLAG_debug_code) {
          Label done;
          __ JumpIfInNewSpace(esi, eax, &done, Label::kNear);
          __ Abort(kExpectedNewSpaceObject);
          __ bind(&done);
        }
      }
    }
  }

  // Register holding this function and new target are both trashed in case we
  // bailout here. But since that can happen only when new target is not used
  // and we allocate a context, the value of |function_in_register| is correct.
  PrepareForBailoutForId(BailoutId::FunctionContext(), NO_REGISTERS);

  // Possibly set up a local binding to the this function which is used in
  // derived constructors with super calls.
  Variable* this_function_var = scope()->this_function_var();
  if (this_function_var != nullptr) {
    Comment cmnt(masm_, "[ This function");
    if (!function_in_register) {
      __ mov(edi, Operand(ebp, JavaScriptFrameConstants::kFunctionOffset));
      // The write barrier clobbers register again, keep it marked as such.
    }
    SetVar(this_function_var, edi, ebx, ecx);
  }

  // Possibly set up a local binding to the new target value.
  Variable* new_target_var = scope()->new_target_var();
  if (new_target_var != nullptr) {
    Comment cmnt(masm_, "[ new.target");
    SetVar(new_target_var, edx, ebx, ecx);
  }

  // Possibly allocate RestParameters
  int rest_index;
  Variable* rest_param = scope()->rest_parameter(&rest_index);
  if (rest_param) {
    Comment cmnt(masm_, "[ Allocate rest parameter array");
    if (!function_in_register) {
      __ mov(edi, Operand(ebp, JavaScriptFrameConstants::kFunctionOffset));
    }
    FastNewRestParameterStub stub(isolate());
    __ CallStub(&stub);
    function_in_register = false;  // The stub may clobber edi.
    SetVar(rest_param, eax, ebx, edx);
  }

  Variable* arguments = scope()->arguments();
  if (arguments != NULL) {
    // Arguments object must be allocated after the context object, in
    // case the "arguments" or ".arguments" variables are in the context.
    Comment cmnt(masm_, "[ Allocate arguments object");
    if (!function_in_register) {
      __ mov(edi, Operand(ebp, JavaScriptFrameConstants::kFunctionOffset));
    }
    if (is_strict(language_mode()) || !has_simple_parameters()) {
      FastNewStrictArgumentsStub stub(isolate());
      __ CallStub(&stub);
    } else if (literal()->has_duplicate_parameters()) {
      // Slow path: duplicate parameter names need the generic runtime call.
      __ Push(edi);
      __ CallRuntime(Runtime::kNewSloppyArguments_Generic);
    } else {
      FastNewSloppyArgumentsStub stub(isolate());
      __ CallStub(&stub);
    }

    SetVar(arguments, eax, ebx, edx);
  }

  if (FLAG_trace) {
    __ CallRuntime(Runtime::kTraceEnter);
  }

  // Visit the declarations and body.
  PrepareForBailoutForId(BailoutId::FunctionEntry(), NO_REGISTERS);
  {
    Comment cmnt(masm_, "[ Declarations");
    VisitDeclarations(scope()->declarations());
  }

  // Assert that the declarations do not use ICs. Otherwise the debugger
  // won't be able to redirect a PC at an IC to the correct IC in newly
  // recompiled code.
  DCHECK_EQ(0, ic_total_count_);

  {
    Comment cmnt(masm_, "[ Stack check");
    PrepareForBailoutForId(BailoutId::Declarations(), NO_REGISTERS);
    Label ok;
    ExternalReference stack_limit =
        ExternalReference::address_of_stack_limit(isolate());
    __ cmp(esp, Operand::StaticVariable(stack_limit));
    __ j(above_equal, &ok, Label::kNear);
    __ call(isolate()->builtins()->StackCheck(), RelocInfo::CODE_TARGET);
    __ bind(&ok);
  }

  {
    Comment cmnt(masm_, "[ Body");
    DCHECK(loop_depth() == 0);
    VisitStatements(literal()->body());
    DCHECK(loop_depth() == 0);
  }

  // Always emit a 'return undefined' in case control fell off the end of
  // the body.
  { Comment cmnt(masm_, "[ return <undefined>;");
    __ mov(eax, isolate()->factory()->undefined_value());
    EmitReturnSequence();
  }
}
329
330
// Reset the accumulator (eax) to a known smi value so no stale heap pointer
// survives in the register.
void FullCodeGenerator::ClearAccumulator() {
  __ Move(eax, Immediate(Smi::FromInt(0)));
}
334
335
// Subtract |delta| (as a smi) from the profiling counter cell allocated in
// Generate().  Clobbers ebx; leaves the CPU flags set by the subtraction,
// which callers branch on.
void FullCodeGenerator::EmitProfilingCounterDecrement(int delta) {
  __ mov(ebx, Immediate(profiling_counter_));
  __ sub(FieldOperand(ebx, Cell::kValueOffset),
         Immediate(Smi::FromInt(delta)));
}
341
342
343void FullCodeGenerator::EmitProfilingCounterReset() {
344 int reset_value = FLAG_interrupt_budget;
345 __ mov(ebx, Immediate(profiling_counter_));
346 __ mov(FieldOperand(ebx, Cell::kValueOffset),
347 Immediate(Smi::FromInt(reset_value)));
348}
349
350
// Emit the per-back-edge interrupt/OSR bookkeeping for a loop: decrement the
// profiling counter by a weight proportional to the loop body's code size,
// and call the InterruptCheck builtin when the counter goes negative.
void FullCodeGenerator::EmitBackEdgeBookkeeping(IterationStatement* stmt,
                                                Label* back_edge_target) {
  Comment cmnt(masm_, "[ Back edge bookkeeping");
  Label ok;

  DCHECK(back_edge_target->is_bound());
  // Weight the decrement by the amount of code in the loop body so bigger
  // loops consume the budget faster.
  int distance = masm_->SizeOfCodeGeneratedSince(back_edge_target);
  int weight = Min(kMaxBackEdgeWeight,
                   Max(1, distance / kCodeSizeMultiplier));
  EmitProfilingCounterDecrement(weight);
  __ j(positive, &ok, Label::kNear);
  __ call(isolate()->builtins()->InterruptCheck(), RelocInfo::CODE_TARGET);

  // Record a mapping of this PC offset to the OSR id.  This is used to find
  // the AST id from the unoptimized code in order to use it as a key into
  // the deoptimization input data found in the optimized code.
  RecordBackEdge(stmt->OsrEntryId());

  EmitProfilingCounterReset();

  __ bind(&ok);
  PrepareForBailoutForId(stmt->EntryId(), NO_REGISTERS);
  // Record a mapping of the OSR id to this PC.  This is used if the OSR
  // entry becomes the target of a bailout.  We don't expect it to be, but
  // we want it to work if it is.
  PrepareForBailoutForId(stmt->OsrEntryId(), NO_REGISTERS);
}
378
Ben Murdoch097c5b22016-05-18 11:27:45 +0100379void FullCodeGenerator::EmitProfilingCounterHandlingForReturnSequence(
380 bool is_tail_call) {
381 // Pretend that the exit is a backwards jump to the entry.
382 int weight = 1;
383 if (info_->ShouldSelfOptimize()) {
384 weight = FLAG_interrupt_budget / FLAG_self_opt_count;
385 } else {
386 int distance = masm_->pc_offset();
387 weight = Min(kMaxBackEdgeWeight, Max(1, distance / kCodeSizeMultiplier));
388 }
389 EmitProfilingCounterDecrement(weight);
390 Label ok;
391 __ j(positive, &ok, Label::kNear);
392 // Don't need to save result register if we are going to do a tail call.
393 if (!is_tail_call) {
394 __ push(eax);
395 }
396 __ call(isolate()->builtins()->InterruptCheck(), RelocInfo::CODE_TARGET);
397 if (!is_tail_call) {
398 __ pop(eax);
399 }
400 EmitProfilingCounterReset();
401 __ bind(&ok);
402}
Ben Murdoch4a90d5f2016-03-22 12:00:34 +0000403
// Emit (or jump to the already-emitted) common return sequence.  Expects the
// return value in eax; tears down the frame and pops the arguments.
void FullCodeGenerator::EmitReturnSequence() {
  Comment cmnt(masm_, "[ Return sequence");
  if (return_label_.is_bound()) {
    // The common return sequence was already emitted; just jump to it.
    __ jmp(&return_label_);
  } else {
    // Common return label
    __ bind(&return_label_);
    if (FLAG_trace) {
      // Pass the return value to TraceExit; presumably the runtime call
      // returns it back in eax — confirm against Runtime_TraceExit.
      __ push(eax);
      __ CallRuntime(Runtime::kTraceExit);
    }
    EmitProfilingCounterHandlingForReturnSequence(false);

    SetReturnPosition(literal());
    __ leave();

    // Pop receiver plus formal parameters off the caller's stack; ecx is a
    // scratch register for the return-address shuffle.
    int arg_count = info_->scope()->num_parameters() + 1;
    int arguments_bytes = arg_count * kPointerSize;
    __ Ret(arguments_bytes, ecx);
  }
}
425
426
// Push the value of |var| onto the operand stack.
void FullCodeGenerator::StackValueContext::Plug(Variable* var) const {
  DCHECK(var->IsStackAllocated() || var->IsContextSlot());
  MemOperand operand = codegen()->VarOperand(var, result_register());
  // Memory operands can be pushed directly.
  codegen()->PushOperand(operand);
}
433
434
// Root-list plugging is not used by the ia32 full code generator.
void FullCodeGenerator::EffectContext::Plug(Heap::RootListIndex index) const {
  UNREACHABLE();  // Not used on IA32.
}
438
439
// Root-list plugging is not used by the ia32 full code generator.
void FullCodeGenerator::AccumulatorValueContext::Plug(
    Heap::RootListIndex index) const {
  UNREACHABLE();  // Not used on IA32.
}
444
445
// Root-list plugging is not used by the ia32 full code generator.
void FullCodeGenerator::StackValueContext::Plug(
    Heap::RootListIndex index) const {
  UNREACHABLE();  // Not used on IA32.
}
450
451
// Root-list plugging is not used by the ia32 full code generator.
void FullCodeGenerator::TestContext::Plug(Heap::RootListIndex index) const {
  UNREACHABLE();  // Not used on IA32.
}
455
456
// A literal evaluated for effect produces no code.
void FullCodeGenerator::EffectContext::Plug(Handle<Object> lit) const {
}
459
460
// Load literal |lit| into the accumulator.  Smi immediates go through
// SafeMove; presumably this obfuscates the immediate against JIT spraying —
// confirm in MacroAssembler::SafeMove.
void FullCodeGenerator::AccumulatorValueContext::Plug(
    Handle<Object> lit) const {
  if (lit->IsSmi()) {
    __ SafeMove(result_register(), Immediate(lit));
  } else {
    __ Move(result_register(), Immediate(lit));
  }
}
469
470
// Push literal |lit| onto the operand stack, tracking the depth change.
// Smi immediates go through SafePush (see SafeMove note above AccumulatorValueContext::Plug).
void FullCodeGenerator::StackValueContext::Plug(Handle<Object> lit) const {
  codegen()->OperandStackDepthIncrement(1);
  if (lit->IsSmi()) {
    __ SafePush(Immediate(lit));
  } else {
    __ push(Immediate(lit));
  }
}
479
480
// Branch on the truthiness of literal |lit|.  Most literal kinds can be
// decided at compile time and lowered to a single (possibly elided) jump;
// anything else is materialized in the accumulator and tested dynamically.
void FullCodeGenerator::TestContext::Plug(Handle<Object> lit) const {
  codegen()->PrepareForBailoutBeforeSplit(condition(),
                                          true,
                                          true_label_,
                                          false_label_);
  DCHECK(lit->IsNull() || lit->IsUndefined() || !lit->IsUndetectable());
  if (lit->IsUndefined() || lit->IsNull() || lit->IsFalse()) {
    // Always-false literals.
    if (false_label_ != fall_through_) __ jmp(false_label_);
  } else if (lit->IsTrue() || lit->IsJSObject()) {
    // Always-true literals (JS objects here are never undetectable, per the
    // DCHECK above).
    if (true_label_ != fall_through_) __ jmp(true_label_);
  } else if (lit->IsString()) {
    // Strings are falsy exactly when empty.
    if (String::cast(*lit)->length() == 0) {
      if (false_label_ != fall_through_) __ jmp(false_label_);
    } else {
      if (true_label_ != fall_through_) __ jmp(true_label_);
    }
  } else if (lit->IsSmi()) {
    // Smis are falsy exactly when zero.
    if (Smi::cast(*lit)->value() == 0) {
      if (false_label_ != fall_through_) __ jmp(false_label_);
    } else {
      if (true_label_ != fall_through_) __ jmp(true_label_);
    }
  } else {
    // For simplicity we always test the accumulator register.
    __ mov(result_register(), lit);
    codegen()->DoTest(this);
  }
}
509
510
Ben Murdoch4a90d5f2016-03-22 12:00:34 +0000511void FullCodeGenerator::StackValueContext::DropAndPlug(int count,
512 Register reg) const {
513 DCHECK(count > 0);
Ben Murdoch097c5b22016-05-18 11:27:45 +0100514 if (count > 1) codegen()->DropOperands(count - 1);
Ben Murdoch4a90d5f2016-03-22 12:00:34 +0000515 __ mov(Operand(esp, 0), reg);
516}
517
518
Ben Murdoch4a90d5f2016-03-22 12:00:34 +0000519void FullCodeGenerator::EffectContext::Plug(Label* materialize_true,
520 Label* materialize_false) const {
521 DCHECK(materialize_true == materialize_false);
522 __ bind(materialize_true);
523}
524
525
// Materialize a boolean result in the accumulator: the true path loads the
// true value and jumps over the false path.
void FullCodeGenerator::AccumulatorValueContext::Plug(
    Label* materialize_true,
    Label* materialize_false) const {
  Label done;
  __ bind(materialize_true);
  __ mov(result_register(), isolate()->factory()->true_value());
  __ jmp(&done, Label::kNear);
  __ bind(materialize_false);
  __ mov(result_register(), isolate()->factory()->false_value());
  __ bind(&done);
}
537
538
// Materialize a boolean result on the operand stack (one slot): the true
// path pushes true and jumps over the false path.
void FullCodeGenerator::StackValueContext::Plug(
    Label* materialize_true,
    Label* materialize_false) const {
  codegen()->OperandStackDepthIncrement(1);
  Label done;
  __ bind(materialize_true);
  __ push(Immediate(isolate()->factory()->true_value()));
  __ jmp(&done, Label::kNear);
  __ bind(materialize_false);
  __ push(Immediate(isolate()->factory()->false_value()));
  __ bind(&done);
}
551
552
// In test context the branch targets are already the context's own labels;
// nothing to materialize.
void FullCodeGenerator::TestContext::Plug(Label* materialize_true,
                                          Label* materialize_false) const {
  DCHECK(materialize_true == true_label_);
  DCHECK(materialize_false == false_label_);
}
558
559
560void FullCodeGenerator::AccumulatorValueContext::Plug(bool flag) const {
561 Handle<Object> value = flag
562 ? isolate()->factory()->true_value()
563 : isolate()->factory()->false_value();
564 __ mov(result_register(), value);
565}
566
567
// Push the compile-time boolean |flag| onto the operand stack.
void FullCodeGenerator::StackValueContext::Plug(bool flag) const {
  codegen()->OperandStackDepthIncrement(1);
  Handle<Object> value = flag
      ? isolate()->factory()->true_value()
      : isolate()->factory()->false_value();
  __ push(Immediate(value));
}
575
576
577void FullCodeGenerator::TestContext::Plug(bool flag) const {
578 codegen()->PrepareForBailoutBeforeSplit(condition(),
579 true,
580 true_label_,
581 false_label_);
582 if (flag) {
583 if (true_label_ != fall_through_) __ jmp(true_label_);
584 } else {
585 if (false_label_ != fall_through_) __ jmp(false_label_);
586 }
587}
588
589
// Emit a dynamic truthiness test of the accumulator via the ToBoolean IC,
// then branch: the IC leaves its (normalized) result in the accumulator,
// which is compared against the true value.
void FullCodeGenerator::DoTest(Expression* condition,
                               Label* if_true,
                               Label* if_false,
                               Label* fall_through) {
  Handle<Code> ic = ToBooleanICStub::GetUninitialized(isolate());
  CallIC(ic, condition->test_id());
  __ CompareRoot(result_register(), Heap::kTrueValueRootIndex);
  Split(equal, if_true, if_false, fall_through);
}
599
600
601void FullCodeGenerator::Split(Condition cc,
602 Label* if_true,
603 Label* if_false,
604 Label* fall_through) {
605 if (if_false == fall_through) {
606 __ j(cc, if_true);
607 } else if (if_true == fall_through) {
608 __ j(NegateCondition(cc), if_false);
609 } else {
610 __ j(cc, if_true);
611 __ jmp(if_false);
612 }
613}
614
615
616MemOperand FullCodeGenerator::StackOperand(Variable* var) {
617 DCHECK(var->IsStackAllocated());
618 // Offset is negative because higher indexes are at lower addresses.
619 int offset = -var->index() * kPointerSize;
620 // Adjust by a (parameter or local) base offset.
621 if (var->IsParameter()) {
622 offset += (info_->scope()->num_parameters() + 1) * kPointerSize;
623 } else {
624 offset += JavaScriptFrameConstants::kLocal0Offset;
625 }
626 return Operand(ebp, offset);
627}
628
629
// Compute an operand addressing |var|.  For context slots this emits code
// that walks the context chain into |scratch|; for stack slots no code is
// emitted.
MemOperand FullCodeGenerator::VarOperand(Variable* var, Register scratch) {
  DCHECK(var->IsContextSlot() || var->IsStackAllocated());
  if (var->IsContextSlot()) {
    int context_chain_length = scope()->ContextChainLength(var->scope());
    __ LoadContext(scratch, context_chain_length);
    return ContextOperand(scratch, var->index());
  } else {
    return StackOperand(var);
  }
}
640
641
642void FullCodeGenerator::GetVar(Register dest, Variable* var) {
643 DCHECK(var->IsContextSlot() || var->IsStackAllocated());
644 MemOperand location = VarOperand(var, dest);
645 __ mov(dest, location);
646}
647
648
// Store |src| into |var|.  For context slots a write barrier is emitted,
// clobbering both scratch registers.  All four registers must be distinct
// and none may be esi (the context register) when a barrier is needed.
void FullCodeGenerator::SetVar(Variable* var,
                               Register src,
                               Register scratch0,
                               Register scratch1) {
  DCHECK(var->IsContextSlot() || var->IsStackAllocated());
  DCHECK(!scratch0.is(src));
  DCHECK(!scratch0.is(scratch1));
  DCHECK(!scratch1.is(src));
  MemOperand location = VarOperand(var, scratch0);
  __ mov(location, src);

  // Emit the write barrier code if the location is in the heap.
  if (var->IsContextSlot()) {
    int offset = Context::SlotOffset(var->index());
    DCHECK(!scratch0.is(esi) && !src.is(esi) && !scratch1.is(esi));
    __ RecordWriteContextSlot(scratch0, offset, src, scratch1, kDontSaveFPRegs);
  }
}
667
668
// Record a bailout point before a Split in test context.  When
// |should_normalize| is set, the bailout re-entry path normalizes the
// accumulator to a branch (the straight-line path jumps over it).
void FullCodeGenerator::PrepareForBailoutBeforeSplit(Expression* expr,
                                                     bool should_normalize,
                                                     Label* if_true,
                                                     Label* if_false) {
  // Only prepare for bailouts before splits if we're in a test
  // context. Otherwise, we let the Visit function deal with the
  // preparation to avoid preparing with the same AST id twice.
  if (!context()->IsTest()) return;

  Label skip;
  if (should_normalize) __ jmp(&skip, Label::kNear);
  PrepareForBailout(expr, TOS_REG);
  if (should_normalize) {
    // Reached only via deopt re-entry: turn the materialized boolean in eax
    // back into control flow.
    __ cmp(eax, isolate()->factory()->true_value());
    Split(equal, if_true, if_false, NULL);
    __ bind(&skip);
  }
}
687
688
// Debug-mode check that a declaration is being emitted in the variable's own
// context (and not inside a with/catch context).  Clobbers ebx.
void FullCodeGenerator::EmitDebugCheckDeclarationContext(Variable* variable) {
  // The variable in the declaration always resides in the current context.
  DCHECK_EQ(0, scope()->ContextChainLength(variable->scope()));
  if (FLAG_debug_code) {
    // Check that we're not inside a with or catch context.
    __ mov(ebx, FieldOperand(esi, HeapObject::kMapOffset));
    __ cmp(ebx, isolate()->factory()->with_context_map());
    __ Check(not_equal, kDeclarationInWithContext);
    __ cmp(ebx, isolate()->factory()->catch_context_map());
    __ Check(not_equal, kDeclarationInCatchContext);
  }
}
701
702
// Emit code (or record globals) for a variable declaration, dispatching on
// where the variable was allocated.
void FullCodeGenerator::VisitVariableDeclaration(
    VariableDeclaration* declaration) {
  // If it was not possible to allocate the variable at compile time, we
  // need to "declare" it at runtime to make sure it actually exists in the
  // local context.
  VariableProxy* proxy = declaration->proxy();
  VariableMode mode = declaration->mode();
  Variable* variable = proxy->var();
  // let/const bindings start out holding the hole for TDZ-style checks.
  bool hole_init = mode == LET || mode == CONST || mode == CONST_LEGACY;
  switch (variable->location()) {
    case VariableLocation::GLOBAL:
    case VariableLocation::UNALLOCATED:
      // Deferred to DeclareGlobals: record name/initial-value pairs.
      globals_->Add(variable->name(), zone());
      globals_->Add(variable->binding_needs_init()
                        ? isolate()->factory()->the_hole_value()
                        : isolate()->factory()->undefined_value(), zone());
      break;

    case VariableLocation::PARAMETER:
    case VariableLocation::LOCAL:
      if (hole_init) {
        Comment cmnt(masm_, "[ VariableDeclaration");
        __ mov(StackOperand(variable),
               Immediate(isolate()->factory()->the_hole_value()));
      }
      break;

    case VariableLocation::CONTEXT:
      if (hole_init) {
        Comment cmnt(masm_, "[ VariableDeclaration");
        EmitDebugCheckDeclarationContext(variable);
        __ mov(ContextOperand(esi, variable->index()),
               Immediate(isolate()->factory()->the_hole_value()));
        // No write barrier since the hole value is in old space.
        PrepareForBailoutForId(proxy->id(), NO_REGISTERS);
      }
      break;

    case VariableLocation::LOOKUP: {
      // Dynamic lookup: declare the slot at runtime.
      Comment cmnt(masm_, "[ VariableDeclaration");
      __ push(Immediate(variable->name()));
      // VariableDeclaration nodes are always introduced in one of four modes.
      DCHECK(IsDeclaredVariableMode(mode));
      // Push initial value, if any.
      // Note: For variables we must not push an initial value (such as
      // 'undefined') because we may have a (legal) redeclaration and we
      // must not destroy the current value.
      if (hole_init) {
        __ push(Immediate(isolate()->factory()->the_hole_value()));
      } else {
        __ push(Immediate(Smi::FromInt(0)));  // Indicates no initial value.
      }
      __ push(
          Immediate(Smi::FromInt(variable->DeclarationPropertyAttributes())));
      __ CallRuntime(Runtime::kDeclareLookupSlot);
      break;
    }
  }
}
762
763
// Emit code (or record globals) for a function declaration; the function
// value is computed eagerly and stored into the declared binding.
void FullCodeGenerator::VisitFunctionDeclaration(
    FunctionDeclaration* declaration) {
  VariableProxy* proxy = declaration->proxy();
  Variable* variable = proxy->var();
  switch (variable->location()) {
    case VariableLocation::GLOBAL:
    case VariableLocation::UNALLOCATED: {
      // Deferred to DeclareGlobals: record name/SharedFunctionInfo pairs.
      globals_->Add(variable->name(), zone());
      Handle<SharedFunctionInfo> function =
          Compiler::GetSharedFunctionInfo(declaration->fun(), script(), info_);
      // Check for stack-overflow exception.
      if (function.is_null()) return SetStackOverflow();
      globals_->Add(function, zone());
      break;
    }

    case VariableLocation::PARAMETER:
    case VariableLocation::LOCAL: {
      Comment cmnt(masm_, "[ FunctionDeclaration");
      VisitForAccumulatorValue(declaration->fun());
      __ mov(StackOperand(variable), result_register());
      break;
    }

    case VariableLocation::CONTEXT: {
      Comment cmnt(masm_, "[ FunctionDeclaration");
      EmitDebugCheckDeclarationContext(variable);
      VisitForAccumulatorValue(declaration->fun());
      __ mov(ContextOperand(esi, variable->index()), result_register());
      // We know that we have written a function, which is not a smi.
      __ RecordWriteContextSlot(esi,
                                Context::SlotOffset(variable->index()),
                                result_register(),
                                ecx,
                                kDontSaveFPRegs,
                                EMIT_REMEMBERED_SET,
                                OMIT_SMI_CHECK);
      PrepareForBailoutForId(proxy->id(), NO_REGISTERS);
      break;
    }

    case VariableLocation::LOOKUP: {
      // Dynamic lookup: declare the slot at runtime with the closure value.
      Comment cmnt(masm_, "[ FunctionDeclaration");
      PushOperand(variable->name());
      VisitForStackValue(declaration->fun());
      PushOperand(Smi::FromInt(variable->DeclarationPropertyAttributes()));
      CallRuntimeWithOperands(Runtime::kDeclareLookupSlot);
      break;
    }
  }
}
815
816
// Declare the global bindings collected in |pairs| (name/value pairs
// accumulated by the declaration visitors) via the runtime.
void FullCodeGenerator::DeclareGlobals(Handle<FixedArray> pairs) {
  // Call the runtime to declare the globals.
  __ Push(pairs);
  __ Push(Smi::FromInt(DeclareGlobalsFlags()));
  __ CallRuntime(Runtime::kDeclareGlobals);
  // Return value is ignored.
}
824
825
// Declare the modules described by |descriptions| via the runtime.
void FullCodeGenerator::DeclareModules(Handle<FixedArray> descriptions) {
  // Call the runtime to declare the modules.
  __ Push(descriptions);
  __ CallRuntime(Runtime::kDeclareModules);
  // Return value is ignored.
}
832
833
// Emit a switch statement: the tag value stays on the operand stack while
// each non-default clause is tested (strict-equality, with an inlined smi
// fast path), then all clause bodies are emitted in order.
void FullCodeGenerator::VisitSwitchStatement(SwitchStatement* stmt) {
  Comment cmnt(masm_, "[ SwitchStatement");
  Breakable nested_statement(this, stmt);
  SetStatementPosition(stmt);

  // Keep the switch value on the stack until a case matches.
  VisitForStackValue(stmt->tag());
  PrepareForBailoutForId(stmt->EntryId(), NO_REGISTERS);

  ZoneList<CaseClause*>* clauses = stmt->cases();
  CaseClause* default_clause = NULL;  // Can occur anywhere in the list.

  Label next_test;  // Recycled for each test.
  // Compile all the tests with branches to their bodies.
  for (int i = 0; i < clauses->length(); i++) {
    CaseClause* clause = clauses->at(i);
    clause->body_target()->Unuse();

    // The default is not a test, but remember it as final fall through.
    if (clause->is_default()) {
      default_clause = clause;
      continue;
    }

    Comment cmnt(masm_, "[ Case comparison");
    __ bind(&next_test);
    next_test.Unuse();

    // Compile the label expression.
    VisitForAccumulatorValue(clause->label());

    // Perform the comparison as if via '==='.
    __ mov(edx, Operand(esp, 0));  // Switch value.
    bool inline_smi_code = ShouldInlineSmiCase(Token::EQ_STRICT);
    JumpPatchSite patch_site(masm_);
    if (inline_smi_code) {
      // Fast path: both operands smis -> plain register compare.
      Label slow_case;
      __ mov(ecx, edx);
      __ or_(ecx, eax);
      patch_site.EmitJumpIfNotSmi(ecx, &slow_case, Label::kNear);

      __ cmp(edx, eax);
      __ j(not_equal, &next_test);
      __ Drop(1);  // Switch value is no longer needed.
      __ jmp(clause->body_target());
      __ bind(&slow_case);
    }

    // Record position before stub call for type feedback.
    SetExpressionPosition(clause);
    Handle<Code> ic =
        CodeFactory::CompareIC(isolate(), Token::EQ_STRICT).code();
    CallIC(ic, clause->CompareId());
    patch_site.EmitPatchInfo();

    // Bailout re-entry path: a deopt at the clause comparison rematerializes
    // a boolean in eax; the straight-line path jumps over the normalization.
    Label skip;
    __ jmp(&skip, Label::kNear);
    PrepareForBailout(clause, TOS_REG);
    __ cmp(eax, isolate()->factory()->true_value());
    __ j(not_equal, &next_test);
    __ Drop(1);
    __ jmp(clause->body_target());
    __ bind(&skip);

    // CompareIC signals equality with a zero result.
    __ test(eax, eax);
    __ j(not_equal, &next_test);
    __ Drop(1);  // Switch value is no longer needed.
    __ jmp(clause->body_target());
  }

  // Discard the test value and jump to the default if present, otherwise to
  // the end of the statement.
  __ bind(&next_test);
  DropOperands(1);  // Switch value is no longer needed.
  if (default_clause == NULL) {
    __ jmp(nested_statement.break_label());
  } else {
    __ jmp(default_clause->body_target());
  }

  // Compile all the case bodies.
  for (int i = 0; i < clauses->length(); i++) {
    Comment cmnt(masm_, "[ Case body");
    CaseClause* clause = clauses->at(i);
    __ bind(clause->body_target());
    PrepareForBailoutForId(clause->EntryId(), NO_REGISTERS);
    VisitStatements(clause->statements());
  }

  __ bind(nested_statement.break_label());
  PrepareForBailoutForId(stmt->ExitId(), NO_REGISTERS);
}
925
926
// Compiles a for-in loop. The enumerable is converted to a JS receiver
// (skipping the loop entirely for null/undefined), then five slots are
// maintained on the operand stack for the duration of the loop:
//   esp[0*kPointerSize] : current index (smi)
//   esp[1*kPointerSize] : length of the enum cache / fixed array (smi)
//   esp[2*kPointerSize] : enum cache array or fixed array of keys
//   esp[3*kPointerSize] : expected map (fast path) or Smi(1) (slow path)
//   esp[4*kPointerSize] : the enumerable object itself
// On the fast path keys come from the map's enum cache; if the map changes
// mid-loop each key is re-validated through Runtime::kForInFilter.
void FullCodeGenerator::VisitForInStatement(ForInStatement* stmt) {
  Comment cmnt(masm_, "[ ForInStatement");
  SetStatementPosition(stmt, SKIP_BREAK);

  FeedbackVectorSlot slot = stmt->ForInFeedbackSlot();

  // Get the object to enumerate over.
  SetExpressionAsStatementPosition(stmt->enumerable());
  VisitForAccumulatorValue(stmt->enumerable());
  // Account up front for the five loop slots pushed below.
  OperandStackDepthIncrement(5);

  Label loop, exit;
  Iteration loop_statement(this, stmt);
  increment_loop_depth();

  // If the object is null or undefined, skip over the loop, otherwise convert
  // it to a JS receiver.  See ECMA-262 version 5, section 12.6.4.
  Label convert, done_convert;
  __ JumpIfSmi(eax, &convert, Label::kNear);
  __ CmpObjectType(eax, FIRST_JS_RECEIVER_TYPE, ecx);
  __ j(above_equal, &done_convert, Label::kNear);
  __ cmp(eax, isolate()->factory()->undefined_value());
  __ j(equal, &exit);
  __ cmp(eax, isolate()->factory()->null_value());
  __ j(equal, &exit);
  __ bind(&convert);
  ToObjectStub stub(isolate());
  __ CallStub(&stub);
  __ bind(&done_convert);
  PrepareForBailoutForId(stmt->ToObjectId(), TOS_REG);
  __ push(eax);  // Slot 4: the enumerable.

  // Check cache validity in generated code. This is a fast case for
  // the JSObject::IsSimpleEnum cache validity checks. If we cannot
  // guarantee cache validity, call the runtime system to check cache
  // validity or get the property names in a fixed array.
  // Note: Proxies never have an enum cache, so will always take the
  // slow path.
  Label call_runtime, use_cache, fixed_array;
  __ CheckEnumCache(&call_runtime);

  __ mov(eax, FieldOperand(eax, HeapObject::kMapOffset));
  __ jmp(&use_cache, Label::kNear);

  // Get the set of properties to enumerate.
  __ bind(&call_runtime);
  __ push(eax);
  __ CallRuntime(Runtime::kForInEnumerate);
  PrepareForBailoutForId(stmt->EnumId(), TOS_REG);
  // The runtime returns either a map (meta_map-typed => enum cache usable)
  // or a fixed array of keys (=> permanent slow path).
  __ cmp(FieldOperand(eax, HeapObject::kMapOffset),
         isolate()->factory()->meta_map());
  __ j(not_equal, &fixed_array);


  // We got a map in register eax. Get the enumeration cache from it.
  Label no_descriptors;
  __ bind(&use_cache);

  __ EnumLength(edx, eax);
  __ cmp(edx, Immediate(Smi::FromInt(0)));
  __ j(equal, &no_descriptors);

  __ LoadInstanceDescriptors(eax, ecx);
  __ mov(ecx, FieldOperand(ecx, DescriptorArray::kEnumCacheOffset));
  __ mov(ecx, FieldOperand(ecx, DescriptorArray::kEnumCacheBridgeCacheOffset));

  // Set up the four remaining stack slots.
  __ push(eax);  // Map.
  __ push(ecx);  // Enumeration cache.
  __ push(edx);  // Number of valid entries for the map in the enum cache.
  __ push(Immediate(Smi::FromInt(0)));  // Initial index.
  __ jmp(&loop);

  __ bind(&no_descriptors);
  // Nothing to enumerate: drop the enumerable and exit.
  __ add(esp, Immediate(kPointerSize));
  __ jmp(&exit);

  // We got a fixed array in register eax. Iterate through that.
  __ bind(&fixed_array);

  __ push(Immediate(Smi::FromInt(1)));  // Smi(1) indicates slow check
  __ push(eax);  // Array
  __ mov(eax, FieldOperand(eax, FixedArray::kLengthOffset));
  __ push(eax);  // Fixed array length (as smi).
  PrepareForBailoutForId(stmt->PrepareId(), NO_REGISTERS);
  __ push(Immediate(Smi::FromInt(0)));  // Initial index.

  // Generate code for doing the condition check.
  __ bind(&loop);
  SetExpressionAsStatementPosition(stmt->each());

  __ mov(eax, Operand(esp, 0 * kPointerSize));  // Get the current index.
  __ cmp(eax, Operand(esp, 1 * kPointerSize));  // Compare to the array length.
  __ j(above_equal, loop_statement.break_label());

  // Get the current entry of the array into register ebx.
  __ mov(ebx, Operand(esp, 2 * kPointerSize));
  // times_2: the index is a smi (already shifted by 1), so scaling by 2
  // yields a pointer-sized element offset.
  __ mov(ebx, FieldOperand(ebx, eax, times_2, FixedArray::kHeaderSize));

  // Get the expected map from the stack or a smi in the
  // permanent slow case into register edx.
  __ mov(edx, Operand(esp, 3 * kPointerSize));

  // Check if the expected map still matches that of the enumerable.
  // If not, we may have to filter the key.
  Label update_each;
  __ mov(ecx, Operand(esp, 4 * kPointerSize));
  __ cmp(edx, FieldOperand(ecx, HeapObject::kMapOffset));
  __ j(equal, &update_each, Label::kNear);

  // We need to filter the key, record slow-path here.
  int const vector_index = SmiFromSlot(slot)->value();
  __ EmitLoadTypeFeedbackVector(edx);
  __ mov(FieldOperand(edx, FixedArray::OffsetOfElementAt(vector_index)),
         Immediate(TypeFeedbackVector::MegamorphicSentinel(isolate())));

  // Convert the entry to a string or null if it isn't a property
  // anymore. If the property has been removed while iterating, we
  // just skip it.
  __ push(ecx);  // Enumerable.
  __ push(ebx);  // Current entry.
  __ CallRuntime(Runtime::kForInFilter);
  PrepareForBailoutForId(stmt->FilterId(), TOS_REG);
  __ cmp(eax, isolate()->factory()->undefined_value());
  __ j(equal, loop_statement.continue_label());
  __ mov(ebx, eax);

  // Update the 'each' property or variable from the possibly filtered
  // entry in register ebx.
  __ bind(&update_each);
  __ mov(result_register(), ebx);
  // Perform the assignment as if via '='.
  { EffectContext context(this);
    EmitAssignment(stmt->each(), stmt->EachFeedbackSlot());
    PrepareForBailoutForId(stmt->AssignmentId(), NO_REGISTERS);
  }

  // Both Crankshaft and Turbofan expect BodyId to be right before stmt->body().
  PrepareForBailoutForId(stmt->BodyId(), NO_REGISTERS);
  // Generate code for the body of the loop.
  Visit(stmt->body());

  // Generate code for going to the next element by incrementing the
  // index (smi) stored on top of the stack.
  __ bind(loop_statement.continue_label());
  __ add(Operand(esp, 0 * kPointerSize), Immediate(Smi::FromInt(1)));

  EmitBackEdgeBookkeeping(stmt, &loop);
  __ jmp(&loop);

  // Remove the pointers stored on the stack.
  __ bind(loop_statement.break_label());
  DropOperands(5);

  // Exit and decrement the loop depth.
  PrepareForBailoutForId(stmt->ExitId(), NO_REGISTERS);
  __ bind(&exit);
  decrement_loop_depth();
}
1086
1087
// Emits a named store that sets the home_object_symbol property of the
// object on top of the operand stack to the value located at the given
// stack offset (in pointer-sized slots), using the store IC with the
// supplied feedback slot. The initializer must actually need a home object
// (checked by the DCHECK).
void FullCodeGenerator::EmitSetHomeObject(Expression* initializer, int offset,
                                          FeedbackVectorSlot slot) {
  DCHECK(NeedsHomeObject(initializer));
  __ mov(StoreDescriptor::ReceiverRegister(), Operand(esp, 0));
  __ mov(StoreDescriptor::NameRegister(),
         Immediate(isolate()->factory()->home_object_symbol()));
  __ mov(StoreDescriptor::ValueRegister(), Operand(esp, offset * kPointerSize));
  EmitLoadStoreICSlot(slot);
  CallStoreIC();
}
1098
1099
// Same as EmitSetHomeObject, except the receiver of the store is the value
// currently in the accumulator (eax) rather than the top of the operand
// stack; the stored value still comes from the given stack offset.
void FullCodeGenerator::EmitSetHomeObjectAccumulator(Expression* initializer,
                                                     int offset,
                                                     FeedbackVectorSlot slot) {
  DCHECK(NeedsHomeObject(initializer));
  __ mov(StoreDescriptor::ReceiverRegister(), eax);
  __ mov(StoreDescriptor::NameRegister(),
         Immediate(isolate()->factory()->home_object_symbol()));
  __ mov(StoreDescriptor::ValueRegister(), Operand(esp, offset * kPointerSize));
  EmitLoadStoreICSlot(slot);
  CallStoreIC();
}
1111
1112
// Emits the fast path for loading a global that might be shadowed by
// eval-introduced bindings: walks the context chain, jumping to `slow`
// whenever a context that calls sloppy eval has a non-hole extension
// object (meaning a shadowing binding may exist), and finally performs a
// normal global load. Statically walks scopes where possible; falls back
// to a generated loop up to the native context when an eval scope is
// reached. Clobbers edx (temp); esi (the context register) is preserved.
void FullCodeGenerator::EmitLoadGlobalCheckExtensions(VariableProxy* proxy,
                                                      TypeofMode typeof_mode,
                                                      Label* slow) {
  Register context = esi;
  Register temp = edx;

  Scope* s = scope();
  while (s != NULL) {
    if (s->num_heap_slots() > 0) {
      if (s->calls_sloppy_eval()) {
        // Check that extension is "the hole".
        __ JumpIfNotRoot(ContextOperand(context, Context::EXTENSION_INDEX),
                         Heap::kTheHoleValueRootIndex, slow);
      }
      // Load next context in chain.
      __ mov(temp, ContextOperand(context, Context::PREVIOUS_INDEX));
      // Walk the rest of the chain without clobbering esi.
      context = temp;
    }
    // If no outer scope calls eval, we do not need to check more
    // context extensions.  If we have reached an eval scope, we check
    // all extensions from this point.
    if (!s->outer_scope_calls_sloppy_eval() || s->is_eval_scope()) break;
    s = s->outer_scope();
  }

  if (s != NULL && s->is_eval_scope()) {
    // Loop up the context chain.  There is no frame effect so it is
    // safe to use raw labels here.
    Label next, fast;
    if (!context.is(temp)) {
      __ mov(temp, context);
    }
    __ bind(&next);
    // Terminate at native context.
    __ cmp(FieldOperand(temp, HeapObject::kMapOffset),
           Immediate(isolate()->factory()->native_context_map()));
    __ j(equal, &fast, Label::kNear);
    // Check that extension is "the hole".
    __ JumpIfNotRoot(ContextOperand(temp, Context::EXTENSION_INDEX),
                     Heap::kTheHoleValueRootIndex, slow);
    // Load next context in chain.
    __ mov(temp, ContextOperand(temp, Context::PREVIOUS_INDEX));
    __ jmp(&next);
    __ bind(&fast);
  }

  // All extension objects were empty and it is safe to use a normal global
  // load machinery.
  EmitGlobalVariableLoad(proxy, typeof_mode);
}
1164
1165
// Returns a memory operand addressing `var`'s context slot, emitting code
// that walks the context chain from the current scope to the variable's
// scope and jumps to `slow` if any intervening sloppy-eval context (or the
// final context) has a non-hole extension object. Clobbers ebx (temp);
// the returned operand is only valid for loads (see comment below).
MemOperand FullCodeGenerator::ContextSlotOperandCheckExtensions(Variable* var,
                                                                Label* slow) {
  DCHECK(var->IsContextSlot());
  Register context = esi;
  Register temp = ebx;

  for (Scope* s = scope(); s != var->scope(); s = s->outer_scope()) {
    if (s->num_heap_slots() > 0) {
      if (s->calls_sloppy_eval()) {
        // Check that extension is "the hole".
        __ JumpIfNotRoot(ContextOperand(context, Context::EXTENSION_INDEX),
                         Heap::kTheHoleValueRootIndex, slow);
      }
      __ mov(temp, ContextOperand(context, Context::PREVIOUS_INDEX));
      // Walk the rest of the chain without clobbering esi.
      context = temp;
    }
  }
  // Check that last extension is "the hole".
  __ JumpIfNotRoot(ContextOperand(context, Context::EXTENSION_INDEX),
                   Heap::kTheHoleValueRootIndex, slow);

  // This function is used only for loads, not stores, so it's safe to
  // return an esi-based operand (the write barrier cannot be allowed to
  // destroy the esi register).
  return ContextOperand(context, var->index());
}
1193
1194
// Emits the fast-path load for a dynamically-scoped variable reference:
// DYNAMIC_GLOBAL variables go through the extension-checking global load,
// DYNAMIC_LOCAL variables load their (possibly shadowed) local context
// slot with a hole check for let/const bindings. Jumps to `done` with the
// result in eax on success; falls through to `slow` (via the extension
// checks) when a shadowing eval binding may exist. Other variable modes
// emit nothing here and are handled entirely by the caller's slow path.
void FullCodeGenerator::EmitDynamicLookupFastCase(VariableProxy* proxy,
                                                  TypeofMode typeof_mode,
                                                  Label* slow, Label* done) {
  // Generate fast-case code for variables that might be shadowed by
  // eval-introduced variables.  Eval is used a lot without
  // introducing variables.  In those cases, we do not want to
  // perform a runtime call for all variables in the scope
  // containing the eval.
  Variable* var = proxy->var();
  if (var->mode() == DYNAMIC_GLOBAL) {
    EmitLoadGlobalCheckExtensions(proxy, typeof_mode, slow);
    __ jmp(done);
  } else if (var->mode() == DYNAMIC_LOCAL) {
    Variable* local = var->local_if_not_shadowed();
    __ mov(eax, ContextSlotOperandCheckExtensions(local, slow));
    if (local->mode() == LET || local->mode() == CONST ||
        local->mode() == CONST_LEGACY) {
      // The hole marks an uninitialized binding: legacy const yields
      // undefined, let/const throw a ReferenceError.
      __ cmp(eax, isolate()->factory()->the_hole_value());
      __ j(not_equal, done);
      if (local->mode() == CONST_LEGACY) {
        __ mov(eax, isolate()->factory()->undefined_value());
      } else {  // LET || CONST
        __ push(Immediate(var->name()));
        __ CallRuntime(Runtime::kThrowReferenceError);
      }
    }
    __ jmp(done);
  }
}
1224
1225
// Emits a load of a global variable through the load IC: the receiver is
// the global extension object fetched from the native context, the name is
// the variable's name, and the IC feedback slot comes from the proxy.
// `typeof_mode` distinguishes loads inside `typeof` (which must not throw
// for undeclared globals). Result ends up in the IC's return register.
void FullCodeGenerator::EmitGlobalVariableLoad(VariableProxy* proxy,
                                               TypeofMode typeof_mode) {
  Variable* var = proxy->var();
  DCHECK(var->IsUnallocatedOrGlobalSlot() ||
         (var->IsLookupSlot() && var->mode() == DYNAMIC_GLOBAL));
  __ mov(LoadDescriptor::ReceiverRegister(), NativeContextOperand());
  __ mov(LoadDescriptor::ReceiverRegister(),
         ContextOperand(LoadDescriptor::ReceiverRegister(),
                        Context::EXTENSION_INDEX));
  __ mov(LoadDescriptor::NameRegister(), var->name());
  __ mov(LoadDescriptor::SlotRegister(),
         Immediate(SmiFromSlot(proxy->VariableFeedbackSlot())));
  CallLoadIC(typeof_mode);
}
1240
1241
// Emits a load of the variable referenced by `proxy` and plugs the result
// into the current expression context. Dispatches on the variable's
// location: global/unallocated use the load IC, parameter/local/context
// slots are read directly (with a hole check for let/const), and LOOKUP
// variables try the dynamic fast path before falling back to a runtime
// lookup.
void FullCodeGenerator::EmitVariableLoad(VariableProxy* proxy,
                                         TypeofMode typeof_mode) {
  SetExpressionPosition(proxy);
  PrepareForBailoutForId(proxy->BeforeId(), NO_REGISTERS);
  Variable* var = proxy->var();

  // Three cases: global variables, lookup variables, and all other types of
  // variables.
  switch (var->location()) {
    case VariableLocation::GLOBAL:
    case VariableLocation::UNALLOCATED: {
      Comment cmnt(masm_, "[ Global variable");
      EmitGlobalVariableLoad(proxy, typeof_mode);
      context()->Plug(eax);
      break;
    }

    case VariableLocation::PARAMETER:
    case VariableLocation::LOCAL:
    case VariableLocation::CONTEXT: {
      DCHECK_EQ(NOT_INSIDE_TYPEOF, typeof_mode);
      Comment cmnt(masm_, var->IsContextSlot() ? "[ Context variable"
                                               : "[ Stack variable");

      if (NeedsHoleCheckForLoad(proxy)) {
        // Let and const need a read barrier.
        Label done;
        GetVar(eax, var);
        __ cmp(eax, isolate()->factory()->the_hole_value());
        __ j(not_equal, &done, Label::kNear);
        if (var->mode() == LET || var->mode() == CONST) {
          // Throw a reference error when using an uninitialized let/const
          // binding in harmony mode.
          __ push(Immediate(var->name()));
          __ CallRuntime(Runtime::kThrowReferenceError);
        } else {
          // Uninitialized legacy const bindings are unholed.
          DCHECK(var->mode() == CONST_LEGACY);
          __ mov(eax, isolate()->factory()->undefined_value());
        }
        __ bind(&done);
        context()->Plug(eax);
        break;
      }
      context()->Plug(var);
      break;
    }

    case VariableLocation::LOOKUP: {
      Comment cmnt(masm_, "[ Lookup variable");
      Label done, slow;
      // Generate code for loading from variables potentially shadowed
      // by eval-introduced variables.
      EmitDynamicLookupFastCase(proxy, typeof_mode, &slow, &done);
      __ bind(&slow);
      __ push(Immediate(var->name()));
      // Inside typeof, a failed lookup must yield undefined instead of
      // throwing, hence the dedicated runtime entry.
      Runtime::FunctionId function_id =
          typeof_mode == NOT_INSIDE_TYPEOF
              ? Runtime::kLoadLookupSlot
              : Runtime::kLoadLookupSlotInsideTypeof;
      __ CallRuntime(function_id);
      __ bind(&done);
      context()->Plug(eax);
      break;
    }
  }
}
1309
1310
// Compiles a regexp literal by invoking FastCloneRegExpStub with the
// current closure (edi), the literal index (eax), the pattern string (ecx)
// and the flags (edx); the resulting JSRegExp is plugged into the current
// expression context from eax.
void FullCodeGenerator::VisitRegExpLiteral(RegExpLiteral* expr) {
  Comment cmnt(masm_, "[ RegExpLiteral");
  __ mov(edi, Operand(ebp, JavaScriptFrameConstants::kFunctionOffset));
  __ Move(eax, Immediate(Smi::FromInt(expr->literal_index())));
  __ Move(ecx, Immediate(expr->pattern()));
  __ Move(edx, Immediate(Smi::FromInt(expr->flags())));
  FastCloneRegExpStub stub(isolate());
  __ CallStub(&stub);
  context()->Plug(eax);
}
1321
1322
// Pushes one accessor function (getter or setter) of an object-literal
// accessor pair onto the operand stack, or null when the property (or its
// value) is absent. If the accessor needs a [[HomeObject]], it is wired up
// at stack offset 2 (getter) or 3 (setter), matching the layout the caller
// builds for Runtime::kDefineAccessorPropertyUnchecked.
void FullCodeGenerator::EmitAccessor(ObjectLiteralProperty* property) {
  Expression* expression = (property == NULL) ? NULL : property->value();
  if (expression == NULL) {
    PushOperand(isolate()->factory()->null_value());
  } else {
    VisitForStackValue(expression);
    if (NeedsHomeObject(expression)) {
      DCHECK(property->kind() == ObjectLiteral::Property::GETTER ||
             property->kind() == ObjectLiteral::Property::SETTER);
      int offset = property->kind() == ObjectLiteral::Property::GETTER ? 2 : 3;
      EmitSetHomeObject(expression, offset, property->GetSlot());
    }
  }
}
1337
1338
// Compiles an object literal. The boilerplate object is created either by
// the runtime or by FastCloneShallowObjectStub, then properties are
// initialized in three phases: (1) the "static" prefix of properties with
// known names (stored via store IC or Runtime::kSetProperty), (2) accessor
// pairs collected into a table and defined with one runtime call per
// getter/setter pair, and (3) the "dynamic" suffix starting at the first
// computed property name, defined property-by-property via runtime calls
// to preserve insertion order. The result is left on the stack (if any
// property code needed it there) or in eax.
void FullCodeGenerator::VisitObjectLiteral(ObjectLiteral* expr) {
  Comment cmnt(masm_, "[ ObjectLiteral");

  Handle<FixedArray> constant_properties = expr->constant_properties();
  int flags = expr->ComputeFlags();
  // If any of the keys would store to the elements array, then we shouldn't
  // allow it.
  if (MustCreateObjectLiteralWithRuntime(expr)) {
    __ push(Operand(ebp, JavaScriptFrameConstants::kFunctionOffset));
    __ push(Immediate(Smi::FromInt(expr->literal_index())));
    __ push(Immediate(constant_properties));
    __ push(Immediate(Smi::FromInt(flags)));
    __ CallRuntime(Runtime::kCreateObjectLiteral);
  } else {
    __ mov(eax, Operand(ebp, JavaScriptFrameConstants::kFunctionOffset));
    __ mov(ebx, Immediate(Smi::FromInt(expr->literal_index())));
    __ mov(ecx, Immediate(constant_properties));
    __ mov(edx, Immediate(Smi::FromInt(flags)));
    FastCloneShallowObjectStub stub(isolate(), expr->properties_count());
    __ CallStub(&stub);
  }
  PrepareForBailoutForId(expr->CreateLiteralId(), TOS_REG);

  // If result_saved is true the result is on top of the stack.  If
  // result_saved is false the result is in eax.
  bool result_saved = false;

  AccessorTable accessor_table(zone());
  int property_index = 0;
  // Phase 1: the static prefix (stops at the first computed name).
  for (; property_index < expr->properties()->length(); property_index++) {
    ObjectLiteral::Property* property = expr->properties()->at(property_index);
    if (property->is_computed_name()) break;
    if (property->IsCompileTimeValue()) continue;

    Literal* key = property->key()->AsLiteral();
    Expression* value = property->value();
    if (!result_saved) {
      PushOperand(eax);  // Save result on the stack
      result_saved = true;
    }
    switch (property->kind()) {
      case ObjectLiteral::Property::CONSTANT:
        UNREACHABLE();
      case ObjectLiteral::Property::MATERIALIZED_LITERAL:
        DCHECK(!CompileTimeValue::IsCompileTimeValue(value));
        // Fall through.
      case ObjectLiteral::Property::COMPUTED:
        // It is safe to use [[Put]] here because the boilerplate already
        // contains computed properties with an uninitialized value.
        if (key->value()->IsInternalizedString()) {
          if (property->emit_store()) {
            VisitForAccumulatorValue(value);
            DCHECK(StoreDescriptor::ValueRegister().is(eax));
            __ mov(StoreDescriptor::NameRegister(), Immediate(key->value()));
            __ mov(StoreDescriptor::ReceiverRegister(), Operand(esp, 0));
            EmitLoadStoreICSlot(property->GetSlot(0));
            CallStoreIC();
            PrepareForBailoutForId(key->id(), NO_REGISTERS);
            if (NeedsHomeObject(value)) {
              EmitSetHomeObjectAccumulator(value, 0, property->GetSlot(1));
            }
          } else {
            // Shadowed property: evaluate for side effects only.
            VisitForEffect(value);
          }
          break;
        }
        PushOperand(Operand(esp, 0));  // Duplicate receiver.
        VisitForStackValue(key);
        VisitForStackValue(value);
        if (property->emit_store()) {
          if (NeedsHomeObject(value)) {
            EmitSetHomeObject(value, 2, property->GetSlot());
          }
          PushOperand(Smi::FromInt(SLOPPY));  // Language mode
          CallRuntimeWithOperands(Runtime::kSetProperty);
        } else {
          DropOperands(3);
        }
        break;
      case ObjectLiteral::Property::PROTOTYPE:
        PushOperand(Operand(esp, 0));  // Duplicate receiver.
        VisitForStackValue(value);
        DCHECK(property->emit_store());
        CallRuntimeWithOperands(Runtime::kInternalSetPrototype);
        PrepareForBailoutForId(expr->GetIdForPropertySet(property_index),
                               NO_REGISTERS);
        break;
      case ObjectLiteral::Property::GETTER:
        if (property->emit_store()) {
          accessor_table.lookup(key)->second->getter = property;
        }
        break;
      case ObjectLiteral::Property::SETTER:
        if (property->emit_store()) {
          accessor_table.lookup(key)->second->setter = property;
        }
        break;
    }
  }

  // Emit code to define accessors, using only a single call to the runtime for
  // each pair of corresponding getters and setters.
  for (AccessorTable::Iterator it = accessor_table.begin();
       it != accessor_table.end();
       ++it) {
    PushOperand(Operand(esp, 0));  // Duplicate receiver.
    VisitForStackValue(it->first);

    EmitAccessor(it->second->getter);
    EmitAccessor(it->second->setter);

    PushOperand(Smi::FromInt(NONE));  // PropertyAttributes.
    CallRuntimeWithOperands(Runtime::kDefineAccessorPropertyUnchecked);
  }

  // Object literals have two parts. The "static" part on the left contains no
  // computed property names, and so we can compute its map ahead of time; see
  // runtime.cc::CreateObjectLiteralBoilerplate. The second "dynamic" part
  // starts with the first computed property name, and continues with all
  // properties to its right.  All the code from above initializes the static
  // component of the object literal, and arranges for the map of the result to
  // reflect the static order in which the keys appear. For the dynamic
  // properties, we compile them into a series of "SetOwnProperty" runtime
  // calls. This will preserve insertion order.
  for (; property_index < expr->properties()->length(); property_index++) {
    ObjectLiteral::Property* property = expr->properties()->at(property_index);

    Expression* value = property->value();
    if (!result_saved) {
      PushOperand(eax);  // Save result on the stack
      result_saved = true;
    }

    PushOperand(Operand(esp, 0));  // Duplicate receiver.

    if (property->kind() == ObjectLiteral::Property::PROTOTYPE) {
      DCHECK(!property->is_computed_name());
      VisitForStackValue(value);
      DCHECK(property->emit_store());
      CallRuntimeWithOperands(Runtime::kInternalSetPrototype);
      PrepareForBailoutForId(expr->GetIdForPropertySet(property_index),
                             NO_REGISTERS);
    } else {
      EmitPropertyKey(property, expr->GetIdForPropertyName(property_index));
      VisitForStackValue(value);
      if (NeedsHomeObject(value)) {
        EmitSetHomeObject(value, 2, property->GetSlot());
      }

      switch (property->kind()) {
        case ObjectLiteral::Property::CONSTANT:
        case ObjectLiteral::Property::MATERIALIZED_LITERAL:
        case ObjectLiteral::Property::COMPUTED:
          if (property->emit_store()) {
            PushOperand(Smi::FromInt(NONE));
            PushOperand(Smi::FromInt(property->NeedsSetFunctionName()));
            CallRuntimeWithOperands(Runtime::kDefineDataPropertyInLiteral);
          } else {
            DropOperands(3);
          }
          break;

        case ObjectLiteral::Property::PROTOTYPE:
          UNREACHABLE();
          break;

        case ObjectLiteral::Property::GETTER:
          PushOperand(Smi::FromInt(NONE));
          CallRuntimeWithOperands(Runtime::kDefineGetterPropertyUnchecked);
          break;

        case ObjectLiteral::Property::SETTER:
          PushOperand(Smi::FromInt(NONE));
          CallRuntimeWithOperands(Runtime::kDefineSetterPropertyUnchecked);
          break;
      }
    }
  }

  if (result_saved) {
    context()->PlugTOS();
  } else {
    context()->Plug(eax);
  }
}
1524
1525
// Compiles an array literal. The boilerplate array is created by the
// runtime or FastCloneShallowArrayStub, then non-constant subexpressions
// in the "static" prefix are stored by index via the keyed-store IC.
// Elements from the first spread expression onward (if any) are appended
// one at a time through Runtime::kAppendElement, since the number of
// elements an iterable produces is unknown ahead of time. The result is
// left on the stack or in eax depending on whether it was saved.
void FullCodeGenerator::VisitArrayLiteral(ArrayLiteral* expr) {
  Comment cmnt(masm_, "[ ArrayLiteral");

  Handle<FixedArray> constant_elements = expr->constant_elements();
  bool has_constant_fast_elements =
      IsFastObjectElementsKind(expr->constant_elements_kind());

  AllocationSiteMode allocation_site_mode = TRACK_ALLOCATION_SITE;
  if (has_constant_fast_elements && !FLAG_allocation_site_pretenuring) {
    // If the only customer of allocation sites is transitioning, then
    // we can turn it off if we don't have anywhere else to transition to.
    allocation_site_mode = DONT_TRACK_ALLOCATION_SITE;
  }

  if (MustCreateArrayLiteralWithRuntime(expr)) {
    __ push(Operand(ebp, JavaScriptFrameConstants::kFunctionOffset));
    __ push(Immediate(Smi::FromInt(expr->literal_index())));
    __ push(Immediate(constant_elements));
    __ push(Immediate(Smi::FromInt(expr->ComputeFlags())));
    __ CallRuntime(Runtime::kCreateArrayLiteral);
  } else {
    __ mov(eax, Operand(ebp, JavaScriptFrameConstants::kFunctionOffset));
    __ mov(ebx, Immediate(Smi::FromInt(expr->literal_index())));
    __ mov(ecx, Immediate(constant_elements));
    FastCloneShallowArrayStub stub(isolate(), allocation_site_mode);
    __ CallStub(&stub);
  }
  PrepareForBailoutForId(expr->CreateLiteralId(), TOS_REG);

  bool result_saved = false;  // Is the result saved to the stack?
  ZoneList<Expression*>* subexprs = expr->values();
  int length = subexprs->length();

  // Emit code to evaluate all the non-constant subexpressions and to store
  // them into the newly cloned array.
  int array_index = 0;
  for (; array_index < length; array_index++) {
    Expression* subexpr = subexprs->at(array_index);
    // Spreads terminate this loop (see MustCreateArrayLiteralWithRuntime
    // and the append loop below).
    DCHECK(!subexpr->IsSpread());

    // If the subexpression is a literal or a simple materialized literal it
    // is already set in the cloned array.
    if (CompileTimeValue::IsCompileTimeValue(subexpr)) continue;

    if (!result_saved) {
      PushOperand(eax);  // array literal.
      result_saved = true;
    }
    VisitForAccumulatorValue(subexpr);

    // Store the element via the keyed-store IC: value in eax, name is the
    // element index (as smi), receiver is the array saved on the stack.
    __ mov(StoreDescriptor::NameRegister(),
           Immediate(Smi::FromInt(array_index)));
    __ mov(StoreDescriptor::ReceiverRegister(), Operand(esp, 0));
    EmitLoadStoreICSlot(expr->LiteralFeedbackSlot());
    Handle<Code> ic =
        CodeFactory::KeyedStoreIC(isolate(), language_mode()).code();
    CallIC(ic);
    PrepareForBailoutForId(expr->GetIdForElement(array_index), NO_REGISTERS);
  }

  // In case the array literal contains spread expressions it has two parts. The
  // first part is the "static" array which has a literal index is  handled
  // above. The second part is the part after the first spread expression
  // (inclusive) and these elements gets appended to the array. Note that the
  // number elements an iterable produces is unknown ahead of time.
  if (array_index < length && result_saved) {
    PopOperand(eax);
    result_saved = false;
  }
  for (; array_index < length; array_index++) {
    Expression* subexpr = subexprs->at(array_index);

    PushOperand(eax);
    DCHECK(!subexpr->IsSpread());
    VisitForStackValue(subexpr);
    CallRuntimeWithOperands(Runtime::kAppendElement);

    PrepareForBailoutForId(expr->GetIdForElement(array_index), NO_REGISTERS);
  }

  if (result_saved) {
    context()->PlugTOS();
  } else {
    context()->Plug(eax);
  }
}
1612
1613
// Emits code for an assignment expression.  Dispatches on the LHS kind
// (variable, named/keyed property, possibly via 'super'), evaluates the
// receiver/key subexpressions, for compound assignments (e.g. 'a += b')
// additionally loads the current value and applies the binary operation,
// and finally stores the result.
void FullCodeGenerator::VisitAssignment(Assignment* expr) {
  DCHECK(expr->target()->IsValidReferenceExpressionOrThis());

  Comment cmnt(masm_, "[ Assignment");
  SetExpressionPosition(expr, INSERT_BREAK);

  Property* property = expr->target()->AsProperty();
  LhsKind assign_type = Property::GetAssignType(property);

  // Evaluate LHS expression.
  switch (assign_type) {
    case VARIABLE:
      // Nothing to do here.
      break;
    case NAMED_SUPER_PROPERTY:
      VisitForStackValue(
          property->obj()->AsSuperPropertyReference()->this_var());
      VisitForAccumulatorValue(
          property->obj()->AsSuperPropertyReference()->home_object());
      PushOperand(result_register());
      if (expr->is_compound()) {
        // Duplicate the receiver and home_object so the property load below
        // can consume one copy: stack becomes this, home_object, this,
        // home_object (result_register() still holds home_object here).
        PushOperand(MemOperand(esp, kPointerSize));
        PushOperand(result_register());
      }
      break;
    case NAMED_PROPERTY:
      if (expr->is_compound()) {
        // We need the receiver both on the stack and in the register.
        VisitForStackValue(property->obj());
        __ mov(LoadDescriptor::ReceiverRegister(), Operand(esp, 0));
      } else {
        VisitForStackValue(property->obj());
      }
      break;
    case KEYED_SUPER_PROPERTY:
      VisitForStackValue(
          property->obj()->AsSuperPropertyReference()->this_var());
      VisitForStackValue(
          property->obj()->AsSuperPropertyReference()->home_object());
      VisitForAccumulatorValue(property->key());
      PushOperand(result_register());
      if (expr->is_compound()) {
        // Duplicate this, home_object and key for the keyed load below.
        // Note: after the first push, home_object has moved to
        // esp + 2 * kPointerSize, so the same operand copies both values.
        PushOperand(MemOperand(esp, 2 * kPointerSize));
        PushOperand(MemOperand(esp, 2 * kPointerSize));
        PushOperand(result_register());
      }
      break;
    case KEYED_PROPERTY: {
      if (expr->is_compound()) {
        // Keep receiver and key on the stack and also load them into the
        // registers the keyed load IC expects.
        VisitForStackValue(property->obj());
        VisitForStackValue(property->key());
        __ mov(LoadDescriptor::ReceiverRegister(), Operand(esp, kPointerSize));
        __ mov(LoadDescriptor::NameRegister(), Operand(esp, 0));
      } else {
        VisitForStackValue(property->obj());
        VisitForStackValue(property->key());
      }
      break;
    }
  }

  // For compound assignments we need another deoptimization point after the
  // variable/property load.
  if (expr->is_compound()) {
    AccumulatorValueContext result_context(this);
    { AccumulatorValueContext left_operand_context(this);
      switch (assign_type) {
        case VARIABLE:
          EmitVariableLoad(expr->target()->AsVariableProxy());
          PrepareForBailout(expr->target(), TOS_REG);
          break;
        case NAMED_SUPER_PROPERTY:
          EmitNamedSuperPropertyLoad(property);
          PrepareForBailoutForId(property->LoadId(), TOS_REG);
          break;
        case NAMED_PROPERTY:
          EmitNamedPropertyLoad(property);
          PrepareForBailoutForId(property->LoadId(), TOS_REG);
          break;
        case KEYED_SUPER_PROPERTY:
          EmitKeyedSuperPropertyLoad(property);
          PrepareForBailoutForId(property->LoadId(), TOS_REG);
          break;
        case KEYED_PROPERTY:
          EmitKeyedPropertyLoad(property);
          PrepareForBailoutForId(property->LoadId(), TOS_REG);
          break;
      }
    }

    Token::Value op = expr->binary_op();
    PushOperand(eax);  // Left operand goes on the stack.
    VisitForAccumulatorValue(expr->value());

    if (ShouldInlineSmiCase(op)) {
      EmitInlineSmiBinaryOp(expr->binary_operation(),
                            op,
                            expr->target(),
                            expr->value());
    } else {
      EmitBinaryOp(expr->binary_operation(), op);
    }

    // Deoptimization point in case the binary operation may have side effects.
    PrepareForBailout(expr->binary_operation(), TOS_REG);
  } else {
    VisitForAccumulatorValue(expr->value());
  }

  SetExpressionPosition(expr);

  // Store the value.
  switch (assign_type) {
    case VARIABLE:
      EmitVariableAssignment(expr->target()->AsVariableProxy()->var(),
                             expr->op(), expr->AssignmentSlot());
      PrepareForBailoutForId(expr->AssignmentId(), TOS_REG);
      context()->Plug(eax);
      break;
    case NAMED_PROPERTY:
      EmitNamedPropertyAssignment(expr);
      break;
    case NAMED_SUPER_PROPERTY:
      EmitNamedSuperPropertyStore(property);
      context()->Plug(result_register());
      break;
    case KEYED_SUPER_PROPERTY:
      EmitKeyedSuperPropertyStore(property);
      context()->Plug(result_register());
      break;
    case KEYED_PROPERTY:
      EmitKeyedPropertyAssignment(expr);
      break;
  }
}
1749
1750
// Emits code for a 'yield' expression: saves the continuation offset and
// context into the generator object, suspends via the runtime (unless the
// operand stack is empty), and lays down the continuation code that handles
// both ordinary resumption and a 'return' resume mode.
void FullCodeGenerator::VisitYield(Yield* expr) {
  Comment cmnt(masm_, "[ Yield");
  SetExpressionPosition(expr);

  // Evaluate yielded value first; the initial iterator definition depends on
  // this. It stays on the stack while we update the iterator.
  VisitForStackValue(expr->expression());

  Label suspend, continuation, post_runtime, resume;

  __ jmp(&suspend);
  __ bind(&continuation);
  // When we arrive here, the stack top is the resume mode and
  // result_register() holds the input value (the argument given to the
  // respective resume operation).
  __ RecordGeneratorContinuation();
  __ pop(ebx);
  __ cmp(ebx, Immediate(Smi::FromInt(JSGeneratorObject::RETURN)));
  __ j(not_equal, &resume);
  // RETURN resume mode: wrap the input value in a {value, done: true}
  // iterator result and unwind out of the generator.
  __ push(result_register());
  EmitCreateIteratorResult(true);
  EmitUnwindAndReturn();

  __ bind(&suspend);
  OperandStackDepthIncrement(1);  // Not popped on this path.
  VisitForAccumulatorValue(expr->generator_object());
  DCHECK(continuation.pos() > 0 && Smi::IsValid(continuation.pos()));
  // Record where to continue and in which context, with a write barrier for
  // the context store.
  __ mov(FieldOperand(eax, JSGeneratorObject::kContinuationOffset),
         Immediate(Smi::FromInt(continuation.pos())));
  __ mov(FieldOperand(eax, JSGeneratorObject::kContextOffset), esi);
  __ mov(ecx, esi);
  __ RecordWriteField(eax, JSGeneratorObject::kContextOffset, ecx, edx,
                      kDontSaveFPRegs);
  // If esp equals the frame's expression-stack base, there is no operand
  // stack to save and the runtime call can be skipped.
  __ lea(ebx, Operand(ebp, StandardFrameConstants::kExpressionsOffset));
  __ cmp(esp, ebx);
  __ j(equal, &post_runtime);
  __ push(eax);  // generator object
  __ CallRuntime(Runtime::kSuspendJSGeneratorObject, 1);
  __ mov(context_register(),
         Operand(ebp, StandardFrameConstants::kContextOffset));
  __ bind(&post_runtime);
  PopOperand(result_register());
  EmitReturnSequence();

  __ bind(&resume);
  context()->Plug(result_register());
}
1798
1799
// Resumes a suspended generator object with the given value and resume mode.
// Rebuilds the generator's activation (receiver, argument holes and, on the
// slow path, operand-stack holes) and transfers control back into the
// generator: directly via a computed jump when mode is NEXT and there is no
// saved operand stack, otherwise via Runtime::kResumeJSGeneratorObject.
void FullCodeGenerator::EmitGeneratorResume(Expression *generator,
    Expression *value,
    JSGeneratorObject::ResumeMode resume_mode) {
  // The value stays in eax, and is ultimately read by the resumed generator, as
  // if CallRuntime(Runtime::kSuspendJSGeneratorObject) returned it. Or it
  // is read to throw the value when the resumed generator is already closed.
  // ebx will hold the generator object until the activation has been resumed.
  VisitForStackValue(generator);
  VisitForAccumulatorValue(value);
  PopOperand(ebx);

  // Store input value into generator object, with a write barrier.
  __ mov(FieldOperand(ebx, JSGeneratorObject::kInputOffset), result_register());
  __ mov(ecx, result_register());
  __ RecordWriteField(ebx, JSGeneratorObject::kInputOffset, ecx, edx,
                      kDontSaveFPRegs);

  // Load suspended function and context.
  __ mov(esi, FieldOperand(ebx, JSGeneratorObject::kContextOffset));
  __ mov(edi, FieldOperand(ebx, JSGeneratorObject::kFunctionOffset));

  // Push receiver.
  __ push(FieldOperand(ebx, JSGeneratorObject::kReceiverOffset));

  // Push holes for arguments to generator function. Since the parser forced
  // context allocation for any variables in generators, the actual argument
  // values have already been copied into the context and these dummy values
  // will never be used.
  __ mov(edx, FieldOperand(edi, JSFunction::kSharedFunctionInfoOffset));
  __ mov(edx,
         FieldOperand(edx, SharedFunctionInfo::kFormalParameterCountOffset));
  __ mov(ecx, isolate()->factory()->the_hole_value());
  Label push_argument_holes, push_frame;
  __ bind(&push_argument_holes);
  // edx holds a smi count; subtracting the smi constant 1 counts down and
  // the carry flag signals when it goes below zero.
  __ sub(edx, Immediate(Smi::FromInt(1)));
  __ j(carry, &push_frame);
  __ push(ecx);
  __ jmp(&push_argument_holes);

  // Enter a new JavaScript frame, and initialize its slots as they were when
  // the generator was suspended.
  Label resume_frame, done;
  __ bind(&push_frame);
  // The call pushes the address of 'done' as the new frame's return address.
  __ call(&resume_frame);
  __ jmp(&done);
  __ bind(&resume_frame);
  __ push(ebp);  // Caller's frame pointer.
  __ mov(ebp, esp);
  __ push(esi);  // Callee's context.
  __ push(edi);  // Callee's JS Function.

  // Load the operand stack size.
  __ mov(edx, FieldOperand(ebx, JSGeneratorObject::kOperandStackOffset));
  __ mov(edx, FieldOperand(edx, FixedArray::kLengthOffset));
  __ SmiUntag(edx);

  // If we are sending a value and there is no operand stack, we can jump back
  // in directly.
  if (resume_mode == JSGeneratorObject::NEXT) {
    Label slow_resume;
    __ cmp(edx, Immediate(0));
    __ j(not_zero, &slow_resume);
    // Compute the resume address: code entry + untagged continuation offset.
    __ mov(edx, FieldOperand(edi, JSFunction::kCodeEntryOffset));
    __ mov(ecx, FieldOperand(ebx, JSGeneratorObject::kContinuationOffset));
    __ SmiUntag(ecx);
    __ add(edx, ecx);
    __ mov(FieldOperand(ebx, JSGeneratorObject::kContinuationOffset),
           Immediate(Smi::FromInt(JSGeneratorObject::kGeneratorExecuting)));
    __ Push(Smi::FromInt(resume_mode));  // Consumed in continuation.
    __ jmp(edx);
    __ bind(&slow_resume);
  }

  // Otherwise, we push holes for the operand stack and call the runtime to fix
  // up the stack and the handlers.
  Label push_operand_holes, call_resume;
  __ bind(&push_operand_holes);
  __ sub(edx, Immediate(1));
  __ j(carry, &call_resume);
  __ push(ecx);
  __ jmp(&push_operand_holes);
  __ bind(&call_resume);
  // First resume_mode copy sits below the runtime arguments and is popped by
  // the yield continuation after the resume; the second is the runtime arg.
  __ Push(Smi::FromInt(resume_mode));  // Consumed in continuation.
  __ push(ebx);
  __ push(result_register());
  __ Push(Smi::FromInt(resume_mode));
  __ CallRuntime(Runtime::kResumeJSGeneratorObject);
  // Not reached: the runtime call returns elsewhere.
  __ Abort(kGeneratorFailedToResume);

  __ bind(&done);
  context()->Plug(result_register());
}
1893
Ben Murdoch097c5b22016-05-18 11:27:45 +01001894void FullCodeGenerator::PushOperand(MemOperand operand) {
1895 OperandStackDepthIncrement(1);
1896 __ Push(operand);
1897}
1898
1899void FullCodeGenerator::EmitOperandStackDepthCheck() {
1900 if (FLAG_debug_code) {
1901 int expected_diff = StandardFrameConstants::kFixedFrameSizeFromFp +
1902 operand_stack_depth_ * kPointerSize;
1903 __ mov(eax, ebp);
1904 __ sub(eax, esp);
1905 __ cmp(eax, Immediate(expected_diff));
1906 __ Assert(equal, kUnexpectedStackDepth);
1907 }
1908}
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00001909
// Allocates a JSIteratorResult whose 'value' field is popped from the
// operand stack and whose 'done' field is the given compile-time constant.
// The result object is left in eax.
void FullCodeGenerator::EmitCreateIteratorResult(bool done) {
  Label allocate, done_allocate;

  __ Allocate(JSIteratorResult::kSize, eax, ecx, edx, &allocate, TAG_OBJECT);
  __ jmp(&done_allocate, Label::kNear);

  // Slow path: inline new-space allocation failed, fall back to the runtime.
  __ bind(&allocate);
  __ Push(Smi::FromInt(JSIteratorResult::kSize));
  __ CallRuntime(Runtime::kAllocateInNewSpace);

  __ bind(&done_allocate);
  // Initialize map, (empty) properties and elements.
  __ mov(ebx, NativeContextOperand());
  __ mov(ebx, ContextOperand(ebx, Context::ITERATOR_RESULT_MAP_INDEX));
  __ mov(FieldOperand(eax, HeapObject::kMapOffset), ebx);
  __ mov(FieldOperand(eax, JSObject::kPropertiesOffset),
         isolate()->factory()->empty_fixed_array());
  __ mov(FieldOperand(eax, JSObject::kElementsOffset),
         isolate()->factory()->empty_fixed_array());
  // Pop the iterator value directly into the result's 'value' field.
  __ pop(FieldOperand(eax, JSIteratorResult::kValueOffset));
  __ mov(FieldOperand(eax, JSIteratorResult::kDoneOffset),
         isolate()->factory()->ToBoolean(done));
  STATIC_ASSERT(JSIteratorResult::kSize == 5 * kPointerSize);
  OperandStackDepthDecrement(1);  // The value was popped above.
}
1934
1935
// Emits the inlined smi fast path for a binary operation.  The left operand
// is popped from the stack into edx and the right operand is in eax.  A
// combined tag check dispatches to the inline smi code; non-smi inputs and
// results that do not fit a smi fall back to the BinaryOpIC stub through a
// patchable jump site.
void FullCodeGenerator::EmitInlineSmiBinaryOp(BinaryOperation* expr,
                                              Token::Value op,
                                              Expression* left,
                                              Expression* right) {
  // Do combined smi check of the operands. Left operand is on the
  // stack. Right operand is in eax.
  Label smi_case, done, stub_call;
  PopOperand(edx);
  __ mov(ecx, eax);  // Save the right operand; eax is clobbered next.
  __ or_(eax, edx);  // Tag bits of both operands combine in a single test.
  JumpPatchSite patch_site(masm_);
  patch_site.EmitJumpIfSmi(eax, &smi_case, Label::kNear);

  __ bind(&stub_call);
  __ mov(eax, ecx);  // Stub expects the right operand in eax.
  Handle<Code> code = CodeFactory::BinaryOpIC(isolate(), op).code();
  CallIC(code, expr->BinaryOperationFeedbackId());
  patch_site.EmitPatchInfo();
  __ jmp(&done, Label::kNear);

  // Smi case.
  __ bind(&smi_case);
  __ mov(eax, edx);  // Copy left operand in case of a stub call.

  switch (op) {
    case Token::SAR:
      // Shift the still-tagged left operand by the untagged count, then
      // clear the tag bits; an arithmetic right shift cannot overflow.
      __ SmiUntag(ecx);
      __ sar_cl(eax);  // No checks of result necessary
      __ and_(eax, Immediate(~kSmiTagMask));
      break;
    case Token::SHL: {
      Label result_ok;
      __ SmiUntag(eax);
      __ SmiUntag(ecx);
      __ shl_cl(eax);
      // Check that the *signed* result fits in a smi.
      __ cmp(eax, 0xc0000000);
      __ j(positive, &result_ok);
      // Re-tag the shift count before retrying via the stub.
      __ SmiTag(ecx);
      __ jmp(&stub_call);
      __ bind(&result_ok);
      __ SmiTag(eax);
      break;
    }
    case Token::SHR: {
      Label result_ok;
      __ SmiUntag(eax);
      __ SmiUntag(ecx);
      __ shr_cl(eax);
      // The unsigned result must fit in the non-negative smi range.
      __ test(eax, Immediate(0xc0000000));
      __ j(zero, &result_ok);
      __ SmiTag(ecx);
      __ jmp(&stub_call);
      __ bind(&result_ok);
      __ SmiTag(eax);
      break;
    }
    case Token::ADD:
      __ add(eax, ecx);
      __ j(overflow, &stub_call);
      break;
    case Token::SUB:
      __ sub(eax, ecx);
      __ j(overflow, &stub_call);
      break;
    case Token::MUL: {
      // Untagging one operand keeps the product correctly tagged.
      __ SmiUntag(eax);
      __ imul(eax, ecx);
      __ j(overflow, &stub_call);
      __ test(eax, eax);
      __ j(not_zero, &done, Label::kNear);
      // A zero product may really be -0 if an operand was negative; that
      // cannot be represented as a smi, so take the stub path.
      __ mov(ebx, edx);
      __ or_(ebx, ecx);
      __ j(negative, &stub_call);
      break;
    }
    case Token::BIT_OR:
      __ or_(eax, ecx);
      break;
    case Token::BIT_AND:
      __ and_(eax, ecx);
      break;
    case Token::BIT_XOR:
      __ xor_(eax, ecx);
      break;
    default:
      UNREACHABLE();
  }

  __ bind(&done);
  context()->Plug(eax);
}
2028
2029
// Emits code that defines the properties of a class literal.  Per the
// pushes below, the constructor is expected at esp[kPointerSize] and the
// prototype at esp[0]; both stay on the stack when this function returns.
void FullCodeGenerator::EmitClassDefineProperties(ClassLiteral* lit) {
  for (int i = 0; i < lit->properties()->length(); i++) {
    ObjectLiteral::Property* property = lit->properties()->at(i);
    Expression* value = property->value();

    // Push the home object for this property: the constructor for static
    // properties, the prototype otherwise.
    if (property->is_static()) {
      PushOperand(Operand(esp, kPointerSize));  // constructor
    } else {
      PushOperand(Operand(esp, 0));  // prototype
    }
    EmitPropertyKey(property, lit->GetIdForProperty(i));

    // The static prototype property is read only. We handle the non computed
    // property name case in the parser. Since this is the only case where we
    // need to check for an own read only property we special case this so we do
    // not need to do this for every property.
    if (property->is_static() && property->is_computed_name()) {
      __ CallRuntime(Runtime::kThrowIfStaticPrototype);
      __ push(eax);
    }

    VisitForStackValue(value);
    if (NeedsHomeObject(value)) {
      EmitSetHomeObject(value, 2, property->GetSlot());
    }

    // Class properties are always non-enumerable; dispatch to the matching
    // runtime definition function.
    switch (property->kind()) {
      case ObjectLiteral::Property::CONSTANT:
      case ObjectLiteral::Property::MATERIALIZED_LITERAL:
      case ObjectLiteral::Property::PROTOTYPE:
        UNREACHABLE();
      case ObjectLiteral::Property::COMPUTED:
        PushOperand(Smi::FromInt(DONT_ENUM));
        PushOperand(Smi::FromInt(property->NeedsSetFunctionName()));
        CallRuntimeWithOperands(Runtime::kDefineDataPropertyInLiteral);
        break;

      case ObjectLiteral::Property::GETTER:
        PushOperand(Smi::FromInt(DONT_ENUM));
        CallRuntimeWithOperands(Runtime::kDefineGetterPropertyUnchecked);
        break;

      case ObjectLiteral::Property::SETTER:
        PushOperand(Smi::FromInt(DONT_ENUM));
        CallRuntimeWithOperands(Runtime::kDefineSetterPropertyUnchecked);
        break;
    }
  }
}
2079
2080
// Emits a full (non-inlined) binary operation: the left operand is popped
// from the stack into edx, the right operand is in eax, and the BinaryOpIC
// stub is called.  The result is plugged from eax.
void FullCodeGenerator::EmitBinaryOp(BinaryOperation* expr, Token::Value op) {
  PopOperand(edx);
  Handle<Code> code = CodeFactory::BinaryOpIC(isolate(), op).code();
  JumpPatchSite patch_site(masm_);  // unbound, signals no inlined smi code.
  CallIC(code, expr->BinaryOperationFeedbackId());
  patch_site.EmitPatchInfo();
  context()->Plug(eax);
}
2089
2090
// Emits a store of the value currently in eax to the reference expression
// |expr|.  The value is preserved across evaluation of the receiver/key
// subexpressions; the 'super' cases shuffle the pushed operands into the
// order the store helpers expect while moving the value back into eax.
void FullCodeGenerator::EmitAssignment(Expression* expr,
                                       FeedbackVectorSlot slot) {
  DCHECK(expr->IsValidReferenceExpressionOrThis());

  Property* prop = expr->AsProperty();
  LhsKind assign_type = Property::GetAssignType(prop);

  switch (assign_type) {
    case VARIABLE: {
      Variable* var = expr->AsVariableProxy()->var();
      EffectContext context(this);
      EmitVariableAssignment(var, Token::ASSIGN, slot);
      break;
    }
    case NAMED_PROPERTY: {
      PushOperand(eax);  // Preserve value.
      VisitForAccumulatorValue(prop->obj());
      __ Move(StoreDescriptor::ReceiverRegister(), eax);
      PopOperand(StoreDescriptor::ValueRegister());  // Restore value.
      __ mov(StoreDescriptor::NameRegister(),
             prop->key()->AsLiteral()->value());
      EmitLoadStoreICSlot(slot);
      CallStoreIC();
      break;
    }
    case NAMED_SUPER_PROPERTY: {
      PushOperand(eax);
      VisitForStackValue(prop->obj()->AsSuperPropertyReference()->this_var());
      VisitForAccumulatorValue(
          prop->obj()->AsSuperPropertyReference()->home_object());
      // stack: value, this; eax: home_object
      Register scratch = ecx;
      Register scratch2 = edx;
      // Swap the saved value out for home_object so the stack reads
      // this, home_object with the value back in eax.
      __ mov(scratch, result_register());               // home_object
      __ mov(eax, MemOperand(esp, kPointerSize));       // value
      __ mov(scratch2, MemOperand(esp, 0));             // this
      __ mov(MemOperand(esp, kPointerSize), scratch2);  // this
      __ mov(MemOperand(esp, 0), scratch);              // home_object
      // stack: this, home_object. eax: value
      EmitNamedSuperPropertyStore(prop);
      break;
    }
    case KEYED_SUPER_PROPERTY: {
      PushOperand(eax);
      VisitForStackValue(prop->obj()->AsSuperPropertyReference()->this_var());
      VisitForStackValue(
          prop->obj()->AsSuperPropertyReference()->home_object());
      VisitForAccumulatorValue(prop->key());
      Register scratch = ecx;
      Register scratch2 = edx;
      // Rotate value/this/home_object/key into the order the keyed super
      // store expects: this, home_object, key on the stack, value in eax.
      __ mov(scratch2, MemOperand(esp, 2 * kPointerSize));  // value
      // stack: value, this, home_object; eax: key, edx: value
      __ mov(scratch, MemOperand(esp, kPointerSize));  // this
      __ mov(MemOperand(esp, 2 * kPointerSize), scratch);
      __ mov(scratch, MemOperand(esp, 0));  // home_object
      __ mov(MemOperand(esp, kPointerSize), scratch);
      __ mov(MemOperand(esp, 0), eax);
      __ mov(eax, scratch2);
      // stack: this, home_object, key; eax: value.
      EmitKeyedSuperPropertyStore(prop);
      break;
    }
    case KEYED_PROPERTY: {
      PushOperand(eax);  // Preserve value.
      VisitForStackValue(prop->obj());
      VisitForAccumulatorValue(prop->key());
      __ Move(StoreDescriptor::NameRegister(), eax);
      PopOperand(StoreDescriptor::ReceiverRegister());  // Receiver.
      PopOperand(StoreDescriptor::ValueRegister());     // Restore value.
      EmitLoadStoreICSlot(slot);
      Handle<Code> ic =
          CodeFactory::KeyedStoreIC(isolate(), language_mode()).code();
      CallIC(ic);
      break;
    }
  }
  context()->Plug(eax);
}
2169
2170
2171void FullCodeGenerator::EmitStoreToStackLocalOrContextSlot(
2172 Variable* var, MemOperand location) {
2173 __ mov(location, eax);
2174 if (var->IsContextSlot()) {
2175 __ mov(edx, eax);
2176 int offset = Context::SlotOffset(var->index());
2177 __ RecordWriteContextSlot(ecx, offset, edx, ebx, kDontSaveFPRegs);
2178 }
2179}
2180
2181
// Emits a store of eax into the given variable, dispatching on where the
// variable lives (global, stack/context slot, lookup slot) and on its mode
// (var, let, const, legacy const).  |op| is Token::INIT for initializing
// stores; hole checks implement the TDZ errors for let/const.
void FullCodeGenerator::EmitVariableAssignment(Variable* var, Token::Value op,
                                               FeedbackVectorSlot slot) {
  if (var->IsUnallocated()) {
    // Global var, const, or let.
    __ mov(StoreDescriptor::NameRegister(), var->name());
    __ mov(StoreDescriptor::ReceiverRegister(), NativeContextOperand());
    __ mov(StoreDescriptor::ReceiverRegister(),
           ContextOperand(StoreDescriptor::ReceiverRegister(),
                          Context::EXTENSION_INDEX));
    EmitLoadStoreICSlot(slot);
    CallStoreIC();

  } else if (var->mode() == LET && op != Token::INIT) {
    // Non-initializing assignment to let variable needs a write barrier.
    DCHECK(!var->IsLookupSlot());
    DCHECK(var->IsStackAllocated() || var->IsContextSlot());
    Label assign;
    MemOperand location = VarOperand(var, ecx);
    // TDZ check: a hole in the slot means the let binding is uninitialized.
    __ mov(edx, location);
    __ cmp(edx, isolate()->factory()->the_hole_value());
    __ j(not_equal, &assign, Label::kNear);
    __ push(Immediate(var->name()));
    __ CallRuntime(Runtime::kThrowReferenceError);
    __ bind(&assign);
    EmitStoreToStackLocalOrContextSlot(var, location);

  } else if (var->mode() == CONST && op != Token::INIT) {
    // Assignment to const variable needs a write barrier.
    DCHECK(!var->IsLookupSlot());
    DCHECK(var->IsStackAllocated() || var->IsContextSlot());
    Label const_error;
    MemOperand location = VarOperand(var, ecx);
    // An uninitialized const raises ReferenceError; otherwise the
    // assignment itself is the error.
    __ mov(edx, location);
    __ cmp(edx, isolate()->factory()->the_hole_value());
    __ j(not_equal, &const_error, Label::kNear);
    __ push(Immediate(var->name()));
    __ CallRuntime(Runtime::kThrowReferenceError);
    __ bind(&const_error);
    __ CallRuntime(Runtime::kThrowConstAssignError);

  } else if (var->is_this() && var->mode() == CONST && op == Token::INIT) {
    // Initializing assignment to const {this} needs a write barrier.
    DCHECK(var->IsStackAllocated() || var->IsContextSlot());
    Label uninitialized_this;
    MemOperand location = VarOperand(var, ecx);
    // {this} may only be initialized once; a non-hole value means it was
    // already initialized.
    __ mov(edx, location);
    __ cmp(edx, isolate()->factory()->the_hole_value());
    __ j(equal, &uninitialized_this);
    __ push(Immediate(var->name()));
    __ CallRuntime(Runtime::kThrowReferenceError);
    __ bind(&uninitialized_this);
    EmitStoreToStackLocalOrContextSlot(var, location);

  } else if (!var->is_const_mode() ||
             (var->mode() == CONST && op == Token::INIT)) {
    if (var->IsLookupSlot()) {
      // Assignment to var.
      __ Push(Immediate(var->name()));
      __ Push(eax);
      __ CallRuntime(is_strict(language_mode())
                         ? Runtime::kStoreLookupSlot_Strict
                         : Runtime::kStoreLookupSlot_Sloppy);
    } else {
      // Assignment to var or initializing assignment to let/const in harmony
      // mode.
      DCHECK(var->IsStackAllocated() || var->IsContextSlot());
      MemOperand location = VarOperand(var, ecx);
      if (FLAG_debug_code && var->mode() == LET && op == Token::INIT) {
        // Check for an uninitialized let binding.
        __ mov(edx, location);
        __ cmp(edx, isolate()->factory()->the_hole_value());
        __ Check(equal, kLetBindingReInitialization);
      }
      EmitStoreToStackLocalOrContextSlot(var, location);
    }

  } else if (var->mode() == CONST_LEGACY && op == Token::INIT) {
    // Const initializers need a write barrier.
    DCHECK(!var->IsParameter());  // No const parameters.
    if (var->IsLookupSlot()) {
      __ push(eax);
      __ push(esi);
      __ push(Immediate(var->name()));
      __ CallRuntime(Runtime::kInitializeLegacyConstLookupSlot);
    } else {
      DCHECK(var->IsStackLocal() || var->IsContextSlot());
      Label skip;
      MemOperand location = VarOperand(var, ecx);
      // Only initialize if the slot still holds the hole (first write wins).
      __ mov(edx, location);
      __ cmp(edx, isolate()->factory()->the_hole_value());
      __ j(not_equal, &skip, Label::kNear);
      EmitStoreToStackLocalOrContextSlot(var, location);
      __ bind(&skip);
    }

  } else {
    DCHECK(var->mode() == CONST_LEGACY && op != Token::INIT);
    if (is_strict(language_mode())) {
      __ CallRuntime(Runtime::kThrowConstAssignError);
    }
    // Silently ignore store in sloppy mode.
  }
}
2285
2286
// Emits a named store IC for an 'obj.key = value' assignment and plugs the
// stored value (eax) as the expression result.
void FullCodeGenerator::EmitNamedPropertyAssignment(Assignment* expr) {
  // Assignment to a property, using a named store IC.
  // eax : value
  // esp[0] : receiver
  Property* prop = expr->target()->AsProperty();
  DCHECK(prop != NULL);
  DCHECK(prop->key()->IsLiteral());

  __ mov(StoreDescriptor::NameRegister(), prop->key()->AsLiteral()->value());
  PopOperand(StoreDescriptor::ReceiverRegister());
  EmitLoadStoreICSlot(expr->AssignmentSlot());
  CallStoreIC();
  PrepareForBailoutForId(expr->AssignmentId(), TOS_REG);
  context()->Plug(eax);
}
2302
2303
// Emits a store to a named property of 'super' by pushing key and value and
// calling the appropriate StoreToSuper runtime function.
void FullCodeGenerator::EmitNamedSuperPropertyStore(Property* prop) {
  // Assignment to named property of super.
  // eax : value
  // stack : receiver ('this'), home_object
  DCHECK(prop != NULL);
  Literal* key = prop->key()->AsLiteral();
  DCHECK(key != NULL);

  PushOperand(key->value());
  PushOperand(eax);
  CallRuntimeWithOperands(is_strict(language_mode())
                              ? Runtime::kStoreToSuper_Strict
                              : Runtime::kStoreToSuper_Sloppy);
}
2318
2319
// Emits a store to a keyed property of 'super' by pushing the value and
// calling the appropriate StoreKeyedToSuper runtime function.
void FullCodeGenerator::EmitKeyedSuperPropertyStore(Property* prop) {
  // Assignment to keyed property of super.
  // eax : value
  // stack : receiver ('this'), home_object, key

  PushOperand(eax);
  CallRuntimeWithOperands(is_strict(language_mode())
                              ? Runtime::kStoreKeyedToSuper_Strict
                              : Runtime::kStoreKeyedToSuper_Sloppy);
}
2330
2331
// Emits a keyed store IC for an 'obj[key] = value' assignment and plugs the
// stored value (eax) as the expression result.
void FullCodeGenerator::EmitKeyedPropertyAssignment(Assignment* expr) {
  // Assignment to a property, using a keyed store IC.
  // eax : value
  // esp[0] : key
  // esp[kPointerSize] : receiver

  PopOperand(StoreDescriptor::NameRegister());  // Key.
  PopOperand(StoreDescriptor::ReceiverRegister());
  DCHECK(StoreDescriptor::ValueRegister().is(eax));
  Handle<Code> ic =
      CodeFactory::KeyedStoreIC(isolate(), language_mode()).code();
  EmitLoadStoreICSlot(expr->AssignmentSlot());
  CallIC(ic);
  PrepareForBailoutForId(expr->AssignmentId(), TOS_REG);
  context()->Plug(eax);
}
2348
2349
// Emits a property load for 'obj.name' / 'obj[key]' expressions, including
// the 'super' variants, and plugs the loaded value from eax.
void FullCodeGenerator::VisitProperty(Property* expr) {
  Comment cmnt(masm_, "[ Property");
  SetExpressionPosition(expr);

  Expression* key = expr->key();

  if (key->IsPropertyName()) {
    if (!expr->IsSuperAccess()) {
      VisitForAccumulatorValue(expr->obj());
      __ Move(LoadDescriptor::ReceiverRegister(), result_register());
      EmitNamedPropertyLoad(expr);
    } else {
      // Named 'super' load: receiver and home_object go on the stack.
      VisitForStackValue(expr->obj()->AsSuperPropertyReference()->this_var());
      VisitForStackValue(
          expr->obj()->AsSuperPropertyReference()->home_object());
      EmitNamedSuperPropertyLoad(expr);
    }
  } else {
    if (!expr->IsSuperAccess()) {
      VisitForStackValue(expr->obj());
      VisitForAccumulatorValue(expr->key());
      PopOperand(LoadDescriptor::ReceiverRegister());              // Object.
      __ Move(LoadDescriptor::NameRegister(), result_register());  // Key.
      EmitKeyedPropertyLoad(expr);
    } else {
      // Keyed 'super' load: receiver, home_object and key on the stack.
      VisitForStackValue(expr->obj()->AsSuperPropertyReference()->this_var());
      VisitForStackValue(
          expr->obj()->AsSuperPropertyReference()->home_object());
      VisitForStackValue(expr->key());
      EmitKeyedSuperPropertyLoad(expr);
    }
  }
  PrepareForBailoutForId(expr->LoadId(), TOS_REG);
  context()->Plug(eax);
}
2385
2386
2387void FullCodeGenerator::CallIC(Handle<Code> code,
2388 TypeFeedbackId ast_id) {
2389 ic_total_count_++;
2390 __ call(code, RelocInfo::CODE_TARGET, ast_id);
2391}
2392
2393
// Code common for calls using the IC.
// Loads the callee (either a variable or a named property of the receiver),
// arranges target function and receiver on the stack, and emits the call.
void FullCodeGenerator::EmitCallWithLoadIC(Call* expr) {
  Expression* callee = expr->expression();

  // Get the target function.
  ConvertReceiverMode convert_mode;
  if (callee->IsVariableProxy()) {
    { StackValueContext context(this);
      EmitVariableLoad(callee->AsVariableProxy());
      PrepareForBailout(callee, NO_REGISTERS);
    }
    // Push undefined as receiver. This is patched in the method prologue if it
    // is a sloppy mode method.
    PushOperand(isolate()->factory()->undefined_value());
    convert_mode = ConvertReceiverMode::kNullOrUndefined;
  } else {
    // Load the function from the receiver.
    DCHECK(callee->IsProperty());
    DCHECK(!callee->AsProperty()->IsSuperAccess());
    __ mov(LoadDescriptor::ReceiverRegister(), Operand(esp, 0));
    EmitNamedPropertyLoad(callee->AsProperty());
    PrepareForBailoutForId(callee->AsProperty()->LoadId(), TOS_REG);
    // Push the target function under the receiver.
    PushOperand(Operand(esp, 0));
    __ mov(Operand(esp, kPointerSize), eax);
    convert_mode = ConvertReceiverMode::kNotNullOrUndefined;
  }

  EmitCall(expr, convert_mode);
}
2424
2425
// Emits a call to a named property of 'super' (e.g. super.foo(...)): loads
// the target via Runtime::kLoadFromSuper, then calls it with 'this' as the
// receiver.
void FullCodeGenerator::EmitSuperCallWithLoadIC(Call* expr) {
  SetExpressionPosition(expr);
  Expression* callee = expr->expression();
  DCHECK(callee->IsProperty());
  Property* prop = callee->AsProperty();
  DCHECK(prop->IsSuperAccess());

  Literal* key = prop->key()->AsLiteral();
  DCHECK(!key->value()->IsSmi());
  // Load the function from the receiver.
  SuperPropertyReference* super_ref = prop->obj()->AsSuperPropertyReference();
  VisitForStackValue(super_ref->home_object());
  VisitForAccumulatorValue(super_ref->this_var());
  // Duplicate the receiver and home_object so one set survives the runtime
  // call as the eventual call receiver.
  PushOperand(eax);
  PushOperand(eax);
  PushOperand(Operand(esp, kPointerSize * 2));
  PushOperand(key->value());
  // Stack here:
  //  - home_object
  //  - this (receiver)
  //  - this (receiver) <-- LoadFromSuper will pop here and below.
  //  - home_object
  //  - key
  CallRuntimeWithOperands(Runtime::kLoadFromSuper);

  // Replace home_object with target function.
  __ mov(Operand(esp, kPointerSize), eax);

  // Stack here:
  // - target function
  // - this (receiver)
  EmitCall(expr);
}
2459
2460
// Code common for calls using the IC.
// Emits a call of the form obj[key](...): loads the target with the keyed
// load IC, then places it under the receiver and emits the call.
void FullCodeGenerator::EmitKeyedCallWithLoadIC(Call* expr,
                                                Expression* key) {
  // Load the key.
  VisitForAccumulatorValue(key);

  Expression* callee = expr->expression();

  // Load the function from the receiver.
  DCHECK(callee->IsProperty());
  __ mov(LoadDescriptor::ReceiverRegister(), Operand(esp, 0));
  __ mov(LoadDescriptor::NameRegister(), eax);
  EmitKeyedPropertyLoad(callee->AsProperty());
  PrepareForBailoutForId(callee->AsProperty()->LoadId(), TOS_REG);

  // Push the target function under the receiver.
  PushOperand(Operand(esp, 0));
  __ mov(Operand(esp, kPointerSize), eax);

  EmitCall(expr, ConvertReceiverMode::kNotNullOrUndefined);
}
2482
2483
// Emits a keyed super property call, e.g. super[key](...): like
// EmitSuperCallWithLoadIC but the key is a computed expression and the
// load goes through Runtime::kLoadKeyedFromSuper.
void FullCodeGenerator::EmitKeyedSuperCallWithLoadIC(Call* expr) {
  Expression* callee = expr->expression();
  DCHECK(callee->IsProperty());
  Property* prop = callee->AsProperty();
  DCHECK(prop->IsSuperAccess());

  SetExpressionPosition(prop);
  // Load the function from the receiver.
  SuperPropertyReference* super_ref = prop->obj()->AsSuperPropertyReference();
  VisitForStackValue(super_ref->home_object());
  VisitForAccumulatorValue(super_ref->this_var());
  // Push 'this' twice: lower copy is the surviving call receiver, upper
  // copy is consumed by the runtime call together with home_object/key.
  PushOperand(eax);
  PushOperand(eax);
  PushOperand(Operand(esp, kPointerSize * 2));
  // The key may be an arbitrary expression, so evaluate it onto the stack.
  VisitForStackValue(prop->key());
  // Stack here:
  //  - home_object
  //  - this (receiver)
  //  - this (receiver) <-- LoadKeyedFromSuper will pop here and below.
  //  - home_object
  //  - key
  CallRuntimeWithOperands(Runtime::kLoadKeyedFromSuper);

  // Replace home_object with target function (runtime result in eax).
  __ mov(Operand(esp, kPointerSize), eax);

  // Stack here:
  //  - target function
  //  - this (receiver)
  EmitCall(expr);
}
2515
2516
// Common tail for all IC-based calls: expects the target function and the
// receiver already on the stack (function below receiver).  Evaluates the
// arguments, then invokes the CallIC with the feedback slot in edx and the
// function in edi.  Handles the tail-call case by updating profiling
// counters before the (non-returning) call.
void FullCodeGenerator::EmitCall(Call* expr, ConvertReceiverMode mode) {
  // Load the arguments.
  ZoneList<Expression*>* args = expr->arguments();
  int arg_count = args->length();
  for (int i = 0; i < arg_count; i++) {
    VisitForStackValue(args->at(i));
  }

  PrepareForBailoutForId(expr->CallId(), NO_REGISTERS);
  SetCallPosition(expr, expr->tail_call_mode());
  if (expr->tail_call_mode() == TailCallMode::kAllow) {
    if (FLAG_trace) {
      __ CallRuntime(Runtime::kTraceTailCall);
    }
    // Update profiling counters before the tail call since we will
    // not return to this function.
    EmitProfilingCounterHandlingForReturnSequence(true);
  }
  Handle<Code> ic =
      CodeFactory::CallIC(isolate(), arg_count, mode, expr->tail_call_mode())
          .code();
  // edx: feedback slot (as Smi); edi: target function (below the receiver
  // and arg_count arguments on the stack).
  __ Move(edx, Immediate(SmiFromSlot(expr->CallFeedbackICSlot())));
  __ mov(edi, Operand(esp, (arg_count + 1) * kPointerSize));
  // Don't assign a type feedback id to the IC, since type feedback is provided
  // by the vector above.
  CallIC(ic);
  // The call consumed the receiver and arguments from the operand stack.
  OperandStackDepthDecrement(arg_count + 1);

  RecordJSReturnSite(expr);

  // Restore context register.
  __ mov(esi, Operand(ebp, StandardFrameConstants::kContextOffset));

  // Drop the function still on the stack and leave the result (eax).
  context()->DropAndPlug(1, eax);
}
2552
2553
// Emits the Runtime::kResolvePossiblyDirectEval call that determines the
// actual callee for a potential direct eval().  The caller has already
// pushed a copy of the function; this pushes the remaining resolver
// arguments.  Raw __ push (rather than PushOperand) is used here —
// presumably safe because the runtime call immediately consumes these
// slots, leaving the tracked operand depth unchanged (NOTE(review):
// confirm against CallRuntime's popping behavior).
void FullCodeGenerator::EmitResolvePossiblyDirectEval(int arg_count) {
  // Push copy of the first argument or undefined if it doesn't exist.
  if (arg_count > 0) {
    __ push(Operand(esp, arg_count * kPointerSize));
  } else {
    __ push(Immediate(isolate()->factory()->undefined_value()));
  }

  // Push the enclosing function.
  __ push(Operand(ebp, JavaScriptFrameConstants::kFunctionOffset));

  // Push the language mode.
  __ push(Immediate(Smi::FromInt(language_mode())));

  // Push the start position of the scope the calls resides in.
  __ push(Immediate(Smi::FromInt(scope()->start_position())));

  // Do the runtime call.  The resolved function is returned in eax.
  __ CallRuntime(Runtime::kResolvePossiblyDirectEval);
}
2574
2575
// See http://www.ecma-international.org/ecma-262/6.0/#sec-function-calls.
// Pushes the callee and its "with" base object (the implicit receiver)
// for a call through a variable that may live in a lookup slot (i.e. may
// be shadowed by eval-introduced bindings).  Leaves two stack slots:
// function (below) and receiver (on top).
void FullCodeGenerator::PushCalleeAndWithBaseObject(Call* expr) {
  VariableProxy* callee = expr->expression()->AsVariableProxy();
  if (callee->var()->IsLookupSlot()) {
    Label slow, done;
    SetExpressionPosition(callee);
    // Generate code for loading from variables potentially shadowed by
    // eval-introduced variables.
    EmitDynamicLookupFastCase(callee, NOT_INSIDE_TYPEOF, &slow, &done);

    __ bind(&slow);
    // Call the runtime to find the function to call (returned in eax) and
    // the object holding it (returned in edx).
    __ Push(callee->name());
    __ CallRuntime(Runtime::kLoadLookupSlotForCall);
    PushOperand(eax);  // Function.
    PushOperand(edx);  // Receiver.
    PrepareForBailoutForId(expr->LookupId(), NO_REGISTERS);

    // If fast case code has been generated, emit code to push the function
    // and receiver and have the slow path jump around this code.
    if (done.is_linked()) {
      Label call;
      __ jmp(&call, Label::kNear);
      __ bind(&done);
      // Push function.
      __ push(eax);
      // The receiver is implicitly the global receiver. Indicate this by
      // passing the hole to the call function stub.
      // NOTE(review): the code actually pushes undefined, not the hole —
      // the comment above appears stale; confirm against the call stub.
      __ push(Immediate(isolate()->factory()->undefined_value()));
      __ bind(&call);
    }
  } else {
    VisitForStackValue(callee);
    // refEnv.WithBaseObject()
    PushOperand(isolate()->factory()->undefined_value());
  }
}
2614
2615
// Emits a call that may be a direct eval: resolves the callee at runtime
// via EmitResolvePossiblyDirectEval, patches the resolved function back
// into the stack frame, then performs an ordinary Call with any-receiver
// conversion.
void FullCodeGenerator::EmitPossiblyEvalCall(Call* expr) {
  // In a call to eval, we first call RuntimeHidden_ResolvePossiblyDirectEval
  // to resolve the function we need to call. Then we call the resolved
  // function using the given arguments.
  ZoneList<Expression*>* args = expr->arguments();
  int arg_count = args->length();

  PushCalleeAndWithBaseObject(expr);

  // Push the arguments.
  for (int i = 0; i < arg_count; i++) {
    VisitForStackValue(args->at(i));
  }

  // Push a copy of the function (found below the arguments) and
  // resolve eval.
  __ push(Operand(esp, (arg_count + 1) * kPointerSize));
  EmitResolvePossiblyDirectEval(arg_count);

  // Touch up the stack with the resolved function.
  __ mov(Operand(esp, (arg_count + 1) * kPointerSize), eax);

  PrepareForBailoutForId(expr->EvalId(), NO_REGISTERS);

  SetCallPosition(expr);
  // Load function into edi and argument count into eax for the Call builtin.
  __ mov(edi, Operand(esp, (arg_count + 1) * kPointerSize));
  __ Set(eax, arg_count);
  __ Call(isolate()->builtins()->Call(ConvertReceiverMode::kAny,
                                      expr->tail_call_mode()),
          RelocInfo::CODE_TARGET);
  // Receiver and arguments were consumed by the call.
  OperandStackDepthDecrement(arg_count + 1);
  RecordJSReturnSite(expr);
  // Restore context register.
  __ mov(esi, Operand(ebp, StandardFrameConstants::kContextOffset));
  context()->DropAndPlug(1, eax);
}
2652
2653
// Emits a 'new' expression: evaluates constructor and arguments, records
// call-site type feedback, and invokes the CallConstructStub.
void FullCodeGenerator::VisitCallNew(CallNew* expr) {
  Comment cmnt(masm_, "[ CallNew");
  // According to ECMA-262, section 11.2.2, page 44, the function
  // expression in new calls must be evaluated before the
  // arguments.

  // Push constructor on the stack.  If it's not a function it's used as
  // receiver for CALL_NON_FUNCTION, otherwise the value on the stack is
  // ignored.
  DCHECK(!expr->expression()->IsSuperPropertyReference());
  VisitForStackValue(expr->expression());

  // Push the arguments ("left-to-right") on the stack.
  ZoneList<Expression*>* args = expr->arguments();
  int arg_count = args->length();
  for (int i = 0; i < arg_count; i++) {
    VisitForStackValue(args->at(i));
  }

  // Call the construct call builtin that handles allocation and
  // constructor invocation.
  SetConstructCallPosition(expr);

  // Load function and argument count into edi and eax.
  __ Move(eax, Immediate(arg_count));
  __ mov(edi, Operand(esp, arg_count * kPointerSize));

  // Record call targets in unoptimized code: feedback vector in ebx,
  // slot index (as Smi) in edx.
  __ EmitLoadTypeFeedbackVector(ebx);
  __ mov(edx, Immediate(SmiFromSlot(expr->CallNewFeedbackSlot())));

  CallConstructStub stub(isolate());
  __ call(stub.GetCode(), RelocInfo::CODE_TARGET);
  // Constructor and arguments were consumed by the stub.
  OperandStackDepthDecrement(arg_count + 1);
  PrepareForBailoutForId(expr->ReturnId(), TOS_REG);
  // Restore context register.
  __ mov(esi, Operand(ebp, StandardFrameConstants::kContextOffset));
  context()->Plug(eax);
}
2693
2694
// Emits a super(...) constructor call: the target is the [[Prototype]] of
// the active function's map (i.e. the super constructor), new.target is
// loaded from its variable, and the Construct builtin does the rest.
void FullCodeGenerator::EmitSuperConstructorCall(Call* expr) {
  SuperCallReference* super_call_ref =
      expr->expression()->AsSuperCallReference();
  DCHECK_NOT_NULL(super_call_ref);

  // Push the super constructor target on the stack (may be null,
  // but the Construct builtin can deal with that properly).
  VisitForAccumulatorValue(super_call_ref->this_function_var());
  __ AssertFunction(result_register());
  __ mov(result_register(),
         FieldOperand(result_register(), HeapObject::kMapOffset));
  PushOperand(FieldOperand(result_register(), Map::kPrototypeOffset));

  // Push the arguments ("left-to-right") on the stack.
  ZoneList<Expression*>* args = expr->arguments();
  int arg_count = args->length();
  for (int i = 0; i < arg_count; i++) {
    VisitForStackValue(args->at(i));
  }

  // Call the construct call builtin that handles allocation and
  // constructor invocation.
  SetConstructCallPosition(expr);

  // Load new target into edx.
  VisitForAccumulatorValue(super_call_ref->new_target_var());
  __ mov(edx, result_register());

  // Load function and argument count into edi and eax.
  __ Move(eax, Immediate(arg_count));
  __ mov(edi, Operand(esp, arg_count * kPointerSize));

  __ Call(isolate()->builtins()->Construct(), RelocInfo::CODE_TARGET);
  // Target and arguments were consumed by the builtin.
  OperandStackDepthDecrement(arg_count + 1);

  RecordJSReturnSite(expr);

  // Restore context register.
  __ mov(esi, Operand(ebp, StandardFrameConstants::kContextOffset));
  context()->Plug(eax);
}
2736
2737
2738void FullCodeGenerator::EmitIsSmi(CallRuntime* expr) {
2739 ZoneList<Expression*>* args = expr->arguments();
2740 DCHECK(args->length() == 1);
2741
2742 VisitForAccumulatorValue(args->at(0));
2743
2744 Label materialize_true, materialize_false;
2745 Label* if_true = NULL;
2746 Label* if_false = NULL;
2747 Label* fall_through = NULL;
2748 context()->PrepareTest(&materialize_true, &materialize_false,
2749 &if_true, &if_false, &fall_through);
2750
2751 PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
2752 __ test(eax, Immediate(kSmiTagMask));
2753 Split(zero, if_true, if_false, fall_through);
2754
2755 context()->Plug(if_true, if_false);
2756}
2757
2758
2759void FullCodeGenerator::EmitIsJSReceiver(CallRuntime* expr) {
2760 ZoneList<Expression*>* args = expr->arguments();
2761 DCHECK(args->length() == 1);
2762
2763 VisitForAccumulatorValue(args->at(0));
2764
2765 Label materialize_true, materialize_false;
2766 Label* if_true = NULL;
2767 Label* if_false = NULL;
2768 Label* fall_through = NULL;
2769 context()->PrepareTest(&materialize_true, &materialize_false,
2770 &if_true, &if_false, &fall_through);
2771
2772 __ JumpIfSmi(eax, if_false);
2773 __ CmpObjectType(eax, FIRST_JS_RECEIVER_TYPE, ebx);
2774 PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
2775 Split(above_equal, if_true, if_false, fall_through);
2776
2777 context()->Plug(if_true, if_false);
2778}
2779
2780
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00002781void FullCodeGenerator::EmitIsArray(CallRuntime* expr) {
2782 ZoneList<Expression*>* args = expr->arguments();
2783 DCHECK(args->length() == 1);
2784
2785 VisitForAccumulatorValue(args->at(0));
2786
2787 Label materialize_true, materialize_false;
2788 Label* if_true = NULL;
2789 Label* if_false = NULL;
2790 Label* fall_through = NULL;
2791 context()->PrepareTest(&materialize_true, &materialize_false,
2792 &if_true, &if_false, &fall_through);
2793
2794 __ JumpIfSmi(eax, if_false);
2795 __ CmpObjectType(eax, JS_ARRAY_TYPE, ebx);
2796 PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
2797 Split(equal, if_true, if_false, fall_through);
2798
2799 context()->Plug(if_true, if_false);
2800}
2801
2802
2803void FullCodeGenerator::EmitIsTypedArray(CallRuntime* expr) {
2804 ZoneList<Expression*>* args = expr->arguments();
2805 DCHECK(args->length() == 1);
2806
2807 VisitForAccumulatorValue(args->at(0));
2808
2809 Label materialize_true, materialize_false;
2810 Label* if_true = NULL;
2811 Label* if_false = NULL;
2812 Label* fall_through = NULL;
2813 context()->PrepareTest(&materialize_true, &materialize_false, &if_true,
2814 &if_false, &fall_through);
2815
2816 __ JumpIfSmi(eax, if_false);
2817 __ CmpObjectType(eax, JS_TYPED_ARRAY_TYPE, ebx);
2818 PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
2819 Split(equal, if_true, if_false, fall_through);
2820
2821 context()->Plug(if_true, if_false);
2822}
2823
2824
2825void FullCodeGenerator::EmitIsRegExp(CallRuntime* expr) {
2826 ZoneList<Expression*>* args = expr->arguments();
2827 DCHECK(args->length() == 1);
2828
2829 VisitForAccumulatorValue(args->at(0));
2830
2831 Label materialize_true, materialize_false;
2832 Label* if_true = NULL;
2833 Label* if_false = NULL;
2834 Label* fall_through = NULL;
2835 context()->PrepareTest(&materialize_true, &materialize_false,
2836 &if_true, &if_false, &fall_through);
2837
2838 __ JumpIfSmi(eax, if_false);
2839 __ CmpObjectType(eax, JS_REGEXP_TYPE, ebx);
2840 PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
2841 Split(equal, if_true, if_false, fall_through);
2842
2843 context()->Plug(if_true, if_false);
2844}
2845
2846
2847void FullCodeGenerator::EmitIsJSProxy(CallRuntime* expr) {
2848 ZoneList<Expression*>* args = expr->arguments();
2849 DCHECK(args->length() == 1);
2850
2851 VisitForAccumulatorValue(args->at(0));
2852
2853 Label materialize_true, materialize_false;
2854 Label* if_true = NULL;
2855 Label* if_false = NULL;
2856 Label* fall_through = NULL;
2857 context()->PrepareTest(&materialize_true, &materialize_false, &if_true,
2858 &if_false, &fall_through);
2859
2860 __ JumpIfSmi(eax, if_false);
2861 __ CmpObjectType(eax, JS_PROXY_TYPE, ebx);
2862 PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
2863 Split(equal, if_true, if_false, fall_through);
2864
2865 context()->Plug(if_true, if_false);
2866}
2867
2868
// %_ClassOf(value): computes the legacy [[Class]]-style name of a value:
// null for non-receivers, 'Function' for (bound) functions, the
// constructor's instance class name otherwise, and 'Object' when the map
// constructor is not a JS function.  Result in eax.
void FullCodeGenerator::EmitClassOf(CallRuntime* expr) {
  ZoneList<Expression*>* args = expr->arguments();
  DCHECK(args->length() == 1);
  Label done, null, function, non_function_constructor;

  VisitForAccumulatorValue(args->at(0));

  // If the object is not a JSReceiver, we return null.
  __ JumpIfSmi(eax, &null, Label::kNear);
  STATIC_ASSERT(LAST_JS_RECEIVER_TYPE == LAST_TYPE);
  // Note: CmpObjectType clobbers eax with the map; instance type checks
  // below operate on that map.
  __ CmpObjectType(eax, FIRST_JS_RECEIVER_TYPE, eax);
  __ j(below, &null, Label::kNear);

  // Return 'Function' for JSFunction and JSBoundFunction objects.
  __ CmpInstanceType(eax, FIRST_FUNCTION_TYPE);
  STATIC_ASSERT(LAST_FUNCTION_TYPE == LAST_TYPE);
  __ j(above_equal, &function, Label::kNear);

  // Check if the constructor in the map is a JS function.
  __ GetMapConstructor(eax, eax, ebx);
  __ CmpInstanceType(ebx, JS_FUNCTION_TYPE);
  __ j(not_equal, &non_function_constructor, Label::kNear);

  // eax now contains the constructor function. Grab the
  // instance class name from there.
  __ mov(eax, FieldOperand(eax, JSFunction::kSharedFunctionInfoOffset));
  __ mov(eax, FieldOperand(eax, SharedFunctionInfo::kInstanceClassNameOffset));
  __ jmp(&done, Label::kNear);

  // Non-JS objects have class null.
  __ bind(&null);
  __ mov(eax, isolate()->factory()->null_value());
  __ jmp(&done, Label::kNear);

  // Functions have class 'Function'.
  __ bind(&function);
  __ mov(eax, isolate()->factory()->Function_string());
  __ jmp(&done, Label::kNear);

  // Objects with a non-function constructor have class 'Object'.
  __ bind(&non_function_constructor);
  __ mov(eax, isolate()->factory()->Object_string());

  // All done.
  __ bind(&done);

  context()->Plug(eax);
}
2917
2918
2919void FullCodeGenerator::EmitValueOf(CallRuntime* expr) {
2920 ZoneList<Expression*>* args = expr->arguments();
2921 DCHECK(args->length() == 1);
2922
2923 VisitForAccumulatorValue(args->at(0)); // Load the object.
2924
2925 Label done;
2926 // If the object is a smi return the object.
2927 __ JumpIfSmi(eax, &done, Label::kNear);
2928 // If the object is not a value type, return the object.
2929 __ CmpObjectType(eax, JS_VALUE_TYPE, ebx);
2930 __ j(not_equal, &done, Label::kNear);
2931 __ mov(eax, FieldOperand(eax, JSValue::kValueOffset));
2932
2933 __ bind(&done);
2934 context()->Plug(eax);
2935}
2936
2937
// %_OneByteSeqStringSetChar(index, value, string): stores a character
// (Smi char code) into a sequential one-byte string at the given Smi
// index.  Returns the string.  No bounds check outside debug mode.
void FullCodeGenerator::EmitOneByteSeqStringSetChar(CallRuntime* expr) {
  ZoneList<Expression*>* args = expr->arguments();
  DCHECK_EQ(3, args->length());

  Register string = eax;
  Register index = ebx;
  Register value = ecx;

  VisitForStackValue(args->at(0));  // index
  VisitForStackValue(args->at(1));  // value
  VisitForAccumulatorValue(args->at(2));  // string

  // Pop in reverse push order: value was pushed last.
  PopOperand(value);
  PopOperand(index);

  if (FLAG_debug_code) {
    __ test(value, Immediate(kSmiTagMask));
    __ Check(zero, kNonSmiValue);
    __ test(index, Immediate(kSmiTagMask));
    __ Check(zero, kNonSmiValue);
  }

  __ SmiUntag(value);
  __ SmiUntag(index);

  if (FLAG_debug_code) {
    static const uint32_t one_byte_seq_type = kSeqStringTag | kOneByteStringTag;
    __ EmitSeqStringSetCharCheck(string, index, value, one_byte_seq_type);
  }

  // One byte per character: scale factor times_1.
  __ mov_b(FieldOperand(string, index, times_1, SeqOneByteString::kHeaderSize),
           value);
  context()->Plug(string);
}
2972
2973
// %_TwoByteSeqStringSetChar(index, value, string): stores a character
// (Smi char code) into a sequential two-byte string at the given Smi
// index.  Returns the string.  No bounds check outside debug mode.
void FullCodeGenerator::EmitTwoByteSeqStringSetChar(CallRuntime* expr) {
  ZoneList<Expression*>* args = expr->arguments();
  DCHECK_EQ(3, args->length());

  Register string = eax;
  Register index = ebx;
  Register value = ecx;

  VisitForStackValue(args->at(0));  // index
  VisitForStackValue(args->at(1));  // value
  VisitForAccumulatorValue(args->at(2));  // string
  // Pop in reverse push order: value was pushed last.
  PopOperand(value);
  PopOperand(index);

  if (FLAG_debug_code) {
    __ test(value, Immediate(kSmiTagMask));
    __ Check(zero, kNonSmiValue);
    __ test(index, Immediate(kSmiTagMask));
    __ Check(zero, kNonSmiValue);
    // The check helper wants an untagged index; retag afterwards so the
    // non-debug path below sees the same state either way.
    __ SmiUntag(index);
    static const uint32_t two_byte_seq_type = kSeqStringTag | kTwoByteStringTag;
    __ EmitSeqStringSetCharCheck(string, index, value, two_byte_seq_type);
    __ SmiTag(index);
  }

  __ SmiUntag(value);
  // No need to untag a smi for two-byte addressing: the Smi tag shift
  // equals the two-byte scale, so the tagged index addresses correctly.
  __ mov_w(FieldOperand(string, index, times_1, SeqTwoByteString::kHeaderSize),
           value);
  context()->Plug(string);
}
3005
3006
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00003007void FullCodeGenerator::EmitStringCharFromCode(CallRuntime* expr) {
3008 ZoneList<Expression*>* args = expr->arguments();
3009 DCHECK(args->length() == 1);
3010
3011 VisitForAccumulatorValue(args->at(0));
3012
3013 Label done;
3014 StringCharFromCodeGenerator generator(eax, ebx);
3015 generator.GenerateFast(masm_);
3016 __ jmp(&done);
3017
3018 NopRuntimeCallHelper call_helper;
3019 generator.GenerateSlow(masm_, call_helper);
3020
3021 __ bind(&done);
3022 context()->Plug(ebx);
3023}
3024
3025
// %_StringCharCodeAt(string, index): returns the char code at 'index'
// as a Smi in edx, NaN when the index is out of range, or undefined
// (triggering conversion in the caller) when the index needs conversion.
void FullCodeGenerator::EmitStringCharCodeAt(CallRuntime* expr) {
  ZoneList<Expression*>* args = expr->arguments();
  DCHECK(args->length() == 2);

  VisitForStackValue(args->at(0));
  VisitForAccumulatorValue(args->at(1));

  Register object = ebx;
  Register index = eax;
  Register result = edx;

  // Index is already in eax (accumulator); fetch the string operand.
  PopOperand(object);

  Label need_conversion;
  Label index_out_of_range;
  Label done;
  StringCharCodeAtGenerator generator(object,
                                      index,
                                      result,
                                      &need_conversion,
                                      &need_conversion,
                                      &index_out_of_range,
                                      STRING_INDEX_IS_NUMBER);
  generator.GenerateFast(masm_);
  __ jmp(&done);

  __ bind(&index_out_of_range);
  // When the index is out of range, the spec requires us to return
  // NaN.
  __ Move(result, Immediate(isolate()->factory()->nan_value()));
  __ jmp(&done);

  __ bind(&need_conversion);
  // Move the undefined value into the result register, which will
  // trigger conversion.
  __ Move(result, Immediate(isolate()->factory()->undefined_value()));
  __ jmp(&done);

  NopRuntimeCallHelper call_helper;
  generator.GenerateSlow(masm_, NOT_PART_OF_IC_HANDLER, call_helper);

  __ bind(&done);
  context()->Plug(result);
}
3070
3071
// %_StringCharAt(string, index): returns the one-character string at
// 'index' in eax, the empty string when the index is out of range, or
// Smi zero (triggering conversion) when the index needs conversion.
void FullCodeGenerator::EmitStringCharAt(CallRuntime* expr) {
  ZoneList<Expression*>* args = expr->arguments();
  DCHECK(args->length() == 2);

  VisitForStackValue(args->at(0));
  VisitForAccumulatorValue(args->at(1));

  Register object = ebx;
  Register index = eax;
  Register scratch = edx;
  Register result = eax;

  // Index is already in eax (accumulator); fetch the string operand.
  PopOperand(object);

  Label need_conversion;
  Label index_out_of_range;
  Label done;
  StringCharAtGenerator generator(object,
                                  index,
                                  scratch,
                                  result,
                                  &need_conversion,
                                  &need_conversion,
                                  &index_out_of_range,
                                  STRING_INDEX_IS_NUMBER);
  generator.GenerateFast(masm_);
  __ jmp(&done);

  __ bind(&index_out_of_range);
  // When the index is out of range, the spec requires us to return
  // the empty string.
  __ Move(result, Immediate(isolate()->factory()->empty_string()));
  __ jmp(&done);

  __ bind(&need_conversion);
  // Move smi zero into the result register, which will trigger
  // conversion.
  __ Move(result, Immediate(Smi::FromInt(0)));
  __ jmp(&done);

  NopRuntimeCallHelper call_helper;
  generator.GenerateSlow(masm_, NOT_PART_OF_IC_HANDLER, call_helper);

  __ bind(&done);
  context()->Plug(result);
}
3118
3119
// %_Call(target, receiver, ...args): calls 'target' with the given
// receiver and arguments through the generic Call builtin.  The first
// two intrinsic arguments are the target and receiver, so argc excludes
// them.
void FullCodeGenerator::EmitCall(CallRuntime* expr) {
  ZoneList<Expression*>* args = expr->arguments();
  DCHECK_LE(2, args->length());
  // Push target, receiver and arguments onto the stack.
  for (Expression* const arg : *args) {
    VisitForStackValue(arg);
  }
  PrepareForBailoutForId(expr->CallId(), NO_REGISTERS);
  // Move target to edi.
  int const argc = args->length() - 2;
  __ mov(edi, Operand(esp, (argc + 1) * kPointerSize));
  // Call the target.
  __ mov(eax, Immediate(argc));
  __ Call(isolate()->builtins()->Call(), RelocInfo::CODE_TARGET);
  // Receiver and arguments were consumed by the builtin.
  OperandStackDepthDecrement(argc + 1);
  // Restore context register.
  __ mov(esi, Operand(ebp, StandardFrameConstants::kContextOffset));
  // Discard the function left on TOS.
  context()->DropAndPlug(1, eax);
}
3140
3141
3142void FullCodeGenerator::EmitHasCachedArrayIndex(CallRuntime* expr) {
3143 ZoneList<Expression*>* args = expr->arguments();
3144 DCHECK(args->length() == 1);
3145
3146 VisitForAccumulatorValue(args->at(0));
3147
3148 __ AssertString(eax);
3149
3150 Label materialize_true, materialize_false;
3151 Label* if_true = NULL;
3152 Label* if_false = NULL;
3153 Label* fall_through = NULL;
3154 context()->PrepareTest(&materialize_true, &materialize_false,
3155 &if_true, &if_false, &fall_through);
3156
3157 __ test(FieldOperand(eax, String::kHashFieldOffset),
3158 Immediate(String::kContainsCachedArrayIndexMask));
3159 PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
3160 Split(zero, if_true, if_false, fall_through);
3161
3162 context()->Plug(if_true, if_false);
3163}
3164
3165
// %_GetCachedArrayIndex(string): extracts the array index cached in the
// string's hash field.  Result (as Smi) in eax.  Assumes the caller has
// already checked %_HasCachedArrayIndex.
void FullCodeGenerator::EmitGetCachedArrayIndex(CallRuntime* expr) {
  ZoneList<Expression*>* args = expr->arguments();
  DCHECK(args->length() == 1);
  VisitForAccumulatorValue(args->at(0));

  __ AssertString(eax);

  __ mov(eax, FieldOperand(eax, String::kHashFieldOffset));
  __ IndexFromHash(eax, eax);

  context()->Plug(eax);
}
3178
3179
// %_GetSuperConstructor(fn): returns the [[Prototype]] of the given
// function's map, i.e. its super constructor.  Result in eax.
void FullCodeGenerator::EmitGetSuperConstructor(CallRuntime* expr) {
  ZoneList<Expression*>* args = expr->arguments();
  DCHECK_EQ(1, args->length());
  VisitForAccumulatorValue(args->at(0));
  __ AssertFunction(eax);
  __ mov(eax, FieldOperand(eax, HeapObject::kMapOffset));
  __ mov(eax, FieldOperand(eax, Map::kPrototypeOffset));
  context()->Plug(eax);
}
3189
// %_GetOrdinaryHasInstance(): loads the ordinary-has-instance function
// from the native context.  Result in eax.
void FullCodeGenerator::EmitGetOrdinaryHasInstance(CallRuntime* expr) {
  DCHECK_EQ(0, expr->arguments()->length());
  __ mov(eax, NativeContextOperand());
  __ mov(eax, ContextOperand(eax, Context::ORDINARY_HAS_INSTANCE_INDEX));
  context()->Plug(eax);
}
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00003196
// %_DebugIsActive(): reads the isolate's debug-is-active byte and
// returns it as a Smi in eax.
void FullCodeGenerator::EmitDebugIsActive(CallRuntime* expr) {
  DCHECK(expr->arguments()->length() == 0);
  ExternalReference debug_is_active =
      ExternalReference::debug_is_active_address(isolate());
  __ movzx_b(eax, Operand::StaticVariable(debug_is_active));
  __ SmiTag(eax);
  context()->Plug(eax);
}
3205
3206
// %_CreateIterResultObject(value, done): allocates a JSIteratorResult
// inline ({value, done} with the iterator-result map); falls back to the
// runtime when new-space allocation fails.  Result in eax.
void FullCodeGenerator::EmitCreateIterResultObject(CallRuntime* expr) {
  ZoneList<Expression*>* args = expr->arguments();
  DCHECK_EQ(2, args->length());
  VisitForStackValue(args->at(0));
  VisitForStackValue(args->at(1));

  Label runtime, done;

  __ Allocate(JSIteratorResult::kSize, eax, ecx, edx, &runtime, TAG_OBJECT);
  __ mov(ebx, NativeContextOperand());
  __ mov(ebx, ContextOperand(ebx, Context::ITERATOR_RESULT_MAP_INDEX));
  __ mov(FieldOperand(eax, HeapObject::kMapOffset), ebx);
  __ mov(FieldOperand(eax, JSObject::kPropertiesOffset),
         isolate()->factory()->empty_fixed_array());
  __ mov(FieldOperand(eax, JSObject::kElementsOffset),
         isolate()->factory()->empty_fixed_array());
  // 'done' was pushed last (TOS), so it pops first; then 'value'.
  __ pop(FieldOperand(eax, JSIteratorResult::kDoneOffset));
  __ pop(FieldOperand(eax, JSIteratorResult::kValueOffset));
  STATIC_ASSERT(JSIteratorResult::kSize == 5 * kPointerSize);
  __ jmp(&done, Label::kNear);

  __ bind(&runtime);
  // Slow path: let the runtime consume the two pushed operands.
  CallRuntimeWithOperands(Runtime::kCreateIterResultObject);

  __ bind(&done);
  context()->Plug(eax);
}
3234
3235
// Pushes the JS runtime function identified by the expression's context
// index, followed by undefined as its receiver, setting up the stack for
// EmitCallJSRuntimeFunction.
void FullCodeGenerator::EmitLoadJSRuntimeFunction(CallRuntime* expr) {
  // Push function.
  __ LoadGlobalFunction(expr->context_index(), eax);
  PushOperand(eax);

  // Push undefined as receiver.
  PushOperand(isolate()->factory()->undefined_value());
}
3244
3245
// Calls a JS runtime function whose target and (undefined) receiver were
// pushed by EmitLoadJSRuntimeFunction, after the arguments have been
// evaluated onto the stack.  Result is left in eax.
void FullCodeGenerator::EmitCallJSRuntimeFunction(CallRuntime* expr) {
  ZoneList<Expression*>* args = expr->arguments();
  int arg_count = args->length();

  SetCallPosition(expr);
  // Function is below receiver and arg_count arguments on the stack.
  __ mov(edi, Operand(esp, (arg_count + 1) * kPointerSize));
  __ Set(eax, arg_count);
  __ Call(isolate()->builtins()->Call(ConvertReceiverMode::kNullOrUndefined),
          RelocInfo::CODE_TARGET);
  // Receiver and arguments were consumed by the call.
  OperandStackDepthDecrement(arg_count + 1);

  // Restore context register.
  __ mov(esi, Operand(ebp, StandardFrameConstants::kContextOffset));
}
3260
3261
// Emits code for the unary operators delete, void, !, and typeof.
// Each case plugs its result into the current expression context.
void FullCodeGenerator::VisitUnaryOperation(UnaryOperation* expr) {
  switch (expr->op()) {
    case Token::DELETE: {
      Comment cmnt(masm_, "[ UnaryOperation (DELETE)");
      Property* property = expr->expression()->AsProperty();
      VariableProxy* proxy = expr->expression()->AsVariableProxy();

      if (property != NULL) {
        // delete obj[key] / delete obj.key: evaluate both operands and
        // dispatch on language mode.
        VisitForStackValue(property->obj());
        VisitForStackValue(property->key());
        CallRuntimeWithOperands(is_strict(language_mode())
                                    ? Runtime::kDeleteProperty_Strict
                                    : Runtime::kDeleteProperty_Sloppy);
        context()->Plug(eax);
      } else if (proxy != NULL) {
        Variable* var = proxy->var();
        // Delete of an unqualified identifier is disallowed in strict mode but
        // "delete this" is allowed.
        bool is_this = var->HasThisName(isolate());
        DCHECK(is_sloppy(language_mode()) || is_this);
        if (var->IsUnallocatedOrGlobalSlot()) {
          // Global variable: delete from the global object (stored in the
          // native context's extension slot).
          __ mov(eax, NativeContextOperand());
          __ push(ContextOperand(eax, Context::EXTENSION_INDEX));
          __ push(Immediate(var->name()));
          __ CallRuntime(Runtime::kDeleteProperty_Sloppy);
          context()->Plug(eax);
        } else if (var->IsStackAllocated() || var->IsContextSlot()) {
          // Result of deleting non-global variables is false. 'this' is
          // not really a variable, though we implement it as one. The
          // subexpression does not have side effects.
          context()->Plug(is_this);
        } else {
          // Non-global variable. Call the runtime to try to delete from the
          // context where the variable was introduced.
          __ Push(var->name());
          __ CallRuntime(Runtime::kDeleteLookupSlot);
          context()->Plug(eax);
        }
      } else {
        // Result of deleting non-property, non-variable reference is true.
        // The subexpression may have side effects.
        VisitForEffect(expr->expression());
        context()->Plug(true);
      }
      break;
    }

    case Token::VOID: {
      Comment cmnt(masm_, "[ UnaryOperation (VOID)");
      // void expr: evaluate for effect only, result is always undefined.
      VisitForEffect(expr->expression());
      context()->Plug(isolate()->factory()->undefined_value());
      break;
    }

    case Token::NOT: {
      Comment cmnt(masm_, "[ UnaryOperation (NOT)");
      if (context()->IsEffect()) {
        // Unary NOT has no side effects so it's only necessary to visit the
        // subexpression.  Match the optimizing compiler by not branching.
        VisitForEffect(expr->expression());
      } else if (context()->IsTest()) {
        const TestContext* test = TestContext::cast(context());
        // The labels are swapped for the recursive call.
        VisitForControl(expr->expression(),
                        test->false_label(),
                        test->true_label(),
                        test->fall_through());
        context()->Plug(test->true_label(), test->false_label());
      } else {
        // We handle value contexts explicitly rather than simply visiting
        // for control and plugging the control flow into the context,
        // because we need to prepare a pair of extra administrative AST ids
        // for the optimizing compiler.
        DCHECK(context()->IsAccumulatorValue() || context()->IsStackValue());
        Label materialize_true, materialize_false, done;
        // Note swapped targets: the subexpression's false outcome
        // materializes true and vice versa.
        VisitForControl(expr->expression(),
                        &materialize_false,
                        &materialize_true,
                        &materialize_true);
        // A stack-value result occupies one operand stack slot.
        if (!context()->IsAccumulatorValue()) OperandStackDepthIncrement(1);
        __ bind(&materialize_true);
        PrepareForBailoutForId(expr->MaterializeTrueId(), NO_REGISTERS);
        if (context()->IsAccumulatorValue()) {
          __ mov(eax, isolate()->factory()->true_value());
        } else {
          __ Push(isolate()->factory()->true_value());
        }
        __ jmp(&done, Label::kNear);
        __ bind(&materialize_false);
        PrepareForBailoutForId(expr->MaterializeFalseId(), NO_REGISTERS);
        if (context()->IsAccumulatorValue()) {
          __ mov(eax, isolate()->factory()->false_value());
        } else {
          __ Push(isolate()->factory()->false_value());
        }
        __ bind(&done);
      }
      break;
    }

    case Token::TYPEOF: {
      Comment cmnt(masm_, "[ UnaryOperation (TYPEOF)");
      {
        // Evaluate the operand into the accumulator for the typeof stub.
        AccumulatorValueContext context(this);
        VisitForTypeofValue(expr->expression());
      }
      __ mov(ebx, eax);
      TypeofStub typeof_stub(isolate());
      __ CallStub(&typeof_stub);
      context()->Plug(eax);
      break;
    }

    default:
      UNREACHABLE();
  }
}
3379
3380
3381void FullCodeGenerator::VisitCountOperation(CountOperation* expr) {
3382 DCHECK(expr->expression()->IsValidReferenceExpressionOrThis());
3383
3384 Comment cmnt(masm_, "[ CountOperation");
3385
3386 Property* prop = expr->expression()->AsProperty();
3387 LhsKind assign_type = Property::GetAssignType(prop);
3388
3389 // Evaluate expression and get value.
3390 if (assign_type == VARIABLE) {
3391 DCHECK(expr->expression()->AsVariableProxy()->var() != NULL);
3392 AccumulatorValueContext context(this);
3393 EmitVariableLoad(expr->expression()->AsVariableProxy());
3394 } else {
3395 // Reserve space for result of postfix operation.
3396 if (expr->is_postfix() && !context()->IsEffect()) {
Ben Murdoch097c5b22016-05-18 11:27:45 +01003397 PushOperand(Smi::FromInt(0));
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00003398 }
3399 switch (assign_type) {
3400 case NAMED_PROPERTY: {
3401 // Put the object both on the stack and in the register.
3402 VisitForStackValue(prop->obj());
3403 __ mov(LoadDescriptor::ReceiverRegister(), Operand(esp, 0));
3404 EmitNamedPropertyLoad(prop);
3405 break;
3406 }
3407
3408 case NAMED_SUPER_PROPERTY: {
3409 VisitForStackValue(prop->obj()->AsSuperPropertyReference()->this_var());
3410 VisitForAccumulatorValue(
3411 prop->obj()->AsSuperPropertyReference()->home_object());
Ben Murdoch097c5b22016-05-18 11:27:45 +01003412 PushOperand(result_register());
3413 PushOperand(MemOperand(esp, kPointerSize));
3414 PushOperand(result_register());
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00003415 EmitNamedSuperPropertyLoad(prop);
3416 break;
3417 }
3418
3419 case KEYED_SUPER_PROPERTY: {
3420 VisitForStackValue(prop->obj()->AsSuperPropertyReference()->this_var());
3421 VisitForStackValue(
3422 prop->obj()->AsSuperPropertyReference()->home_object());
3423 VisitForAccumulatorValue(prop->key());
Ben Murdoch097c5b22016-05-18 11:27:45 +01003424 PushOperand(result_register());
3425 PushOperand(MemOperand(esp, 2 * kPointerSize));
3426 PushOperand(MemOperand(esp, 2 * kPointerSize));
3427 PushOperand(result_register());
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00003428 EmitKeyedSuperPropertyLoad(prop);
3429 break;
3430 }
3431
3432 case KEYED_PROPERTY: {
3433 VisitForStackValue(prop->obj());
3434 VisitForStackValue(prop->key());
3435 __ mov(LoadDescriptor::ReceiverRegister(),
3436 Operand(esp, kPointerSize)); // Object.
3437 __ mov(LoadDescriptor::NameRegister(), Operand(esp, 0)); // Key.
3438 EmitKeyedPropertyLoad(prop);
3439 break;
3440 }
3441
3442 case VARIABLE:
3443 UNREACHABLE();
3444 }
3445 }
3446
3447 // We need a second deoptimization point after loading the value
3448 // in case evaluating the property load my have a side effect.
3449 if (assign_type == VARIABLE) {
3450 PrepareForBailout(expr->expression(), TOS_REG);
3451 } else {
3452 PrepareForBailoutForId(prop->LoadId(), TOS_REG);
3453 }
3454
3455 // Inline smi case if we are in a loop.
3456 Label done, stub_call;
3457 JumpPatchSite patch_site(masm_);
3458 if (ShouldInlineSmiCase(expr->op())) {
3459 Label slow;
3460 patch_site.EmitJumpIfNotSmi(eax, &slow, Label::kNear);
3461
3462 // Save result for postfix expressions.
3463 if (expr->is_postfix()) {
3464 if (!context()->IsEffect()) {
3465 // Save the result on the stack. If we have a named or keyed property
3466 // we store the result under the receiver that is currently on top
3467 // of the stack.
3468 switch (assign_type) {
3469 case VARIABLE:
3470 __ push(eax);
3471 break;
3472 case NAMED_PROPERTY:
3473 __ mov(Operand(esp, kPointerSize), eax);
3474 break;
3475 case NAMED_SUPER_PROPERTY:
3476 __ mov(Operand(esp, 2 * kPointerSize), eax);
3477 break;
3478 case KEYED_PROPERTY:
3479 __ mov(Operand(esp, 2 * kPointerSize), eax);
3480 break;
3481 case KEYED_SUPER_PROPERTY:
3482 __ mov(Operand(esp, 3 * kPointerSize), eax);
3483 break;
3484 }
3485 }
3486 }
3487
3488 if (expr->op() == Token::INC) {
3489 __ add(eax, Immediate(Smi::FromInt(1)));
3490 } else {
3491 __ sub(eax, Immediate(Smi::FromInt(1)));
3492 }
3493 __ j(no_overflow, &done, Label::kNear);
3494 // Call stub. Undo operation first.
3495 if (expr->op() == Token::INC) {
3496 __ sub(eax, Immediate(Smi::FromInt(1)));
3497 } else {
3498 __ add(eax, Immediate(Smi::FromInt(1)));
3499 }
3500 __ jmp(&stub_call, Label::kNear);
3501 __ bind(&slow);
3502 }
Ben Murdochda12d292016-06-02 14:46:10 +01003503
3504 // Convert old value into a number.
3505 ToNumberStub convert_stub(isolate());
3506 __ CallStub(&convert_stub);
3507 PrepareForBailoutForId(expr->ToNumberId(), TOS_REG);
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00003508
3509 // Save result for postfix expressions.
3510 if (expr->is_postfix()) {
3511 if (!context()->IsEffect()) {
3512 // Save the result on the stack. If we have a named or keyed property
3513 // we store the result under the receiver that is currently on top
3514 // of the stack.
3515 switch (assign_type) {
3516 case VARIABLE:
Ben Murdoch097c5b22016-05-18 11:27:45 +01003517 PushOperand(eax);
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00003518 break;
3519 case NAMED_PROPERTY:
3520 __ mov(Operand(esp, kPointerSize), eax);
3521 break;
3522 case NAMED_SUPER_PROPERTY:
3523 __ mov(Operand(esp, 2 * kPointerSize), eax);
3524 break;
3525 case KEYED_PROPERTY:
3526 __ mov(Operand(esp, 2 * kPointerSize), eax);
3527 break;
3528 case KEYED_SUPER_PROPERTY:
3529 __ mov(Operand(esp, 3 * kPointerSize), eax);
3530 break;
3531 }
3532 }
3533 }
3534
3535 SetExpressionPosition(expr);
3536
3537 // Call stub for +1/-1.
3538 __ bind(&stub_call);
3539 __ mov(edx, eax);
3540 __ mov(eax, Immediate(Smi::FromInt(1)));
Ben Murdoch097c5b22016-05-18 11:27:45 +01003541 Handle<Code> code =
3542 CodeFactory::BinaryOpIC(isolate(), expr->binary_op()).code();
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00003543 CallIC(code, expr->CountBinOpFeedbackId());
3544 patch_site.EmitPatchInfo();
3545 __ bind(&done);
3546
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00003547 // Store the value returned in eax.
3548 switch (assign_type) {
3549 case VARIABLE:
3550 if (expr->is_postfix()) {
3551 // Perform the assignment as if via '='.
3552 { EffectContext context(this);
3553 EmitVariableAssignment(expr->expression()->AsVariableProxy()->var(),
3554 Token::ASSIGN, expr->CountSlot());
3555 PrepareForBailoutForId(expr->AssignmentId(), TOS_REG);
3556 context.Plug(eax);
3557 }
3558 // For all contexts except EffectContext We have the result on
3559 // top of the stack.
3560 if (!context()->IsEffect()) {
3561 context()->PlugTOS();
3562 }
3563 } else {
3564 // Perform the assignment as if via '='.
3565 EmitVariableAssignment(expr->expression()->AsVariableProxy()->var(),
3566 Token::ASSIGN, expr->CountSlot());
3567 PrepareForBailoutForId(expr->AssignmentId(), TOS_REG);
3568 context()->Plug(eax);
3569 }
3570 break;
3571 case NAMED_PROPERTY: {
3572 __ mov(StoreDescriptor::NameRegister(),
3573 prop->key()->AsLiteral()->value());
Ben Murdoch097c5b22016-05-18 11:27:45 +01003574 PopOperand(StoreDescriptor::ReceiverRegister());
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00003575 EmitLoadStoreICSlot(expr->CountSlot());
3576 CallStoreIC();
3577 PrepareForBailoutForId(expr->AssignmentId(), TOS_REG);
3578 if (expr->is_postfix()) {
3579 if (!context()->IsEffect()) {
3580 context()->PlugTOS();
3581 }
3582 } else {
3583 context()->Plug(eax);
3584 }
3585 break;
3586 }
3587 case NAMED_SUPER_PROPERTY: {
3588 EmitNamedSuperPropertyStore(prop);
3589 if (expr->is_postfix()) {
3590 if (!context()->IsEffect()) {
3591 context()->PlugTOS();
3592 }
3593 } else {
3594 context()->Plug(eax);
3595 }
3596 break;
3597 }
3598 case KEYED_SUPER_PROPERTY: {
3599 EmitKeyedSuperPropertyStore(prop);
3600 if (expr->is_postfix()) {
3601 if (!context()->IsEffect()) {
3602 context()->PlugTOS();
3603 }
3604 } else {
3605 context()->Plug(eax);
3606 }
3607 break;
3608 }
3609 case KEYED_PROPERTY: {
Ben Murdoch097c5b22016-05-18 11:27:45 +01003610 PopOperand(StoreDescriptor::NameRegister());
3611 PopOperand(StoreDescriptor::ReceiverRegister());
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00003612 Handle<Code> ic =
3613 CodeFactory::KeyedStoreIC(isolate(), language_mode()).code();
3614 EmitLoadStoreICSlot(expr->CountSlot());
3615 CallIC(ic);
3616 PrepareForBailoutForId(expr->AssignmentId(), TOS_REG);
3617 if (expr->is_postfix()) {
3618 // Result is on the stack
3619 if (!context()->IsEffect()) {
3620 context()->PlugTOS();
3621 }
3622 } else {
3623 context()->Plug(eax);
3624 }
3625 break;
3626 }
3627 }
3628}
3629
3630
// Emits an inlined comparison of `typeof sub_expr` against the string
// literal `check` (e.g. typeof x == "number"), branching directly to the
// context's true/false labels instead of materializing the typeof string.
// An unrecognized literal always takes the false branch.
void FullCodeGenerator::EmitLiteralCompareTypeof(Expression* expr,
                                                 Expression* sub_expr,
                                                 Handle<String> check) {
  Label materialize_true, materialize_false;
  Label* if_true = NULL;
  Label* if_false = NULL;
  Label* fall_through = NULL;
  context()->PrepareTest(&materialize_true, &materialize_false,
                         &if_true, &if_false, &fall_through);

  { AccumulatorValueContext context(this);
    VisitForTypeofValue(sub_expr);
  }
  PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);

  Factory* factory = isolate()->factory();
  if (String::Equals(check, factory->number_string())) {
    // Numbers are smis or heap numbers.
    __ JumpIfSmi(eax, if_true);
    __ cmp(FieldOperand(eax, HeapObject::kMapOffset),
           isolate()->factory()->heap_number_map());
    Split(equal, if_true, if_false, fall_through);
  } else if (String::Equals(check, factory->string_string())) {
    __ JumpIfSmi(eax, if_false);
    __ CmpObjectType(eax, FIRST_NONSTRING_TYPE, edx);
    Split(below, if_true, if_false, fall_through);
  } else if (String::Equals(check, factory->symbol_string())) {
    __ JumpIfSmi(eax, if_false);
    __ CmpObjectType(eax, SYMBOL_TYPE, edx);
    Split(equal, if_true, if_false, fall_through);
  } else if (String::Equals(check, factory->boolean_string())) {
    __ cmp(eax, isolate()->factory()->true_value());
    __ j(equal, if_true);
    __ cmp(eax, isolate()->factory()->false_value());
    Split(equal, if_true, if_false, fall_through);
  } else if (String::Equals(check, factory->undefined_string())) {
    // null is typeof "object", never "undefined".
    __ cmp(eax, isolate()->factory()->null_value());
    __ j(equal, if_false);
    __ JumpIfSmi(eax, if_false);
    // Check for undetectable objects => true.
    __ mov(edx, FieldOperand(eax, HeapObject::kMapOffset));
    __ test_b(FieldOperand(edx, Map::kBitFieldOffset),
              Immediate(1 << Map::kIsUndetectable));
    Split(not_zero, if_true, if_false, fall_through);
  } else if (String::Equals(check, factory->function_string())) {
    __ JumpIfSmi(eax, if_false);
    // Check for callable and not undetectable objects => true.
    __ mov(edx, FieldOperand(eax, HeapObject::kMapOffset));
    __ movzx_b(ecx, FieldOperand(edx, Map::kBitFieldOffset));
    __ and_(ecx, (1 << Map::kIsCallable) | (1 << Map::kIsUndetectable));
    __ cmp(ecx, 1 << Map::kIsCallable);
    Split(equal, if_true, if_false, fall_through);
  } else if (String::Equals(check, factory->object_string())) {
    __ JumpIfSmi(eax, if_false);
    __ cmp(eax, isolate()->factory()->null_value());
    __ j(equal, if_true);
    STATIC_ASSERT(LAST_JS_RECEIVER_TYPE == LAST_TYPE);
    __ CmpObjectType(eax, FIRST_JS_RECEIVER_TYPE, edx);
    __ j(below, if_false);
    // Check for callable or undetectable objects => false.
    __ test_b(FieldOperand(edx, Map::kBitFieldOffset),
              Immediate((1 << Map::kIsCallable) | (1 << Map::kIsUndetectable)));
    Split(zero, if_true, if_false, fall_through);
// clang-format off
#define SIMD128_TYPE(TYPE, Type, type, lane_count, lane_type) \
  } else if (String::Equals(check, factory->type##_string())) { \
    __ JumpIfSmi(eax, if_false); \
    __ cmp(FieldOperand(eax, HeapObject::kMapOffset), \
           isolate()->factory()->type##_map()); \
    Split(equal, if_true, if_false, fall_through);
  SIMD128_TYPES(SIMD128_TYPE)
#undef SIMD128_TYPE
  // clang-format on
  } else {
    if (if_false != fall_through) __ jmp(if_false);
  }
  context()->Plug(if_true, if_false);
}
3708
3709
3710void FullCodeGenerator::VisitCompareOperation(CompareOperation* expr) {
3711 Comment cmnt(masm_, "[ CompareOperation");
3712 SetExpressionPosition(expr);
3713
3714 // First we try a fast inlined version of the compare when one of
3715 // the operands is a literal.
3716 if (TryLiteralCompare(expr)) return;
3717
3718 // Always perform the comparison for its control flow. Pack the result
3719 // into the expression's context after the comparison is performed.
3720 Label materialize_true, materialize_false;
3721 Label* if_true = NULL;
3722 Label* if_false = NULL;
3723 Label* fall_through = NULL;
3724 context()->PrepareTest(&materialize_true, &materialize_false,
3725 &if_true, &if_false, &fall_through);
3726
3727 Token::Value op = expr->op();
3728 VisitForStackValue(expr->left());
3729 switch (op) {
3730 case Token::IN:
3731 VisitForStackValue(expr->right());
Ben Murdoch097c5b22016-05-18 11:27:45 +01003732 CallRuntimeWithOperands(Runtime::kHasProperty);
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00003733 PrepareForBailoutBeforeSplit(expr, false, NULL, NULL);
3734 __ cmp(eax, isolate()->factory()->true_value());
3735 Split(equal, if_true, if_false, fall_through);
3736 break;
3737
3738 case Token::INSTANCEOF: {
3739 VisitForAccumulatorValue(expr->right());
Ben Murdoch097c5b22016-05-18 11:27:45 +01003740 PopOperand(edx);
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00003741 InstanceOfStub stub(isolate());
3742 __ CallStub(&stub);
3743 PrepareForBailoutBeforeSplit(expr, false, NULL, NULL);
3744 __ cmp(eax, isolate()->factory()->true_value());
3745 Split(equal, if_true, if_false, fall_through);
3746 break;
3747 }
3748
3749 default: {
3750 VisitForAccumulatorValue(expr->right());
3751 Condition cc = CompareIC::ComputeCondition(op);
Ben Murdoch097c5b22016-05-18 11:27:45 +01003752 PopOperand(edx);
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00003753
3754 bool inline_smi_code = ShouldInlineSmiCase(op);
3755 JumpPatchSite patch_site(masm_);
3756 if (inline_smi_code) {
3757 Label slow_case;
3758 __ mov(ecx, edx);
3759 __ or_(ecx, eax);
3760 patch_site.EmitJumpIfNotSmi(ecx, &slow_case, Label::kNear);
3761 __ cmp(edx, eax);
3762 Split(cc, if_true, if_false, NULL);
3763 __ bind(&slow_case);
3764 }
3765
Ben Murdoch097c5b22016-05-18 11:27:45 +01003766 Handle<Code> ic = CodeFactory::CompareIC(isolate(), op).code();
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00003767 CallIC(ic, expr->CompareOperationFeedbackId());
3768 patch_site.EmitPatchInfo();
3769
3770 PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
3771 __ test(eax, eax);
3772 Split(cc, if_true, if_false, fall_through);
3773 }
3774 }
3775
3776 // Convert the result of the comparison into one expected for this
3777 // expression's context.
3778 context()->Plug(if_true, if_false);
3779}
3780
3781
// Emits an inlined comparison of sub_expr against null or undefined.
// Strict equality compares against the exact oddball; loose equality
// instead tests the map's undetectable bit (so null, undefined, and
// undetectable objects all compare equal to null/undefined).
void FullCodeGenerator::EmitLiteralCompareNil(CompareOperation* expr,
                                              Expression* sub_expr,
                                              NilValue nil) {
  Label materialize_true, materialize_false;
  Label* if_true = NULL;
  Label* if_false = NULL;
  Label* fall_through = NULL;
  context()->PrepareTest(&materialize_true, &materialize_false,
                         &if_true, &if_false, &fall_through);

  VisitForAccumulatorValue(sub_expr);
  PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);

  Handle<Object> nil_value = nil == kNullValue
      ? isolate()->factory()->null_value()
      : isolate()->factory()->undefined_value();
  if (expr->op() == Token::EQ_STRICT) {
    __ cmp(eax, nil_value);
    Split(equal, if_true, if_false, fall_through);
  } else {
    __ JumpIfSmi(eax, if_false);
    __ mov(eax, FieldOperand(eax, HeapObject::kMapOffset));
    __ test_b(FieldOperand(eax, Map::kBitFieldOffset),
              Immediate(1 << Map::kIsUndetectable));
    Split(not_zero, if_true, if_false, fall_through);
  }
  context()->Plug(if_true, if_false);
}
3810
3811
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00003812Register FullCodeGenerator::result_register() {
3813 return eax;
3814}
3815
3816
3817Register FullCodeGenerator::context_register() {
3818 return esi;
3819}
3820
// Loads the pointer-aligned frame slot at ebp + frame_offset into `value`.
void FullCodeGenerator::LoadFromFrameField(int frame_offset, Register value) {
  DCHECK_EQ(POINTER_SIZE_ALIGN(frame_offset), frame_offset);
  __ mov(value, Operand(ebp, frame_offset));
}
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00003825
// Stores `value` into the pointer-aligned frame slot at ebp + frame_offset.
void FullCodeGenerator::StoreToFrameField(int frame_offset, Register value) {
  DCHECK_EQ(POINTER_SIZE_ALIGN(frame_offset), frame_offset);
  __ mov(Operand(ebp, frame_offset), value);
}
3830
3831
// Loads the given slot of the current context (esi) into `dst`.
void FullCodeGenerator::LoadContextField(Register dst, int context_index) {
  __ mov(dst, ContextOperand(esi, context_index));
}
3835
3836
// Pushes the closure to use for a newly allocated context.  Which closure
// that is depends on the kind of the innermost closure scope (script/module,
// eval, or ordinary function).
void FullCodeGenerator::PushFunctionArgumentForContextAllocation() {
  Scope* closure_scope = scope()->ClosureScope();
  if (closure_scope->is_script_scope() ||
      closure_scope->is_module_scope()) {
    // Contexts nested in the native context have a canonical empty function
    // as their closure, not the anonymous closure containing the global
    // code.
    __ mov(eax, NativeContextOperand());
    PushOperand(ContextOperand(eax, Context::CLOSURE_INDEX));
  } else if (closure_scope->is_eval_scope()) {
    // Contexts nested inside eval code have the same closure as the context
    // calling eval, not the anonymous closure containing the eval code.
    // Fetch it from the context.
    PushOperand(ContextOperand(esi, Context::CLOSURE_INDEX));
  } else {
    DCHECK(closure_scope->is_function_scope());
    // Ordinary function scope: the closure is the function in this frame.
    PushOperand(Operand(ebp, JavaScriptFrameConstants::kFunctionOffset));
  }
}
3856
3857
3858// ----------------------------------------------------------------------------
3859// Non-local control flow support.
3860
// Saves the isolate's pending message on the operand stack and clears it,
// so code inside a finally block runs with a clean message slot; the saved
// value is restored by ExitFinallyBlock().
void FullCodeGenerator::EnterFinallyBlock() {
  // Store pending message while executing finally block.
  ExternalReference pending_message_obj =
      ExternalReference::address_of_pending_message_obj(isolate());
  __ mov(edx, Operand::StaticVariable(pending_message_obj));
  PushOperand(edx);

  ClearPendingMessage();
}
3870
3871
// Restores the pending message saved by EnterFinallyBlock() from the operand
// stack back into the isolate.  Uses edx as scratch, so the accumulator must
// live elsewhere.
void FullCodeGenerator::ExitFinallyBlock() {
  DCHECK(!result_register().is(edx));
  // Restore pending message from stack.
  PopOperand(edx);
  ExternalReference pending_message_obj =
      ExternalReference::address_of_pending_message_obj(isolate());
  __ mov(Operand::StaticVariable(pending_message_obj), edx);
}
3880
3881
// Resets the isolate's pending message slot to the hole value.  Uses edx as
// scratch, so the accumulator must live elsewhere.
void FullCodeGenerator::ClearPendingMessage() {
  DCHECK(!result_register().is(edx));
  ExternalReference pending_message_obj =
      ExternalReference::address_of_pending_message_obj(isolate());
  __ mov(edx, Immediate(isolate()->factory()->the_hole_value()));
  __ mov(Operand::StaticVariable(pending_message_obj), edx);
}
3889
3890
// Emits the dispatch for deferred commands (return/throw/break/continue)
// that were routed through a finally block.  Expects the accumulator and a
// smi token on the stack; each recorded command compares its token against
// the popped one and, on a match, performs the corresponding non-local exit.
void FullCodeGenerator::DeferredCommands::EmitCommands() {
  DCHECK(!result_register().is(edx));
  __ Pop(result_register());  // Restore the accumulator.
  __ Pop(edx);                // Get the token.
  for (DeferredCommand cmd : commands_) {
    Label skip;
    __ cmp(edx, Immediate(Smi::FromInt(cmd.token)));
    __ j(not_equal, &skip);
    switch (cmd.command) {
      case kReturn:
        codegen_->EmitUnwindAndReturn();
        break;
      case kThrow:
        __ Push(result_register());
        __ CallRuntime(Runtime::kReThrow);
        break;
      case kContinue:
        codegen_->EmitContinue(cmd.target);
        break;
      case kBreak:
        codegen_->EmitBreak(cmd.target);
        break;
    }
    __ bind(&skip);
  }
}
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00003917
3918#undef __
3919
3920
// Byte patterns used by BackEdgeTable to patch the instruction sequence in
// front of a back-edge call site.  The interrupt form is `jns <offset>`;
// the on-stack-replacement form overwrites it with a two-byte nop (0x66 0x90)
// so the call is taken unconditionally.
static const byte kJnsInstruction = 0x79;
static const byte kJnsOffset = 0x11;
static const byte kNopByteOne = 0x66;
static const byte kNopByteTwo = 0x90;
#ifdef DEBUG
// First byte of the near call instruction, used only for sanity checks.
static const byte kCallInstruction = 0xe8;
#endif
3928
3929
// Patches the back-edge sequence at `pc` in unoptimized code to either the
// interrupt-check form (conditional jump over the call) or the on-stack
// replacement form (nops, so the call always executes), and redirects the
// call target to `replacement_code`.
void BackEdgeTable::PatchAt(Code* unoptimized_code,
                            Address pc,
                            BackEdgeState target_state,
                            Code* replacement_code) {
  // `pc` points just past the call's 4-byte relative operand; the two bytes
  // before the call opcode hold the jns instruction and its offset.
  Address call_target_address = pc - kIntSize;
  Address jns_instr_address = call_target_address - 3;
  Address jns_offset_address = call_target_address - 2;

  switch (target_state) {
    case INTERRUPT:
      // sub <profiling_counter>, <delta>  ;; Not changed
      // jns ok
      // call <interrupt stub>
      // ok:
      *jns_instr_address = kJnsInstruction;
      *jns_offset_address = kJnsOffset;
      break;
    case ON_STACK_REPLACEMENT:
      // sub <profiling_counter>, <delta>  ;; Not changed
      // nop
      // nop
      // call <on-stack replacement>
      // ok:
      *jns_instr_address = kNopByteOne;
      *jns_offset_address = kNopByteTwo;
      break;
  }

  Assembler::set_target_address_at(unoptimized_code->GetIsolate(),
                                   call_target_address, unoptimized_code,
                                   replacement_code->entry());
  // Tell incremental marking about the new code reference we just wrote.
  unoptimized_code->GetHeap()->incremental_marking()->RecordCodeTargetPatch(
      unoptimized_code, call_target_address, replacement_code);
}
3964
3965
// Inspects the patched bytes at a back-edge site and reports whether it is
// currently in the interrupt-check state or the on-stack-replacement state.
// Debug builds also verify that the call target matches the expected stub.
BackEdgeTable::BackEdgeState BackEdgeTable::GetBackEdgeState(
    Isolate* isolate,
    Code* unoptimized_code,
    Address pc) {
  Address call_target_address = pc - kIntSize;
  Address jns_instr_address = call_target_address - 3;
  DCHECK_EQ(kCallInstruction, *(call_target_address - 1));

  if (*jns_instr_address == kJnsInstruction) {
    DCHECK_EQ(kJnsOffset, *(call_target_address - 2));
    DCHECK_EQ(isolate->builtins()->InterruptCheck()->entry(),
              Assembler::target_address_at(call_target_address,
                                           unoptimized_code));
    return INTERRUPT;
  }

  // Otherwise the jns must have been replaced by the two-byte nop.
  DCHECK_EQ(kNopByteOne, *jns_instr_address);
  DCHECK_EQ(kNopByteTwo, *(call_target_address - 2));

  DCHECK_EQ(
      isolate->builtins()->OnStackReplacement()->entry(),
      Assembler::target_address_at(call_target_address, unoptimized_code));
  return ON_STACK_REPLACEMENT;
}
3990
3991
3992} // namespace internal
3993} // namespace v8
3994
3995#endif // V8_TARGET_ARCH_IA32