// Copyright 2012 the V8 project authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.

#if V8_TARGET_ARCH_X64

#include "src/ast/scopes.h"
#include "src/code-factory.h"
#include "src/code-stubs.h"
#include "src/codegen.h"
#include "src/debug/debug.h"
#include "src/full-codegen/full-codegen.h"
#include "src/ic/ic.h"
#include "src/parsing/parser.h"

namespace v8 {
namespace internal {

#define __ ACCESS_MASM(masm())

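// A JumpPatchSite marks an inlined smi check that the IC system can later
// re-purpose. After a testb the carry flag is always clear, so the jc/jnc
// emitted below is statically never/always taken until patched to jz/jnz
// (see EmitJump), at which point it becomes a real test of the smi tag bit.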
class JumpPatchSite BASE_EMBEDDED {
 public:
  explicit JumpPatchSite(MacroAssembler* masm) : masm_(masm) {
#ifdef DEBUG
    info_emitted_ = false;
#endif
  }

  ~JumpPatchSite() {
    DCHECK(patch_site_.is_bound() == info_emitted_);
  }

  void EmitJumpIfNotSmi(Register reg,
                        Label* target,
                        Label::Distance near_jump = Label::kFar) {
    __ testb(reg, Immediate(kSmiTagMask));
    EmitJump(not_carry, target, near_jump);  // Always taken before patched.
  }

  void EmitJumpIfSmi(Register reg,
                     Label* target,
                     Label::Distance near_jump = Label::kFar) {
    __ testb(reg, Immediate(kSmiTagMask));
    EmitJump(carry, target, near_jump);  // Never taken before patched.
  }

  void EmitPatchInfo() {
    if (patch_site_.is_bound()) {
      int delta_to_patch_site = masm_->SizeOfCodeGeneratedSince(&patch_site_);
      DCHECK(is_uint8(delta_to_patch_site));
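      // Embed the delta in the 8-bit immediate of a testl on rax. The
      // instruction has no useful effect at runtime, but the IC patching
      // code reads the immediate to locate the patch site.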
      __ testl(rax, Immediate(delta_to_patch_site));
#ifdef DEBUG
      info_emitted_ = true;
#endif
    } else {
      __ nop();  // Signals no inlined code.
    }
  }

 private:
  // jc will be patched with jz, jnc will become jnz.
  void EmitJump(Condition cc, Label* target, Label::Distance near_jump) {
    DCHECK(!patch_site_.is_bound() && !info_emitted_);
    DCHECK(cc == carry || cc == not_carry);
    __ bind(&patch_site_);
    __ j(cc, target, near_jump);
  }

  MacroAssembler* masm() { return masm_; }
  MacroAssembler* masm_;
  Label patch_site_;
#ifdef DEBUG
  bool info_emitted_;
#endif
};


// Generate code for a JS function. On entry to the function the receiver
// and arguments have been pushed on the stack left to right, with the
// return address on top of them. The actual argument count matches the
// formal parameter count expected by the function.
//
// The live registers are:
//   o rdi: the JS function object being called (i.e. ourselves)
//   o rdx: the new target value
//   o rsi: our context
//   o rbp: our caller's frame pointer
//   o rsp: stack pointer (pointing to return address)
//
// The function builds a JS frame. Please see JavaScriptFrameConstants in
// frames-x64.h for its layout.
void FullCodeGenerator::Generate() {
  CompilationInfo* info = info_;
  profiling_counter_ = isolate()->factory()->NewCell(
      Handle<Smi>(Smi::FromInt(FLAG_interrupt_budget), isolate()));
  SetFunctionPosition(literal());
  Comment cmnt(masm_, "[ function compiled by full code generator");

  ProfileEntryHookStub::MaybeCallEntryHook(masm_);

  if (FLAG_debug_code && info->ExpectsJSReceiverAsReceiver()) {
    StackArgumentsAccessor args(rsp, info->scope()->num_parameters());
    __ movp(rcx, args.GetReceiverOperand());
    __ AssertNotSmi(rcx);
    __ CmpObjectType(rcx, FIRST_JS_RECEIVER_TYPE, rcx);
    __ Assert(above_equal, kSloppyFunctionExpectsJSReceiverReceiver);
  }

  // Open a frame scope to indicate that there is a frame on the stack.  The
  // MANUAL indicates that the scope shouldn't actually generate code to set up
  // the frame (that is done below).
  FrameScope frame_scope(masm_, StackFrame::MANUAL);

  info->set_prologue_offset(masm_->pc_offset());
  __ Prologue(info->GeneratePreagedPrologue());

  { Comment cmnt(masm_, "[ Allocate locals");
    int locals_count = info->scope()->num_stack_slots();
    // Generators allocate locals, if any, in context slots.
    DCHECK(!IsGeneratorFunction(info->literal()->kind()) || locals_count == 0);
    OperandStackDepthIncrement(locals_count);
    if (locals_count == 1) {
      __ PushRoot(Heap::kUndefinedValueRootIndex);
    } else if (locals_count > 1) {
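      // With this many locals the pushes below could move rsp past the
      // stack guard, so check against the real stack limit and throw
      // before allocating.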
      if (locals_count >= 128) {
        Label ok;
        __ movp(rcx, rsp);
        __ subp(rcx, Immediate(locals_count * kPointerSize));
        __ CompareRoot(rcx, Heap::kRealStackLimitRootIndex);
        __ j(above_equal, &ok, Label::kNear);
        __ CallRuntime(Runtime::kThrowStackOverflow);
        __ bind(&ok);
      }
      __ LoadRoot(rax, Heap::kUndefinedValueRootIndex);
      const int kMaxPushes = 32;
      if (locals_count >= kMaxPushes) {
        int loop_iterations = locals_count / kMaxPushes;
        __ movp(rcx, Immediate(loop_iterations));
        Label loop_header;
        __ bind(&loop_header);
        // Do pushes.
        for (int i = 0; i < kMaxPushes; i++) {
          __ Push(rax);
        }
        // Continue loop if not done.
        __ decp(rcx);
        __ j(not_zero, &loop_header, Label::kNear);
      }
      int remaining = locals_count % kMaxPushes;
      // Emit the remaining pushes.
      for (int i = 0; i < remaining; i++) {
        __ Push(rax);
      }
    }
  }

  bool function_in_register = true;

  // Possibly allocate a local context.
  if (info->scope()->num_heap_slots() > 0) {
    Comment cmnt(masm_, "[ Allocate context");
    bool need_write_barrier = true;
    int slots = info->scope()->num_heap_slots() - Context::MIN_CONTEXT_SLOTS;
    // Argument to NewContext is the function, which is still in rdi.
    if (info->scope()->is_script_scope()) {
      __ Push(rdi);
      __ Push(info->scope()->GetScopeInfo(info->isolate()));
      __ CallRuntime(Runtime::kNewScriptContext);
      PrepareForBailoutForId(BailoutId::ScriptContext(),
                             BailoutState::TOS_REGISTER);
      // The new target value is not used, clobbering is safe.
      DCHECK_NULL(info->scope()->new_target_var());
    } else {
      if (info->scope()->new_target_var() != nullptr) {
        __ Push(rdx);  // Preserve new target.
      }
      if (slots <= FastNewContextStub::kMaximumSlots) {
        FastNewContextStub stub(isolate(), slots);
        __ CallStub(&stub);
        // Result of FastNewContextStub is always in new space.
        need_write_barrier = false;
      } else {
        __ Push(rdi);
        __ CallRuntime(Runtime::kNewFunctionContext);
      }
      if (info->scope()->new_target_var() != nullptr) {
        __ Pop(rdx);  // Restore new target.
      }
    }
    function_in_register = false;
    // Context is returned in rax.  It replaces the context passed to us.
    // It's saved in the stack and kept live in rsi.
    __ movp(rsi, rax);
    __ movp(Operand(rbp, StandardFrameConstants::kContextOffset), rax);

    // Copy any necessary parameters into the context.
    int num_parameters = info->scope()->num_parameters();
    int first_parameter = info->scope()->has_this_declaration() ? -1 : 0;
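    // An index of -1 denotes the receiver, present when the scope has a
    // |this| declaration.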
    for (int i = first_parameter; i < num_parameters; i++) {
      Variable* var = (i == -1) ? scope()->receiver() : scope()->parameter(i);
      if (var->IsContextSlot()) {
        int parameter_offset = StandardFrameConstants::kCallerSPOffset +
            (num_parameters - 1 - i) * kPointerSize;
        // Load parameter from stack.
        __ movp(rax, Operand(rbp, parameter_offset));
        // Store it in the context.
        int context_offset = Context::SlotOffset(var->index());
        __ movp(Operand(rsi, context_offset), rax);
        // Update the write barrier.  This clobbers rax and rbx.
        if (need_write_barrier) {
          __ RecordWriteContextSlot(
              rsi, context_offset, rax, rbx, kDontSaveFPRegs);
        } else if (FLAG_debug_code) {
          Label done;
          __ JumpIfInNewSpace(rsi, rax, &done, Label::kNear);
          __ Abort(kExpectedNewSpaceObject);
          __ bind(&done);
        }
      }
    }
  }

  // Registers holding this function and new target are both trashed in case
  // we bailout here. But since that can happen only when new target is not
  // used and we allocate a context, the value of |function_in_register| is
  // correct.
  PrepareForBailoutForId(BailoutId::FunctionContext(),
                         BailoutState::NO_REGISTERS);

  // Possibly set up a local binding to the this function which is used in
  // derived constructors with super calls.
  Variable* this_function_var = scope()->this_function_var();
  if (this_function_var != nullptr) {
    Comment cmnt(masm_, "[ This function");
    if (!function_in_register) {
      __ movp(rdi, Operand(rbp, JavaScriptFrameConstants::kFunctionOffset));
      // The write barrier clobbers the register again, keep it marked as
      // such.
    }
    SetVar(this_function_var, rdi, rbx, rcx);
  }

  // Possibly set up a local binding to the new target value.
  Variable* new_target_var = scope()->new_target_var();
  if (new_target_var != nullptr) {
    Comment cmnt(masm_, "[ new.target");
    SetVar(new_target_var, rdx, rbx, rcx);
  }

  // Possibly allocate rest parameters.
  int rest_index;
  Variable* rest_param = scope()->rest_parameter(&rest_index);
  if (rest_param) {
    Comment cmnt(masm_, "[ Allocate rest parameter array");
    if (!function_in_register) {
      __ movp(rdi, Operand(rbp, JavaScriptFrameConstants::kFunctionOffset));
    }
    FastNewRestParameterStub stub(isolate());
    __ CallStub(&stub);
    function_in_register = false;
    SetVar(rest_param, rax, rbx, rdx);
  }

  // Possibly allocate an arguments object.
  Variable* arguments = scope()->arguments();
  if (arguments != NULL) {
    // Arguments object must be allocated after the context object, in
    // case the "arguments" or ".arguments" variables are in the context.
    Comment cmnt(masm_, "[ Allocate arguments object");
    if (!function_in_register) {
      __ movp(rdi, Operand(rbp, JavaScriptFrameConstants::kFunctionOffset));
    }
    if (is_strict(language_mode()) || !has_simple_parameters()) {
      FastNewStrictArgumentsStub stub(isolate());
      __ CallStub(&stub);
    } else if (literal()->has_duplicate_parameters()) {
      __ Push(rdi);
      __ CallRuntime(Runtime::kNewSloppyArguments_Generic);
    } else {
      FastNewSloppyArgumentsStub stub(isolate());
      __ CallStub(&stub);
    }

    SetVar(arguments, rax, rbx, rdx);
  }

  if (FLAG_trace) {
    __ CallRuntime(Runtime::kTraceEnter);
  }

  // Visit the declarations and body unless there is an illegal
  // redeclaration.
  PrepareForBailoutForId(BailoutId::FunctionEntry(),
                         BailoutState::NO_REGISTERS);
  {
    Comment cmnt(masm_, "[ Declarations");
    VisitDeclarations(scope()->declarations());
  }

  // Assert that the declarations do not use ICs. Otherwise the debugger
  // won't be able to redirect a PC at an IC to the correct IC in newly
  // recompiled code.
  DCHECK_EQ(0, ic_total_count_);

  {
    Comment cmnt(masm_, "[ Stack check");
    PrepareForBailoutForId(BailoutId::Declarations(),
                           BailoutState::NO_REGISTERS);
    Label ok;
    __ CompareRoot(rsp, Heap::kStackLimitRootIndex);
    __ j(above_equal, &ok, Label::kNear);
    __ call(isolate()->builtins()->StackCheck(), RelocInfo::CODE_TARGET);
    __ bind(&ok);
  }

  {
    Comment cmnt(masm_, "[ Body");
    DCHECK(loop_depth() == 0);
    VisitStatements(literal()->body());
    DCHECK(loop_depth() == 0);
  }

  // Always emit a 'return undefined' in case control fell off the end of
  // the body.
  { Comment cmnt(masm_, "[ return <undefined>;");
    __ LoadRoot(rax, Heap::kUndefinedValueRootIndex);
    EmitReturnSequence();
  }
}


void FullCodeGenerator::ClearAccumulator() {
  __ Set(rax, 0);
}

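// The profiling counter is a heap Cell holding a smi budget. Back edges
// and returns decrement it (see below); when it goes negative, the
// InterruptCheck builtin runs and the counter is reset to
// FLAG_interrupt_budget.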
void FullCodeGenerator::EmitProfilingCounterDecrement(int delta) {
  __ Move(rbx, profiling_counter_, RelocInfo::EMBEDDED_OBJECT);
  __ SmiAddConstant(FieldOperand(rbx, Cell::kValueOffset),
                    Smi::FromInt(-delta));
}


void FullCodeGenerator::EmitProfilingCounterReset() {
  int reset_value = FLAG_interrupt_budget;
  __ Move(rbx, profiling_counter_, RelocInfo::EMBEDDED_OBJECT);
  __ Move(kScratchRegister, Smi::FromInt(reset_value));
  __ movp(FieldOperand(rbx, Cell::kValueOffset), kScratchRegister);
}

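// Fixed byte size of the interrupt-check sequence emitted in
// EmitBackEdgeBookkeeping below (asserted there via
// PredictableCodeSizeScope); back-edge patching for on-stack replacement
// assumes this layout.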
static const byte kJnsOffset = kPointerSize == kInt64Size ? 0x1d : 0x14;


void FullCodeGenerator::EmitBackEdgeBookkeeping(IterationStatement* stmt,
                                                Label* back_edge_target) {
  Comment cmnt(masm_, "[ Back edge bookkeeping");
  Label ok;

  DCHECK(back_edge_target->is_bound());
  int distance = masm_->SizeOfCodeGeneratedSince(back_edge_target);
  int weight = Min(kMaxBackEdgeWeight,
                   Max(1, distance / kCodeSizeMultiplier));
  EmitProfilingCounterDecrement(weight);

  __ j(positive, &ok, Label::kNear);
  {
    PredictableCodeSizeScope predictible_code_size_scope(masm_, kJnsOffset);
    DontEmitDebugCodeScope dont_emit_debug_code_scope(masm_);
    __ call(isolate()->builtins()->InterruptCheck(), RelocInfo::CODE_TARGET);

    // Record a mapping of this PC offset to the OSR id.  This is used to find
    // the AST id from the unoptimized code in order to use it as a key into
    // the deoptimization input data found in the optimized code.
    RecordBackEdge(stmt->OsrEntryId());

    EmitProfilingCounterReset();
  }
  __ bind(&ok);

  PrepareForBailoutForId(stmt->EntryId(), BailoutState::NO_REGISTERS);
  // Record a mapping of the OSR id to this PC.  This is used if the OSR
  // entry becomes the target of a bailout.  We don't expect it to be, but
  // we want it to work if it is.
  PrepareForBailoutForId(stmt->OsrEntryId(), BailoutState::NO_REGISTERS);
}

void FullCodeGenerator::EmitProfilingCounterHandlingForReturnSequence(
    bool is_tail_call) {
  // Pretend that the exit is a backwards jump to the entry.
  int weight = 1;
  if (info_->ShouldSelfOptimize()) {
    weight = FLAG_interrupt_budget / FLAG_self_opt_count;
  } else {
    int distance = masm_->pc_offset();
    weight = Min(kMaxBackEdgeWeight, Max(1, distance / kCodeSizeMultiplier));
  }
  EmitProfilingCounterDecrement(weight);
  Label ok;
  __ j(positive, &ok, Label::kNear);
  // Don't need to save result register if we are going to do a tail call.
  if (!is_tail_call) {
    __ Push(rax);
  }
  __ call(isolate()->builtins()->InterruptCheck(), RelocInfo::CODE_TARGET);
  if (!is_tail_call) {
    __ Pop(rax);
  }
  EmitProfilingCounterReset();
  __ bind(&ok);
}

void FullCodeGenerator::EmitReturnSequence() {
  Comment cmnt(masm_, "[ Return sequence");
  if (return_label_.is_bound()) {
    __ jmp(&return_label_);
  } else {
    __ bind(&return_label_);
    if (FLAG_trace) {
      __ Push(rax);
      __ CallRuntime(Runtime::kTraceExit);
    }
    EmitProfilingCounterHandlingForReturnSequence(false);

    SetReturnPosition(literal());
    __ leave();

    int arg_count = info_->scope()->num_parameters() + 1;
    int arguments_bytes = arg_count * kPointerSize;
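    // Ret(bytes, scratch) returns and additionally removes the receiver
    // plus arguments (arguments_bytes) from the caller's stack; rcx serves
    // as scratch if the byte count cannot be encoded directly in the ret
    // instruction.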
    __ Ret(arguments_bytes, rcx);
  }
}

void FullCodeGenerator::RestoreContext() {
  __ movp(rsi, Operand(rbp, StandardFrameConstants::kContextOffset));
}

void FullCodeGenerator::StackValueContext::Plug(Variable* var) const {
  DCHECK(var->IsStackAllocated() || var->IsContextSlot());
  MemOperand operand = codegen()->VarOperand(var, result_register());
  codegen()->PushOperand(operand);
}


void FullCodeGenerator::EffectContext::Plug(Heap::RootListIndex index) const {
}


void FullCodeGenerator::AccumulatorValueContext::Plug(
    Heap::RootListIndex index) const {
  __ LoadRoot(result_register(), index);
}


void FullCodeGenerator::StackValueContext::Plug(
    Heap::RootListIndex index) const {
  codegen()->OperandStackDepthIncrement(1);
  __ PushRoot(index);
}


void FullCodeGenerator::TestContext::Plug(Heap::RootListIndex index) const {
  codegen()->PrepareForBailoutBeforeSplit(condition(),
                                          true,
                                          true_label_,
                                          false_label_);
  if (index == Heap::kUndefinedValueRootIndex ||
      index == Heap::kNullValueRootIndex ||
      index == Heap::kFalseValueRootIndex) {
    if (false_label_ != fall_through_) __ jmp(false_label_);
  } else if (index == Heap::kTrueValueRootIndex) {
    if (true_label_ != fall_through_) __ jmp(true_label_);
  } else {
    __ LoadRoot(result_register(), index);
    codegen()->DoTest(this);
  }
}


void FullCodeGenerator::EffectContext::Plug(Handle<Object> lit) const {
}


void FullCodeGenerator::AccumulatorValueContext::Plug(
    Handle<Object> lit) const {
  if (lit->IsSmi()) {
    __ SafeMove(result_register(), Smi::cast(*lit));
  } else {
    __ Move(result_register(), lit);
  }
}


void FullCodeGenerator::StackValueContext::Plug(Handle<Object> lit) const {
  codegen()->OperandStackDepthIncrement(1);
  if (lit->IsSmi()) {
    __ SafePush(Smi::cast(*lit));
  } else {
    __ Push(lit);
  }
}


void FullCodeGenerator::TestContext::Plug(Handle<Object> lit) const {
  codegen()->PrepareForBailoutBeforeSplit(condition(),
                                          true,
                                          true_label_,
                                          false_label_);
  DCHECK(lit->IsNull() || lit->IsUndefined() || !lit->IsUndetectable());
  if (lit->IsUndefined() || lit->IsNull() || lit->IsFalse()) {
    if (false_label_ != fall_through_) __ jmp(false_label_);
  } else if (lit->IsTrue() || lit->IsJSObject()) {
    if (true_label_ != fall_through_) __ jmp(true_label_);
  } else if (lit->IsString()) {
    if (String::cast(*lit)->length() == 0) {
      if (false_label_ != fall_through_) __ jmp(false_label_);
    } else {
      if (true_label_ != fall_through_) __ jmp(true_label_);
    }
  } else if (lit->IsSmi()) {
    if (Smi::cast(*lit)->value() == 0) {
      if (false_label_ != fall_through_) __ jmp(false_label_);
    } else {
      if (true_label_ != fall_through_) __ jmp(true_label_);
    }
  } else {
    // For simplicity we always test the accumulator register.
    __ Move(result_register(), lit);
    codegen()->DoTest(this);
  }
}


void FullCodeGenerator::StackValueContext::DropAndPlug(int count,
                                                       Register reg) const {
  DCHECK(count > 0);
  if (count > 1) codegen()->DropOperands(count - 1);
  __ movp(Operand(rsp, 0), reg);
}


void FullCodeGenerator::EffectContext::Plug(Label* materialize_true,
                                            Label* materialize_false) const {
  DCHECK(materialize_true == materialize_false);
  __ bind(materialize_true);
}


void FullCodeGenerator::AccumulatorValueContext::Plug(
    Label* materialize_true,
    Label* materialize_false) const {
  Label done;
  __ bind(materialize_true);
  __ Move(result_register(), isolate()->factory()->true_value());
  __ jmp(&done, Label::kNear);
  __ bind(materialize_false);
  __ Move(result_register(), isolate()->factory()->false_value());
  __ bind(&done);
}


void FullCodeGenerator::StackValueContext::Plug(
    Label* materialize_true,
    Label* materialize_false) const {
  codegen()->OperandStackDepthIncrement(1);
  Label done;
  __ bind(materialize_true);
  __ Push(isolate()->factory()->true_value());
  __ jmp(&done, Label::kNear);
  __ bind(materialize_false);
  __ Push(isolate()->factory()->false_value());
  __ bind(&done);
}


void FullCodeGenerator::TestContext::Plug(Label* materialize_true,
                                          Label* materialize_false) const {
  DCHECK(materialize_true == true_label_);
  DCHECK(materialize_false == false_label_);
}


void FullCodeGenerator::AccumulatorValueContext::Plug(bool flag) const {
  Heap::RootListIndex value_root_index =
      flag ? Heap::kTrueValueRootIndex : Heap::kFalseValueRootIndex;
  __ LoadRoot(result_register(), value_root_index);
}


void FullCodeGenerator::StackValueContext::Plug(bool flag) const {
  codegen()->OperandStackDepthIncrement(1);
  Heap::RootListIndex value_root_index =
      flag ? Heap::kTrueValueRootIndex : Heap::kFalseValueRootIndex;
  __ PushRoot(value_root_index);
}


void FullCodeGenerator::TestContext::Plug(bool flag) const {
  codegen()->PrepareForBailoutBeforeSplit(condition(),
                                          true,
                                          true_label_,
                                          false_label_);
  if (flag) {
    if (true_label_ != fall_through_) __ jmp(true_label_);
  } else {
    if (false_label_ != fall_through_) __ jmp(false_label_);
  }
}


void FullCodeGenerator::DoTest(Expression* condition,
                               Label* if_true,
                               Label* if_false,
                               Label* fall_through) {
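  // The ToBoolean IC leaves the canonical true or false value in the
  // result register, so a single comparison against the true root fully
  // decides the branch.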
  Handle<Code> ic = ToBooleanICStub::GetUninitialized(isolate());
  CallIC(ic, condition->test_id());
  __ CompareRoot(result_register(), Heap::kTrueValueRootIndex);
  Split(equal, if_true, if_false, fall_through);
}

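// Emit a branch on cc in the cheapest form: when one target is the
// fall-through block, a single conditional jump (possibly on the negated
// condition) suffices; otherwise both a conditional and an unconditional
// jump are needed.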
void FullCodeGenerator::Split(Condition cc,
                              Label* if_true,
                              Label* if_false,
                              Label* fall_through) {
  if (if_false == fall_through) {
    __ j(cc, if_true);
  } else if (if_true == fall_through) {
    __ j(NegateCondition(cc), if_false);
  } else {
    __ j(cc, if_true);
    __ jmp(if_false);
  }
}


MemOperand FullCodeGenerator::StackOperand(Variable* var) {
  DCHECK(var->IsStackAllocated());
  // Offset is negative because higher indexes are at lower addresses.
  int offset = -var->index() * kPointerSize;
  // Adjust by a (parameter or local) base offset.
  if (var->IsParameter()) {
    offset += kFPOnStackSize + kPCOnStackSize +
              (info_->scope()->num_parameters() - 1) * kPointerSize;
  } else {
    offset += JavaScriptFrameConstants::kLocal0Offset;
  }
  return Operand(rbp, offset);
}


MemOperand FullCodeGenerator::VarOperand(Variable* var, Register scratch) {
  DCHECK(var->IsContextSlot() || var->IsStackAllocated());
  if (var->IsContextSlot()) {
    int context_chain_length = scope()->ContextChainLength(var->scope());
    __ LoadContext(scratch, context_chain_length);
    return ContextOperand(scratch, var->index());
  } else {
    return StackOperand(var);
  }
}


void FullCodeGenerator::GetVar(Register dest, Variable* var) {
  DCHECK(var->IsContextSlot() || var->IsStackAllocated());
  MemOperand location = VarOperand(var, dest);
  __ movp(dest, location);
}


void FullCodeGenerator::SetVar(Variable* var,
                               Register src,
                               Register scratch0,
                               Register scratch1) {
  DCHECK(var->IsContextSlot() || var->IsStackAllocated());
  DCHECK(!scratch0.is(src));
  DCHECK(!scratch0.is(scratch1));
  DCHECK(!scratch1.is(src));
  MemOperand location = VarOperand(var, scratch0);
  __ movp(location, src);

  // Emit the write barrier code if the location is in the heap.
  if (var->IsContextSlot()) {
    int offset = Context::SlotOffset(var->index());
    __ RecordWriteContextSlot(scratch0, offset, src, scratch1, kDontSaveFPRegs);
  }
}


void FullCodeGenerator::PrepareForBailoutBeforeSplit(Expression* expr,
                                                     bool should_normalize,
                                                     Label* if_true,
                                                     Label* if_false) {
  // Only prepare for bailouts before splits if we're in a test
  // context. Otherwise, we let the Visit function deal with the
  // preparation to avoid preparing with the same AST id twice.
  if (!context()->IsTest()) return;

  Label skip;
  if (should_normalize) __ jmp(&skip, Label::kNear);
  PrepareForBailout(expr, BailoutState::TOS_REGISTER);
  if (should_normalize) {
    __ CompareRoot(rax, Heap::kTrueValueRootIndex);
    Split(equal, if_true, if_false, NULL);
    __ bind(&skip);
  }
}


void FullCodeGenerator::EmitDebugCheckDeclarationContext(Variable* variable) {
  // The variable in the declaration always resides in the current context.
  DCHECK_EQ(0, scope()->ContextChainLength(variable->scope()));
  if (FLAG_debug_code) {
    // Check that we're not inside a with or catch context.
    __ movp(rbx, FieldOperand(rsi, HeapObject::kMapOffset));
    __ CompareRoot(rbx, Heap::kWithContextMapRootIndex);
    __ Check(not_equal, kDeclarationInWithContext);
    __ CompareRoot(rbx, Heap::kCatchContextMapRootIndex);
    __ Check(not_equal, kDeclarationInCatchContext);
  }
}


void FullCodeGenerator::VisitVariableDeclaration(
    VariableDeclaration* declaration) {
  // If it was not possible to allocate the variable at compile time, we
  // need to "declare" it at runtime to make sure it actually exists in the
  // local context.
  VariableProxy* proxy = declaration->proxy();
  VariableMode mode = declaration->mode();
  Variable* variable = proxy->var();
  bool hole_init = mode == LET || mode == CONST;
  switch (variable->location()) {
    case VariableLocation::GLOBAL:
    case VariableLocation::UNALLOCATED:
      DCHECK(!variable->binding_needs_init());
      globals_->Add(variable->name(), zone());
      globals_->Add(isolate()->factory()->undefined_value(), zone());
      break;

    case VariableLocation::PARAMETER:
    case VariableLocation::LOCAL:
      if (hole_init) {
        Comment cmnt(masm_, "[ VariableDeclaration");
        __ LoadRoot(kScratchRegister, Heap::kTheHoleValueRootIndex);
        __ movp(StackOperand(variable), kScratchRegister);
      }
      break;

    case VariableLocation::CONTEXT:
      if (hole_init) {
        Comment cmnt(masm_, "[ VariableDeclaration");
        EmitDebugCheckDeclarationContext(variable);
        __ LoadRoot(kScratchRegister, Heap::kTheHoleValueRootIndex);
        __ movp(ContextOperand(rsi, variable->index()), kScratchRegister);
        // No write barrier since the hole value is in old space.
        PrepareForBailoutForId(proxy->id(), BailoutState::NO_REGISTERS);
      }
      break;

    case VariableLocation::LOOKUP: {
      Comment cmnt(masm_, "[ VariableDeclaration");
      __ Push(variable->name());
      // Declaration nodes are always introduced in one of four modes.
      DCHECK(IsDeclaredVariableMode(mode));
      // Push initial value, if any.
      // Note: For variables we must not push an initial value (such as
      // 'undefined') because we may have a (legal) redeclaration and we
      // must not destroy the current value.
      if (hole_init) {
        __ PushRoot(Heap::kTheHoleValueRootIndex);
      } else {
        __ Push(Smi::FromInt(0));  // Indicates no initial value.
      }
      __ Push(Smi::FromInt(variable->DeclarationPropertyAttributes()));
      __ CallRuntime(Runtime::kDeclareLookupSlot);
      PrepareForBailoutForId(proxy->id(), BailoutState::NO_REGISTERS);
      break;
    }
  }
}


void FullCodeGenerator::VisitFunctionDeclaration(
    FunctionDeclaration* declaration) {
  VariableProxy* proxy = declaration->proxy();
  Variable* variable = proxy->var();
  switch (variable->location()) {
    case VariableLocation::GLOBAL:
    case VariableLocation::UNALLOCATED: {
      globals_->Add(variable->name(), zone());
      Handle<SharedFunctionInfo> function =
          Compiler::GetSharedFunctionInfo(declaration->fun(), script(), info_);
      // Check for stack-overflow exception.
      if (function.is_null()) return SetStackOverflow();
      globals_->Add(function, zone());
      break;
    }

    case VariableLocation::PARAMETER:
    case VariableLocation::LOCAL: {
      Comment cmnt(masm_, "[ FunctionDeclaration");
      VisitForAccumulatorValue(declaration->fun());
      __ movp(StackOperand(variable), result_register());
      break;
    }

    case VariableLocation::CONTEXT: {
      Comment cmnt(masm_, "[ FunctionDeclaration");
      EmitDebugCheckDeclarationContext(variable);
      VisitForAccumulatorValue(declaration->fun());
      __ movp(ContextOperand(rsi, variable->index()), result_register());
      int offset = Context::SlotOffset(variable->index());
      // We know that we have written a function, which is not a smi.
      __ RecordWriteContextSlot(rsi,
                                offset,
                                result_register(),
                                rcx,
                                kDontSaveFPRegs,
                                EMIT_REMEMBERED_SET,
                                OMIT_SMI_CHECK);
      PrepareForBailoutForId(proxy->id(), BailoutState::NO_REGISTERS);
      break;
    }

    case VariableLocation::LOOKUP: {
      Comment cmnt(masm_, "[ FunctionDeclaration");
      PushOperand(variable->name());
      VisitForStackValue(declaration->fun());
      PushOperand(Smi::FromInt(variable->DeclarationPropertyAttributes()));
      CallRuntimeWithOperands(Runtime::kDeclareLookupSlot);
      PrepareForBailoutForId(proxy->id(), BailoutState::NO_REGISTERS);
      break;
    }
  }
}


void FullCodeGenerator::DeclareGlobals(Handle<FixedArray> pairs) {
  // Call the runtime to declare the globals.
  __ Push(pairs);
  __ Push(Smi::FromInt(DeclareGlobalsFlags()));
  __ CallRuntime(Runtime::kDeclareGlobals);
  // Return value is ignored.
}


void FullCodeGenerator::DeclareModules(Handle<FixedArray> descriptions) {
  // Call the runtime to declare the modules.
  __ Push(descriptions);
  __ CallRuntime(Runtime::kDeclareModules);
  // Return value is ignored.
}


void FullCodeGenerator::VisitSwitchStatement(SwitchStatement* stmt) {
  Comment cmnt(masm_, "[ SwitchStatement");
  Breakable nested_statement(this, stmt);
  SetStatementPosition(stmt);

  // Keep the switch value on the stack until a case matches.
  VisitForStackValue(stmt->tag());
  PrepareForBailoutForId(stmt->EntryId(), BailoutState::NO_REGISTERS);

  ZoneList<CaseClause*>* clauses = stmt->cases();
  CaseClause* default_clause = NULL;  // Can occur anywhere in the list.

  Label next_test;  // Recycled for each test.
  // Compile all the tests with branches to their bodies.
  for (int i = 0; i < clauses->length(); i++) {
    CaseClause* clause = clauses->at(i);
    clause->body_target()->Unuse();

    // The default is not a test, but remember it as final fall through.
    if (clause->is_default()) {
      default_clause = clause;
      continue;
    }

    Comment cmnt(masm_, "[ Case comparison");
    __ bind(&next_test);
    next_test.Unuse();

    // Compile the label expression.
    VisitForAccumulatorValue(clause->label());

    // Perform the comparison as if via '==='.
    __ movp(rdx, Operand(rsp, 0));  // Switch value.
    bool inline_smi_code = ShouldInlineSmiCase(Token::EQ_STRICT);
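    // Record a patch site so the CompareIC can later rewrite the inlined
    // smi check below (see JumpPatchSite at the top of this file).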
    JumpPatchSite patch_site(masm_);
    if (inline_smi_code) {
      Label slow_case;
      __ movp(rcx, rdx);
      __ orp(rcx, rax);
      patch_site.EmitJumpIfNotSmi(rcx, &slow_case, Label::kNear);

      __ cmpp(rdx, rax);
      __ j(not_equal, &next_test);
      __ Drop(1);  // Switch value is no longer needed.
      __ jmp(clause->body_target());
      __ bind(&slow_case);
    }

    // Record position before stub call for type feedback.
    SetExpressionPosition(clause);
    Handle<Code> ic =
        CodeFactory::CompareIC(isolate(), Token::EQ_STRICT).code();
    CallIC(ic, clause->CompareId());
    patch_site.EmitPatchInfo();

    Label skip;
    __ jmp(&skip, Label::kNear);
    PrepareForBailout(clause, BailoutState::TOS_REGISTER);
    __ CompareRoot(rax, Heap::kTrueValueRootIndex);
    __ j(not_equal, &next_test);
    __ Drop(1);
    __ jmp(clause->body_target());
    __ bind(&skip);

    __ testp(rax, rax);
    __ j(not_equal, &next_test);
    __ Drop(1);  // Switch value is no longer needed.
    __ jmp(clause->body_target());
  }

  // Discard the test value and jump to the default if present, otherwise to
  // the end of the statement.
  __ bind(&next_test);
  DropOperands(1);  // Switch value is no longer needed.
  if (default_clause == NULL) {
    __ jmp(nested_statement.break_label());
  } else {
    __ jmp(default_clause->body_target());
  }

  // Compile all the case bodies.
  for (int i = 0; i < clauses->length(); i++) {
    Comment cmnt(masm_, "[ Case body");
    CaseClause* clause = clauses->at(i);
    __ bind(clause->body_target());
    PrepareForBailoutForId(clause->EntryId(), BailoutState::NO_REGISTERS);
    VisitStatements(clause->statements());
  }

  __ bind(nested_statement.break_label());
  PrepareForBailoutForId(stmt->ExitId(), BailoutState::NO_REGISTERS);
}


void FullCodeGenerator::VisitForInStatement(ForInStatement* stmt) {
  Comment cmnt(masm_, "[ ForInStatement");
  SetStatementPosition(stmt, SKIP_BREAK);

  FeedbackVectorSlot slot = stmt->ForInFeedbackSlot();

  // Get the object to enumerate over.
  SetExpressionAsStatementPosition(stmt->enumerable());
  VisitForAccumulatorValue(stmt->enumerable());
  OperandStackDepthIncrement(5);
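  // The loop keeps five values on the operand stack (pushed further down).
  // From top to bottom: the current index, the length, the enum cache (or
  // the fixed array of keys on the slow path), the map (or Smi(1) marking
  // the slow path), and the enumerable object itself.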

  Label loop, exit;
  Iteration loop_statement(this, stmt);
  increment_loop_depth();

  // If the object is null or undefined, skip over the loop, otherwise convert
  // it to a JS receiver.  See ECMA-262 version 5, section 12.6.4.
  Label convert, done_convert;
  __ JumpIfSmi(rax, &convert, Label::kNear);
  __ CmpObjectType(rax, FIRST_JS_RECEIVER_TYPE, rcx);
  __ j(above_equal, &done_convert, Label::kNear);
  __ CompareRoot(rax, Heap::kNullValueRootIndex);
  __ j(equal, &exit);
  __ CompareRoot(rax, Heap::kUndefinedValueRootIndex);
  __ j(equal, &exit);
  __ bind(&convert);
  ToObjectStub stub(isolate());
  __ CallStub(&stub);
  __ bind(&done_convert);
  PrepareForBailoutForId(stmt->ToObjectId(), BailoutState::TOS_REGISTER);
  __ Push(rax);

  // Check cache validity in generated code. If we cannot guarantee cache
  // validity, call the runtime system to check cache validity or get the
  // property names in a fixed array. Note: Proxies never have an enum cache,
  // so will always take the slow path.
  Label call_runtime;
  __ CheckEnumCache(&call_runtime);

  // The enum cache is valid.  Load the map of the object being
  // iterated over and use the cache for the iteration.
  Label use_cache;
  __ movp(rax, FieldOperand(rax, HeapObject::kMapOffset));
  __ jmp(&use_cache, Label::kNear);

  // Get the set of properties to enumerate.
  __ bind(&call_runtime);
  __ Push(rax);  // Duplicate the enumerable object on the stack.
  __ CallRuntime(Runtime::kForInEnumerate);
  PrepareForBailoutForId(stmt->EnumId(), BailoutState::TOS_REGISTER);

  // If we got a map from the runtime call, we can do a fast
  // modification check. Otherwise, we got a fixed array, and we have
  // to do a slow check.
  Label fixed_array;
  __ CompareRoot(FieldOperand(rax, HeapObject::kMapOffset),
                 Heap::kMetaMapRootIndex);
  __ j(not_equal, &fixed_array);

  // We got a map in register rax. Get the enumeration cache from it.
  __ bind(&use_cache);

  Label no_descriptors;

  __ EnumLength(rdx, rax);
  __ Cmp(rdx, Smi::FromInt(0));
  __ j(equal, &no_descriptors);

  __ LoadInstanceDescriptors(rax, rcx);
  __ movp(rcx, FieldOperand(rcx, DescriptorArray::kEnumCacheOffset));
  __ movp(rcx, FieldOperand(rcx, DescriptorArray::kEnumCacheBridgeCacheOffset));

  // Set up the four remaining stack slots.
  __ Push(rax);  // Map.
  __ Push(rcx);  // Enumeration cache.
  __ Push(rdx);  // Number of valid entries for the map in the enum cache.
  __ Push(Smi::FromInt(0));  // Initial index.
  __ jmp(&loop);

  __ bind(&no_descriptors);
  __ addp(rsp, Immediate(kPointerSize));
  __ jmp(&exit);

  // We got a fixed array in register rax. Iterate through that.
  __ bind(&fixed_array);

  __ movp(rcx, Operand(rsp, 0 * kPointerSize));  // Get enumerated object.
  __ Push(Smi::FromInt(1));  // Smi(1) indicates slow check.
  __ Push(rax);  // Array.
  __ movp(rax, FieldOperand(rax, FixedArray::kLengthOffset));
  __ Push(rax);  // Fixed array length (as smi).
  PrepareForBailoutForId(stmt->PrepareId(), BailoutState::NO_REGISTERS);
  __ Push(Smi::FromInt(0));  // Initial index.

  // Generate code for doing the condition check.
  __ bind(&loop);
  SetExpressionAsStatementPosition(stmt->each());

  __ movp(rax, Operand(rsp, 0 * kPointerSize));  // Get the current index.
  __ cmpp(rax, Operand(rsp, 1 * kPointerSize));  // Compare to the array length.
  __ j(above_equal, loop_statement.break_label());

  // Get the current entry of the array into register rbx.
  __ movp(rbx, Operand(rsp, 2 * kPointerSize));
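  // Convert the smi index into an untagged, pointer-scaled index for the
  // element load below.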
1052 SmiIndex index = masm()->SmiToIndex(rax, rax, kPointerSizeLog2);
1053 __ movp(rbx, FieldOperand(rbx,
1054 index.reg,
1055 index.scale,
1056 FixedArray::kHeaderSize));
1057
1058 // Get the expected map from the stack or a smi in the
1059 // permanent slow case into register rdx.
1060 __ movp(rdx, Operand(rsp, 3 * kPointerSize));
1061
1062 // Check if the expected map still matches that of the enumerable.
1063 // If not, we may have to filter the key.
1064 Label update_each;
1065 __ movp(rcx, Operand(rsp, 4 * kPointerSize));
1066 __ cmpp(rdx, FieldOperand(rcx, HeapObject::kMapOffset));
1067 __ j(equal, &update_each, Label::kNear);
1068
Ben Murdochda12d292016-06-02 14:46:10 +01001069 // We need to filter the key, record slow-path here.
1070 int const vector_index = SmiFromSlot(slot)->value();
Ben Murdoch097c5b22016-05-18 11:27:45 +01001071 __ EmitLoadTypeFeedbackVector(rdx);
1072 __ Move(FieldOperand(rdx, FixedArray::OffsetOfElementAt(vector_index)),
1073 TypeFeedbackVector::MegamorphicSentinel(isolate()));
1074
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00001075 // Convert the entry to a string or null if it isn't a property
1076 // anymore. If the property has been removed while iterating, we
1077 // just skip it.
1078 __ Push(rcx); // Enumerable.
1079 __ Push(rbx); // Current entry.
1080 __ CallRuntime(Runtime::kForInFilter);
Ben Murdochc5610432016-08-08 18:44:38 +01001081 PrepareForBailoutForId(stmt->FilterId(), BailoutState::TOS_REGISTER);
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00001082 __ CompareRoot(rax, Heap::kUndefinedValueRootIndex);
1083 __ j(equal, loop_statement.continue_label());
1084 __ movp(rbx, rax);
1085
1086 // Update the 'each' property or variable from the possibly filtered
1087 // entry in register rbx.
1088 __ bind(&update_each);
1089 __ movp(result_register(), rbx);
1090 // Perform the assignment as if via '='.
1091 { EffectContext context(this);
1092 EmitAssignment(stmt->each(), stmt->EachFeedbackSlot());
Ben Murdochc5610432016-08-08 18:44:38 +01001093 PrepareForBailoutForId(stmt->AssignmentId(), BailoutState::NO_REGISTERS);
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00001094 }
1095
1096 // Both Crankshaft and Turbofan expect BodyId to be right before stmt->body().
Ben Murdochc5610432016-08-08 18:44:38 +01001097 PrepareForBailoutForId(stmt->BodyId(), BailoutState::NO_REGISTERS);
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00001098 // Generate code for the body of the loop.
1099 Visit(stmt->body());
1100
1101 // Generate code for going to the next element by incrementing the
1102 // index (smi) stored on top of the stack.
1103 __ bind(loop_statement.continue_label());
1104 __ SmiAddConstant(Operand(rsp, 0 * kPointerSize), Smi::FromInt(1));
1105
1106 EmitBackEdgeBookkeeping(stmt, &loop);
1107 __ jmp(&loop);
1108
1109 // Remove the pointers stored on the stack.
1110 __ bind(loop_statement.break_label());
Ben Murdochda12d292016-06-02 14:46:10 +01001111 DropOperands(5);
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00001112
1113 // Exit and decrement the loop depth.
Ben Murdochc5610432016-08-08 18:44:38 +01001114 PrepareForBailoutForId(stmt->ExitId(), BailoutState::NO_REGISTERS);
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00001115 __ bind(&exit);
1116 decrement_loop_depth();
1117}
1118
1119
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00001120void FullCodeGenerator::EmitSetHomeObject(Expression* initializer, int offset,
1121 FeedbackVectorSlot slot) {
1122 DCHECK(NeedsHomeObject(initializer));
1123 __ movp(StoreDescriptor::ReceiverRegister(), Operand(rsp, 0));
1124 __ Move(StoreDescriptor::NameRegister(),
1125 isolate()->factory()->home_object_symbol());
1126 __ movp(StoreDescriptor::ValueRegister(),
1127 Operand(rsp, offset * kPointerSize));
1128 EmitLoadStoreICSlot(slot);
1129 CallStoreIC();
1130}
1131
1132
1133void FullCodeGenerator::EmitSetHomeObjectAccumulator(Expression* initializer,
1134 int offset,
1135 FeedbackVectorSlot slot) {
1136 DCHECK(NeedsHomeObject(initializer));
1137 __ movp(StoreDescriptor::ReceiverRegister(), rax);
1138 __ Move(StoreDescriptor::NameRegister(),
1139 isolate()->factory()->home_object_symbol());
1140 __ movp(StoreDescriptor::ValueRegister(),
1141 Operand(rsp, offset * kPointerSize));
1142 EmitLoadStoreICSlot(slot);
1143 CallStoreIC();
1144}
1145
1146
1147void FullCodeGenerator::EmitLoadGlobalCheckExtensions(VariableProxy* proxy,
1148 TypeofMode typeof_mode,
1149 Label* slow) {
1150 Register context = rsi;
1151 Register temp = rdx;
1152
1153 Scope* s = scope();
1154 while (s != NULL) {
1155 if (s->num_heap_slots() > 0) {
1156 if (s->calls_sloppy_eval()) {
1157 // Check that extension is "the hole".
1158 __ JumpIfNotRoot(ContextOperand(context, Context::EXTENSION_INDEX),
1159 Heap::kTheHoleValueRootIndex, slow);
1160 }
1161 // Load next context in chain.
1162 __ movp(temp, ContextOperand(context, Context::PREVIOUS_INDEX));
1163 // Walk the rest of the chain without clobbering rsi.
1164 context = temp;
1165 }
1166 // If no outer scope calls eval, we do not need to check more
1167 // context extensions. If we have reached an eval scope, we check
1168 // all extensions from this point.
1169 if (!s->outer_scope_calls_sloppy_eval() || s->is_eval_scope()) break;
1170 s = s->outer_scope();
1171 }
1172
1173 if (s != NULL && s->is_eval_scope()) {
1174 // Loop up the context chain. There is no frame effect so it is
1175 // safe to use raw labels here.
1176 Label next, fast;
1177 if (!context.is(temp)) {
1178 __ movp(temp, context);
1179 }
1180 // Load map for comparison into register, outside loop.
1181 __ LoadRoot(kScratchRegister, Heap::kNativeContextMapRootIndex);
1182 __ bind(&next);
1183 // Terminate at native context.
1184 __ cmpp(kScratchRegister, FieldOperand(temp, HeapObject::kMapOffset));
1185 __ j(equal, &fast, Label::kNear);
1186 // Check that extension is "the hole".
1187 __ JumpIfNotRoot(ContextOperand(temp, Context::EXTENSION_INDEX),
1188 Heap::kTheHoleValueRootIndex, slow);
1189 // Load next context in chain.
1190 __ movp(temp, ContextOperand(temp, Context::PREVIOUS_INDEX));
1191 __ jmp(&next);
1192 __ bind(&fast);
1193 }
1194
1195 // All extension objects were empty and it is safe to use a normal global
1196 // load machinery.
1197 EmitGlobalVariableLoad(proxy, typeof_mode);
1198}
1199
1200
1201MemOperand FullCodeGenerator::ContextSlotOperandCheckExtensions(Variable* var,
1202 Label* slow) {
1203 DCHECK(var->IsContextSlot());
1204 Register context = rsi;
1205 Register temp = rbx;
1206
1207 for (Scope* s = scope(); s != var->scope(); s = s->outer_scope()) {
1208 if (s->num_heap_slots() > 0) {
1209 if (s->calls_sloppy_eval()) {
1210 // Check that extension is "the hole".
1211 __ JumpIfNotRoot(ContextOperand(context, Context::EXTENSION_INDEX),
1212 Heap::kTheHoleValueRootIndex, slow);
1213 }
1214 __ movp(temp, ContextOperand(context, Context::PREVIOUS_INDEX));
1215 // Walk the rest of the chain without clobbering rsi.
1216 context = temp;
1217 }
1218 }
1219 // Check that last extension is "the hole".
1220 __ JumpIfNotRoot(ContextOperand(context, Context::EXTENSION_INDEX),
1221 Heap::kTheHoleValueRootIndex, slow);
1222
1223 // This function is used only for loads, not stores, so it's safe to
1224 // return an rsi-based operand (the write barrier cannot be allowed to
1225 // destroy the rsi register).
1226 return ContextOperand(context, var->index());
1227}
1228
1229
1230void FullCodeGenerator::EmitDynamicLookupFastCase(VariableProxy* proxy,
1231 TypeofMode typeof_mode,
1232 Label* slow, Label* done) {
1233 // Generate fast-case code for variables that might be shadowed by
1234 // eval-introduced variables. Eval is used a lot without
1235 // introducing variables. In those cases, we do not want to
1236 // perform a runtime call for all variables in the scope
1237 // containing the eval.
1238 Variable* var = proxy->var();
1239 if (var->mode() == DYNAMIC_GLOBAL) {
1240 EmitLoadGlobalCheckExtensions(proxy, typeof_mode, slow);
1241 __ jmp(done);
1242 } else if (var->mode() == DYNAMIC_LOCAL) {
1243 Variable* local = var->local_if_not_shadowed();
1244 __ movp(rax, ContextSlotOperandCheckExtensions(local, slow));
Ben Murdochc5610432016-08-08 18:44:38 +01001245 if (local->mode() == LET || local->mode() == CONST) {
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00001246 __ CompareRoot(rax, Heap::kTheHoleValueRootIndex);
1247 __ j(not_equal, done);
Ben Murdochc5610432016-08-08 18:44:38 +01001248 __ Push(var->name());
1249 __ CallRuntime(Runtime::kThrowReferenceError);
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00001250 }
1251 __ jmp(done);
1252 }
1253}
1254
1255
1256void FullCodeGenerator::EmitGlobalVariableLoad(VariableProxy* proxy,
1257 TypeofMode typeof_mode) {
1258 Variable* var = proxy->var();
1259 DCHECK(var->IsUnallocatedOrGlobalSlot() ||
1260 (var->IsLookupSlot() && var->mode() == DYNAMIC_GLOBAL));
1261 __ Move(LoadDescriptor::NameRegister(), var->name());
1262 __ LoadGlobalObject(LoadDescriptor::ReceiverRegister());
1263 __ Move(LoadDescriptor::SlotRegister(),
1264 SmiFromSlot(proxy->VariableFeedbackSlot()));
1265 CallLoadIC(typeof_mode);
1266}
1267
1268
1269void FullCodeGenerator::EmitVariableLoad(VariableProxy* proxy,
1270 TypeofMode typeof_mode) {
1271 // Record position before possible IC call.
1272 SetExpressionPosition(proxy);
Ben Murdochc5610432016-08-08 18:44:38 +01001273 PrepareForBailoutForId(proxy->BeforeId(), BailoutState::NO_REGISTERS);
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00001274 Variable* var = proxy->var();
1275
1276 // Three cases: global variables, lookup variables, and all other types of
1277 // variables.
1278 switch (var->location()) {
1279 case VariableLocation::GLOBAL:
1280 case VariableLocation::UNALLOCATED: {
1281 Comment cmnt(masm_, "[ Global variable");
1282 EmitGlobalVariableLoad(proxy, typeof_mode);
1283 context()->Plug(rax);
1284 break;
1285 }
1286
1287 case VariableLocation::PARAMETER:
1288 case VariableLocation::LOCAL:
1289 case VariableLocation::CONTEXT: {
1290 DCHECK_EQ(NOT_INSIDE_TYPEOF, typeof_mode);
1291 Comment cmnt(masm_, var->IsContextSlot() ? "[ Context slot"
1292 : "[ Stack slot");
1293 if (NeedsHoleCheckForLoad(proxy)) {
1294 // Let and const need a read barrier.
1295 Label done;
1296 GetVar(rax, var);
1297 __ CompareRoot(rax, Heap::kTheHoleValueRootIndex);
1298 __ j(not_equal, &done, Label::kNear);
1299 if (var->mode() == LET || var->mode() == CONST) {
1300 // Throw a reference error when using an uninitialized let/const
1301 // binding in harmony mode.
1302 __ Push(var->name());
1303 __ CallRuntime(Runtime::kThrowReferenceError);
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00001304 }
1305 __ bind(&done);
1306 context()->Plug(rax);
1307 break;
1308 }
1309 context()->Plug(var);
1310 break;
1311 }
1312
1313 case VariableLocation::LOOKUP: {
1314 Comment cmnt(masm_, "[ Lookup slot");
1315 Label done, slow;
1316 // Generate code for loading from variables potentially shadowed
1317 // by eval-introduced variables.
1318 EmitDynamicLookupFastCase(proxy, typeof_mode, &slow, &done);
1319 __ bind(&slow);
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00001320 __ Push(var->name());
1321 Runtime::FunctionId function_id =
1322 typeof_mode == NOT_INSIDE_TYPEOF
1323 ? Runtime::kLoadLookupSlot
Ben Murdoch097c5b22016-05-18 11:27:45 +01001324 : Runtime::kLoadLookupSlotInsideTypeof;
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00001325 __ CallRuntime(function_id);
1326 __ bind(&done);
1327 context()->Plug(rax);
1328 break;
1329 }
1330 }
1331}
1332
1333
1334void FullCodeGenerator::VisitRegExpLiteral(RegExpLiteral* expr) {
1335 Comment cmnt(masm_, "[ RegExpLiteral");
1336 __ movp(rdi, Operand(rbp, JavaScriptFrameConstants::kFunctionOffset));
1337 __ Move(rax, Smi::FromInt(expr->literal_index()));
1338 __ Move(rcx, expr->pattern());
1339 __ Move(rdx, Smi::FromInt(expr->flags()));
1340 FastCloneRegExpStub stub(isolate());
1341 __ CallStub(&stub);
1342 context()->Plug(rax);
1343}
1344
1345
1346void FullCodeGenerator::EmitAccessor(ObjectLiteralProperty* property) {
1347 Expression* expression = (property == NULL) ? NULL : property->value();
1348 if (expression == NULL) {
Ben Murdoch097c5b22016-05-18 11:27:45 +01001349 OperandStackDepthIncrement(1);
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00001350 __ PushRoot(Heap::kNullValueRootIndex);
1351 } else {
1352 VisitForStackValue(expression);
1353 if (NeedsHomeObject(expression)) {
1354 DCHECK(property->kind() == ObjectLiteral::Property::GETTER ||
1355 property->kind() == ObjectLiteral::Property::SETTER);
1356 int offset = property->kind() == ObjectLiteral::Property::GETTER ? 2 : 3;
1357 EmitSetHomeObject(expression, offset, property->GetSlot());
1358 }
1359 }
1360}
1361
1362
1363void FullCodeGenerator::VisitObjectLiteral(ObjectLiteral* expr) {
1364 Comment cmnt(masm_, "[ ObjectLiteral");
1365
1366 Handle<FixedArray> constant_properties = expr->constant_properties();
1367 int flags = expr->ComputeFlags();
  if (MustCreateObjectLiteralWithRuntime(expr)) {
    __ Push(Operand(rbp, JavaScriptFrameConstants::kFunctionOffset));
    __ Push(Smi::FromInt(expr->literal_index()));
    __ Push(constant_properties);
    __ Push(Smi::FromInt(flags));
    __ CallRuntime(Runtime::kCreateObjectLiteral);
  } else {
    __ movp(rax, Operand(rbp, JavaScriptFrameConstants::kFunctionOffset));
    __ Move(rbx, Smi::FromInt(expr->literal_index()));
    __ Move(rcx, constant_properties);
    __ Move(rdx, Smi::FromInt(flags));
    FastCloneShallowObjectStub stub(isolate(), expr->properties_count());
    __ CallStub(&stub);
    RestoreContext();
  }
  PrepareForBailoutForId(expr->CreateLiteralId(), BailoutState::TOS_REGISTER);

  // If result_saved is true the result is on top of the stack. If
  // result_saved is false the result is in rax.
  bool result_saved = false;

  AccessorTable accessor_table(zone());
  int property_index = 0;
  for (; property_index < expr->properties()->length(); property_index++) {
    ObjectLiteral::Property* property = expr->properties()->at(property_index);
    if (property->is_computed_name()) break;
    if (property->IsCompileTimeValue()) continue;

    Literal* key = property->key()->AsLiteral();
    Expression* value = property->value();
    if (!result_saved) {
      PushOperand(rax);  // Save result on the stack
      result_saved = true;
    }
    switch (property->kind()) {
      case ObjectLiteral::Property::CONSTANT:
        UNREACHABLE();
      case ObjectLiteral::Property::MATERIALIZED_LITERAL:
        DCHECK(!CompileTimeValue::IsCompileTimeValue(value));
      // Fall through.
      case ObjectLiteral::Property::COMPUTED:
        // It is safe to use [[Put]] here because the boilerplate already
        // contains computed properties with an uninitialized value.
        if (key->value()->IsInternalizedString()) {
          if (property->emit_store()) {
            VisitForAccumulatorValue(value);
            DCHECK(StoreDescriptor::ValueRegister().is(rax));
            __ Move(StoreDescriptor::NameRegister(), key->value());
            __ movp(StoreDescriptor::ReceiverRegister(), Operand(rsp, 0));
            EmitLoadStoreICSlot(property->GetSlot(0));
            CallStoreIC();
            PrepareForBailoutForId(key->id(), BailoutState::NO_REGISTERS);

            if (NeedsHomeObject(value)) {
              EmitSetHomeObjectAccumulator(value, 0, property->GetSlot(1));
            }
          } else {
            VisitForEffect(value);
          }
          break;
        }
        PushOperand(Operand(rsp, 0));  // Duplicate receiver.
        VisitForStackValue(key);
        VisitForStackValue(value);
        if (property->emit_store()) {
          if (NeedsHomeObject(value)) {
            EmitSetHomeObject(value, 2, property->GetSlot());
          }
          PushOperand(Smi::FromInt(SLOPPY));  // Language mode
          CallRuntimeWithOperands(Runtime::kSetProperty);
        } else {
          DropOperands(3);
        }
        break;
      case ObjectLiteral::Property::PROTOTYPE:
        PushOperand(Operand(rsp, 0));  // Duplicate receiver.
        VisitForStackValue(value);
        DCHECK(property->emit_store());
        CallRuntimeWithOperands(Runtime::kInternalSetPrototype);
        PrepareForBailoutForId(expr->GetIdForPropertySet(property_index),
                               BailoutState::NO_REGISTERS);
        break;
      case ObjectLiteral::Property::GETTER:
        if (property->emit_store()) {
          accessor_table.lookup(key)->second->getter = property;
        }
        break;
      case ObjectLiteral::Property::SETTER:
        if (property->emit_store()) {
          accessor_table.lookup(key)->second->setter = property;
        }
        break;
    }
  }

  // Emit code to define accessors, using only a single call to the runtime for
  // each pair of corresponding getters and setters.
  for (AccessorTable::Iterator it = accessor_table.begin();
       it != accessor_table.end();
       ++it) {
    PushOperand(Operand(rsp, 0));  // Duplicate receiver.
    VisitForStackValue(it->first);
    EmitAccessor(it->second->getter);
    EmitAccessor(it->second->setter);
    PushOperand(Smi::FromInt(NONE));
    CallRuntimeWithOperands(Runtime::kDefineAccessorPropertyUnchecked);
  }

  // Object literals have two parts. The "static" part on the left contains no
  // computed property names, and so we can compute its map ahead of time; see
  // runtime.cc::CreateObjectLiteralBoilerplate. The second "dynamic" part
  // starts with the first computed property name, and continues with all
  // properties to its right. All the code from above initializes the static
  // component of the object literal, and arranges for the map of the result to
  // reflect the static order in which the keys appear. For the dynamic
  // properties, we compile them into a series of "SetOwnProperty" runtime
  // calls. This will preserve insertion order.
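  // For example (an illustrative sketch, not generated code):
  //
  //   var o = { a: 1, get b() {}, [k]: 2, c: 3 };
  //
  // 'a' and the 'b' accessor form the static part handled above, while [k]
  // and every property after it (here 'c') form the dynamic part compiled by
  // the loop below.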
  for (; property_index < expr->properties()->length(); property_index++) {
    ObjectLiteral::Property* property = expr->properties()->at(property_index);

    Expression* value = property->value();
    if (!result_saved) {
      PushOperand(rax);  // Save result on the stack
      result_saved = true;
    }

    PushOperand(Operand(rsp, 0));  // Duplicate receiver.

    if (property->kind() == ObjectLiteral::Property::PROTOTYPE) {
      DCHECK(!property->is_computed_name());
      VisitForStackValue(value);
      DCHECK(property->emit_store());
      CallRuntimeWithOperands(Runtime::kInternalSetPrototype);
      PrepareForBailoutForId(expr->GetIdForPropertySet(property_index),
                             BailoutState::NO_REGISTERS);
    } else {
      EmitPropertyKey(property, expr->GetIdForPropertyName(property_index));
      VisitForStackValue(value);
      if (NeedsHomeObject(value)) {
        EmitSetHomeObject(value, 2, property->GetSlot());
      }

      switch (property->kind()) {
        case ObjectLiteral::Property::CONSTANT:
        case ObjectLiteral::Property::MATERIALIZED_LITERAL:
        case ObjectLiteral::Property::COMPUTED:
          if (property->emit_store()) {
            PushOperand(Smi::FromInt(NONE));
            PushOperand(Smi::FromInt(property->NeedsSetFunctionName()));
            CallRuntimeWithOperands(Runtime::kDefineDataPropertyInLiteral);
          } else {
            DropOperands(3);
          }
          break;

        case ObjectLiteral::Property::PROTOTYPE:
          UNREACHABLE();
          break;

        case ObjectLiteral::Property::GETTER:
          PushOperand(Smi::FromInt(NONE));
          CallRuntimeWithOperands(Runtime::kDefineGetterPropertyUnchecked);
          break;

        case ObjectLiteral::Property::SETTER:
          PushOperand(Smi::FromInt(NONE));
          CallRuntimeWithOperands(Runtime::kDefineSetterPropertyUnchecked);
          break;
      }
    }
  }

  if (result_saved) {
    context()->PlugTOS();
  } else {
    context()->Plug(rax);
  }
}


void FullCodeGenerator::VisitArrayLiteral(ArrayLiteral* expr) {
  Comment cmnt(masm_, "[ ArrayLiteral");

  Handle<FixedArray> constant_elements = expr->constant_elements();
  bool has_constant_fast_elements =
      IsFastObjectElementsKind(expr->constant_elements_kind());

  AllocationSiteMode allocation_site_mode = TRACK_ALLOCATION_SITE;
  if (has_constant_fast_elements && !FLAG_allocation_site_pretenuring) {
    // If the only customer of allocation sites is transitioning, then
    // we can turn it off if we don't have anywhere else to transition to.
    allocation_site_mode = DONT_TRACK_ALLOCATION_SITE;
  }

  if (MustCreateArrayLiteralWithRuntime(expr)) {
    __ Push(Operand(rbp, JavaScriptFrameConstants::kFunctionOffset));
    __ Push(Smi::FromInt(expr->literal_index()));
    __ Push(constant_elements);
    __ Push(Smi::FromInt(expr->ComputeFlags()));
    __ CallRuntime(Runtime::kCreateArrayLiteral);
  } else {
    __ movp(rax, Operand(rbp, JavaScriptFrameConstants::kFunctionOffset));
    __ Move(rbx, Smi::FromInt(expr->literal_index()));
    __ Move(rcx, constant_elements);
    FastCloneShallowArrayStub stub(isolate(), allocation_site_mode);
    __ CallStub(&stub);
  }
  PrepareForBailoutForId(expr->CreateLiteralId(), BailoutState::TOS_REGISTER);

  bool result_saved = false;  // Is the result saved to the stack?
  ZoneList<Expression*>* subexprs = expr->values();
  int length = subexprs->length();

  // Emit code to evaluate all the non-constant subexpressions and to store
  // them into the newly cloned array.
  int array_index = 0;
  for (; array_index < length; array_index++) {
    Expression* subexpr = subexprs->at(array_index);
    DCHECK(!subexpr->IsSpread());

    // If the subexpression is a literal or a simple materialized literal it
    // is already set in the cloned array.
    if (CompileTimeValue::IsCompileTimeValue(subexpr)) continue;

    if (!result_saved) {
      PushOperand(rax);  // array literal
      result_saved = true;
    }
    VisitForAccumulatorValue(subexpr);

    __ Move(StoreDescriptor::NameRegister(), Smi::FromInt(array_index));
    __ movp(StoreDescriptor::ReceiverRegister(), Operand(rsp, 0));
    EmitLoadStoreICSlot(expr->LiteralFeedbackSlot());
    Handle<Code> ic =
        CodeFactory::KeyedStoreIC(isolate(), language_mode()).code();
    CallIC(ic);

    PrepareForBailoutForId(expr->GetIdForElement(array_index),
                           BailoutState::NO_REGISTERS);
  }
  // If the array literal contains spread expressions, it has two parts. The
  // first part is the "static" array, which has a literal index and is
  // handled above. The second part is everything from the first spread
  // expression (inclusive) onwards; these elements get appended to the array.
  // Note that the number of elements an iterable produces is unknown ahead
  // of time.
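  // For example (an illustrative sketch, not generated code):
  //
  //   var a = [1, 2, ...xs, 3];
  //
  // 1 and 2 are stored through the keyed store IC above, while ...xs and 3
  // are appended one at a time via %AppendElement in the loop below.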
  if (array_index < length && result_saved) {
    PopOperand(rax);
    result_saved = false;
  }
  for (; array_index < length; array_index++) {
    Expression* subexpr = subexprs->at(array_index);

    PushOperand(rax);
    DCHECK(!subexpr->IsSpread());
    VisitForStackValue(subexpr);
    CallRuntimeWithOperands(Runtime::kAppendElement);

    PrepareForBailoutForId(expr->GetIdForElement(array_index),
                           BailoutState::NO_REGISTERS);
  }

  if (result_saved) {
    context()->PlugTOS();
  } else {
    context()->Plug(rax);
  }
}


void FullCodeGenerator::VisitAssignment(Assignment* expr) {
  DCHECK(expr->target()->IsValidReferenceExpressionOrThis());

  Comment cmnt(masm_, "[ Assignment");

  Property* property = expr->target()->AsProperty();
  LhsKind assign_type = Property::GetAssignType(property);

  // Evaluate LHS expression.
  switch (assign_type) {
    case VARIABLE:
      // Nothing to do here.
      break;
    case NAMED_PROPERTY:
      if (expr->is_compound()) {
        // We need the receiver both on the stack and in the register.
        VisitForStackValue(property->obj());
        __ movp(LoadDescriptor::ReceiverRegister(), Operand(rsp, 0));
      } else {
        VisitForStackValue(property->obj());
      }
      break;
    case NAMED_SUPER_PROPERTY:
      VisitForStackValue(
          property->obj()->AsSuperPropertyReference()->this_var());
      VisitForAccumulatorValue(
          property->obj()->AsSuperPropertyReference()->home_object());
      PushOperand(result_register());
      if (expr->is_compound()) {
        PushOperand(MemOperand(rsp, kPointerSize));
        PushOperand(result_register());
      }
      break;
    case KEYED_SUPER_PROPERTY:
      VisitForStackValue(
          property->obj()->AsSuperPropertyReference()->this_var());
      VisitForStackValue(
          property->obj()->AsSuperPropertyReference()->home_object());
      VisitForAccumulatorValue(property->key());
      PushOperand(result_register());
      if (expr->is_compound()) {
        PushOperand(MemOperand(rsp, 2 * kPointerSize));
        PushOperand(MemOperand(rsp, 2 * kPointerSize));
        PushOperand(result_register());
      }
      break;
    case KEYED_PROPERTY: {
      if (expr->is_compound()) {
        VisitForStackValue(property->obj());
        VisitForStackValue(property->key());
        __ movp(LoadDescriptor::ReceiverRegister(), Operand(rsp, kPointerSize));
        __ movp(LoadDescriptor::NameRegister(), Operand(rsp, 0));
      } else {
        VisitForStackValue(property->obj());
        VisitForStackValue(property->key());
      }
      break;
    }
  }

  // For compound assignments we need another deoptimization point after the
  // variable/property load.
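  // For example (a sketch): for "o.x += 1", the receiver was pushed above,
  // the current value of o.x is loaded here as its own deopt point, the
  // addition is performed, and only then is the result stored back below.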
  if (expr->is_compound()) {
    { AccumulatorValueContext context(this);
      switch (assign_type) {
        case VARIABLE:
          EmitVariableLoad(expr->target()->AsVariableProxy());
          PrepareForBailout(expr->target(), BailoutState::TOS_REGISTER);
          break;
        case NAMED_PROPERTY:
          EmitNamedPropertyLoad(property);
          PrepareForBailoutForId(property->LoadId(),
                                 BailoutState::TOS_REGISTER);
          break;
        case NAMED_SUPER_PROPERTY:
          EmitNamedSuperPropertyLoad(property);
          PrepareForBailoutForId(property->LoadId(),
                                 BailoutState::TOS_REGISTER);
          break;
        case KEYED_SUPER_PROPERTY:
          EmitKeyedSuperPropertyLoad(property);
          PrepareForBailoutForId(property->LoadId(),
                                 BailoutState::TOS_REGISTER);
          break;
        case KEYED_PROPERTY:
          EmitKeyedPropertyLoad(property);
          PrepareForBailoutForId(property->LoadId(),
                                 BailoutState::TOS_REGISTER);
          break;
      }
    }

    Token::Value op = expr->binary_op();
    PushOperand(rax);  // Left operand goes on the stack.
    VisitForAccumulatorValue(expr->value());

    AccumulatorValueContext context(this);
    if (ShouldInlineSmiCase(op)) {
      EmitInlineSmiBinaryOp(expr->binary_operation(),
                            op,
                            expr->target(),
                            expr->value());
    } else {
      EmitBinaryOp(expr->binary_operation(), op);
    }
    // Deoptimization point in case the binary operation may have side effects.
    PrepareForBailout(expr->binary_operation(), BailoutState::TOS_REGISTER);
  } else {
    VisitForAccumulatorValue(expr->value());
  }

  SetExpressionPosition(expr);

  // Store the value.
  switch (assign_type) {
    case VARIABLE:
      EmitVariableAssignment(expr->target()->AsVariableProxy()->var(),
                             expr->op(), expr->AssignmentSlot());
      PrepareForBailoutForId(expr->AssignmentId(), BailoutState::TOS_REGISTER);
      context()->Plug(rax);
      break;
    case NAMED_PROPERTY:
      EmitNamedPropertyAssignment(expr);
      break;
    case NAMED_SUPER_PROPERTY:
      EmitNamedSuperPropertyStore(property);
      context()->Plug(rax);
      break;
    case KEYED_SUPER_PROPERTY:
      EmitKeyedSuperPropertyStore(property);
      context()->Plug(rax);
      break;
    case KEYED_PROPERTY:
      EmitKeyedPropertyAssignment(expr);
      break;
  }
}


void FullCodeGenerator::VisitYield(Yield* expr) {
  Comment cmnt(masm_, "[ Yield");
  SetExpressionPosition(expr);

  // Evaluate yielded value first; the initial iterator definition depends on
  // this. It stays on the stack while we update the iterator.
  VisitForStackValue(expr->expression());

  Label suspend, continuation, post_runtime, resume, exception;

  __ jmp(&suspend);
  __ bind(&continuation);
  // When we arrive here, rax holds the generator object.
  __ RecordGeneratorContinuation();
  __ movp(rbx, FieldOperand(rax, JSGeneratorObject::kResumeModeOffset));
  __ movp(rax, FieldOperand(rax, JSGeneratorObject::kInputOffset));
  STATIC_ASSERT(JSGeneratorObject::kNext < JSGeneratorObject::kReturn);
  STATIC_ASSERT(JSGeneratorObject::kThrow > JSGeneratorObject::kReturn);
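  // rbx now holds the resume mode. The asserts above guarantee the ordering
  // kNext < kReturn < kThrow, so a single compare against kReturn classifies
  // all three cases: less => normal next (resume), equal => return,
  // greater => throw.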
  __ SmiCompare(rbx, Smi::FromInt(JSGeneratorObject::kReturn));
  __ j(less, &resume);
  __ Push(result_register());
  __ j(greater, &exception);
  EmitCreateIteratorResult(true);
  EmitUnwindAndReturn();

  __ bind(&exception);
  __ CallRuntime(Runtime::kThrow);

  __ bind(&suspend);
  OperandStackDepthIncrement(1);  // Not popped on this path.
  VisitForAccumulatorValue(expr->generator_object());
  DCHECK(continuation.pos() > 0 && Smi::IsValid(continuation.pos()));
  __ Move(FieldOperand(rax, JSGeneratorObject::kContinuationOffset),
          Smi::FromInt(continuation.pos()));
  __ movp(FieldOperand(rax, JSGeneratorObject::kContextOffset), rsi);
  __ movp(rcx, rsi);
  __ RecordWriteField(rax, JSGeneratorObject::kContextOffset, rcx, rdx,
                      kDontSaveFPRegs);
  __ leap(rbx, Operand(rbp, StandardFrameConstants::kExpressionsOffset));
  __ cmpp(rsp, rbx);
  __ j(equal, &post_runtime);
  __ Push(rax);  // generator object
  __ CallRuntime(Runtime::kSuspendJSGeneratorObject, 1);
  RestoreContext();
  __ bind(&post_runtime);

  PopOperand(result_register());
  EmitReturnSequence();

  __ bind(&resume);
  context()->Plug(result_register());
}

void FullCodeGenerator::PushOperand(MemOperand operand) {
  OperandStackDepthIncrement(1);
  __ Push(operand);
}

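// Debug-mode invariant check (a sketch of the invariant): the tracked
// operand_stack_depth_ must match the actual frame height, i.e. the distance
// from rbp to rsp should equal the fixed frame size plus one pointer per
// tracked operand.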
void FullCodeGenerator::EmitOperandStackDepthCheck() {
  if (FLAG_debug_code) {
    int expected_diff = StandardFrameConstants::kFixedFrameSizeFromFp +
                        operand_stack_depth_ * kPointerSize;
    __ movp(rax, rbp);
    __ subp(rax, rsp);
    __ cmpp(rax, Immediate(expected_diff));
    __ Assert(equal, kUnexpectedStackDepth);
  }
}

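// Allocates a JSIteratorResult in rax, i.e. an object of the shape
// { value: <popped from the operand stack>, done: <the 'done' argument> },
// falling back to %AllocateInNewSpace when inline allocation fails.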
void FullCodeGenerator::EmitCreateIteratorResult(bool done) {
  Label allocate, done_allocate;

  __ Allocate(JSIteratorResult::kSize, rax, rcx, rdx, &allocate,
              NO_ALLOCATION_FLAGS);
  __ jmp(&done_allocate, Label::kNear);

  __ bind(&allocate);
  __ Push(Smi::FromInt(JSIteratorResult::kSize));
  __ CallRuntime(Runtime::kAllocateInNewSpace);

  __ bind(&done_allocate);
  __ LoadNativeContextSlot(Context::ITERATOR_RESULT_MAP_INDEX, rbx);
  __ movp(FieldOperand(rax, HeapObject::kMapOffset), rbx);
  __ LoadRoot(rbx, Heap::kEmptyFixedArrayRootIndex);
  __ movp(FieldOperand(rax, JSObject::kPropertiesOffset), rbx);
  __ movp(FieldOperand(rax, JSObject::kElementsOffset), rbx);
  __ Pop(FieldOperand(rax, JSIteratorResult::kValueOffset));
  __ LoadRoot(FieldOperand(rax, JSIteratorResult::kDoneOffset),
              done ? Heap::kTrueValueRootIndex : Heap::kFalseValueRootIndex);
  STATIC_ASSERT(JSIteratorResult::kSize == 5 * kPointerSize);
  OperandStackDepthDecrement(1);
}


void FullCodeGenerator::EmitInlineSmiBinaryOp(BinaryOperation* expr,
                                              Token::Value op,
                                              Expression* left,
                                              Expression* right) {
  // Do combined smi check of the operands. Left operand is on the
  // stack (popped into rdx). Right operand is in rax but moved into
  // rcx to make the shifts easier.
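  // The combined check works because smis have a zero tag bit: or-ing the
  // operands leaves the tag bit clear only if it was clear in both, so a
  // single EmitJumpIfSmi on the or-ed value tests both operands at once.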
  Label done, stub_call, smi_case;
  PopOperand(rdx);
  __ movp(rcx, rax);
  __ orp(rax, rdx);
  JumpPatchSite patch_site(masm_);
  patch_site.EmitJumpIfSmi(rax, &smi_case, Label::kNear);

  __ bind(&stub_call);
  __ movp(rax, rcx);
  Handle<Code> code = CodeFactory::BinaryOpIC(isolate(), op).code();
  CallIC(code, expr->BinaryOperationFeedbackId());
  patch_site.EmitPatchInfo();
  __ jmp(&done, Label::kNear);

  __ bind(&smi_case);
  switch (op) {
    case Token::SAR:
      __ SmiShiftArithmeticRight(rax, rdx, rcx);
      break;
    case Token::SHL:
      __ SmiShiftLeft(rax, rdx, rcx, &stub_call);
      break;
    case Token::SHR:
      __ SmiShiftLogicalRight(rax, rdx, rcx, &stub_call);
      break;
    case Token::ADD:
      __ SmiAdd(rax, rdx, rcx, &stub_call);
      break;
    case Token::SUB:
      __ SmiSub(rax, rdx, rcx, &stub_call);
      break;
    case Token::MUL:
      __ SmiMul(rax, rdx, rcx, &stub_call);
      break;
    case Token::BIT_OR:
      __ SmiOr(rax, rdx, rcx);
      break;
    case Token::BIT_AND:
      __ SmiAnd(rax, rdx, rcx);
      break;
    case Token::BIT_XOR:
      __ SmiXor(rax, rdx, rcx);
      break;
    default:
      UNREACHABLE();
      break;
  }

  __ bind(&done);
  context()->Plug(rax);
}


void FullCodeGenerator::EmitClassDefineProperties(ClassLiteral* lit) {
  for (int i = 0; i < lit->properties()->length(); i++) {
    ObjectLiteral::Property* property = lit->properties()->at(i);
    Expression* value = property->value();

    if (property->is_static()) {
      PushOperand(Operand(rsp, kPointerSize));  // constructor
    } else {
      PushOperand(Operand(rsp, 0));  // prototype
    }
    EmitPropertyKey(property, lit->GetIdForProperty(i));

    // The static prototype property is read-only. We handle the non-computed
    // property name case in the parser. Since this is the only case where we
    // need to check for an own read-only property, we special-case it here
    // instead of performing the check for every property.
    if (property->is_static() && property->is_computed_name()) {
      __ CallRuntime(Runtime::kThrowIfStaticPrototype);
      __ Push(rax);
    }

    VisitForStackValue(value);
    if (NeedsHomeObject(value)) {
      EmitSetHomeObject(value, 2, property->GetSlot());
    }

    switch (property->kind()) {
      case ObjectLiteral::Property::CONSTANT:
      case ObjectLiteral::Property::MATERIALIZED_LITERAL:
      case ObjectLiteral::Property::PROTOTYPE:
        UNREACHABLE();
      case ObjectLiteral::Property::COMPUTED:
        PushOperand(Smi::FromInt(DONT_ENUM));
        PushOperand(Smi::FromInt(property->NeedsSetFunctionName()));
        CallRuntimeWithOperands(Runtime::kDefineDataPropertyInLiteral);
        break;

      case ObjectLiteral::Property::GETTER:
        PushOperand(Smi::FromInt(DONT_ENUM));
        CallRuntimeWithOperands(Runtime::kDefineGetterPropertyUnchecked);
        break;

      case ObjectLiteral::Property::SETTER:
        PushOperand(Smi::FromInt(DONT_ENUM));
        CallRuntimeWithOperands(Runtime::kDefineSetterPropertyUnchecked);
        break;

      default:
        UNREACHABLE();
    }
  }
}


void FullCodeGenerator::EmitBinaryOp(BinaryOperation* expr, Token::Value op) {
  PopOperand(rdx);
  Handle<Code> code = CodeFactory::BinaryOpIC(isolate(), op).code();
  JumpPatchSite patch_site(masm_);  // unbound, signals no inlined smi code.
  CallIC(code, expr->BinaryOperationFeedbackId());
  patch_site.EmitPatchInfo();
  context()->Plug(rax);
}


void FullCodeGenerator::EmitAssignment(Expression* expr,
                                       FeedbackVectorSlot slot) {
  DCHECK(expr->IsValidReferenceExpressionOrThis());

  Property* prop = expr->AsProperty();
  LhsKind assign_type = Property::GetAssignType(prop);

  switch (assign_type) {
    case VARIABLE: {
      Variable* var = expr->AsVariableProxy()->var();
      EffectContext context(this);
      EmitVariableAssignment(var, Token::ASSIGN, slot);
      break;
    }
    case NAMED_PROPERTY: {
      PushOperand(rax);  // Preserve value.
      VisitForAccumulatorValue(prop->obj());
      __ Move(StoreDescriptor::ReceiverRegister(), rax);
      PopOperand(StoreDescriptor::ValueRegister());  // Restore value.
      __ Move(StoreDescriptor::NameRegister(),
              prop->key()->AsLiteral()->value());
      EmitLoadStoreICSlot(slot);
      CallStoreIC();
      break;
    }
    case NAMED_SUPER_PROPERTY: {
      PushOperand(rax);
      VisitForStackValue(prop->obj()->AsSuperPropertyReference()->this_var());
      VisitForAccumulatorValue(
          prop->obj()->AsSuperPropertyReference()->home_object());
      // stack: value, this; rax: home_object
      Register scratch = rcx;
      Register scratch2 = rdx;
      __ Move(scratch, result_register());  // home_object
      __ movp(rax, MemOperand(rsp, kPointerSize));  // value
      __ movp(scratch2, MemOperand(rsp, 0));  // this
      __ movp(MemOperand(rsp, kPointerSize), scratch2);  // this
      __ movp(MemOperand(rsp, 0), scratch);  // home_object
      // stack: this, home_object; rax: value
      EmitNamedSuperPropertyStore(prop);
      break;
    }
    case KEYED_SUPER_PROPERTY: {
      PushOperand(rax);
      VisitForStackValue(prop->obj()->AsSuperPropertyReference()->this_var());
      VisitForStackValue(
          prop->obj()->AsSuperPropertyReference()->home_object());
      VisitForAccumulatorValue(prop->key());
      Register scratch = rcx;
      Register scratch2 = rdx;
      __ movp(scratch2, MemOperand(rsp, 2 * kPointerSize));  // value
      // stack: value, this, home_object; rax: key, rdx: value
      __ movp(scratch, MemOperand(rsp, kPointerSize));  // this
      __ movp(MemOperand(rsp, 2 * kPointerSize), scratch);
      __ movp(scratch, MemOperand(rsp, 0));  // home_object
      __ movp(MemOperand(rsp, kPointerSize), scratch);
      __ movp(MemOperand(rsp, 0), rax);
      __ Move(rax, scratch2);
      // stack: this, home_object, key; rax: value.
      EmitKeyedSuperPropertyStore(prop);
      break;
    }
    case KEYED_PROPERTY: {
      PushOperand(rax);  // Preserve value.
      VisitForStackValue(prop->obj());
      VisitForAccumulatorValue(prop->key());
      __ Move(StoreDescriptor::NameRegister(), rax);
      PopOperand(StoreDescriptor::ReceiverRegister());
      PopOperand(StoreDescriptor::ValueRegister());  // Restore value.
      EmitLoadStoreICSlot(slot);
      Handle<Code> ic =
          CodeFactory::KeyedStoreIC(isolate(), language_mode()).code();
      CallIC(ic);
      break;
    }
  }
  context()->Plug(rax);
}

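// Stores rax into the variable's slot. Context slots live in heap-allocated
// Context objects, so unlike stack slots they need the record-write barrier
// below to keep the incremental marker aware of the new reference.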
void FullCodeGenerator::EmitStoreToStackLocalOrContextSlot(
    Variable* var, MemOperand location) {
  __ movp(location, rax);
  if (var->IsContextSlot()) {
    __ movp(rdx, rax);
    __ RecordWriteContextSlot(
        rcx, Context::SlotOffset(var->index()), rdx, rbx, kDontSaveFPRegs);
  }
}


void FullCodeGenerator::EmitVariableAssignment(Variable* var, Token::Value op,
                                               FeedbackVectorSlot slot) {
  if (var->IsUnallocated()) {
    // Global var, const, or let.
    __ Move(StoreDescriptor::NameRegister(), var->name());
    __ LoadGlobalObject(StoreDescriptor::ReceiverRegister());
    EmitLoadStoreICSlot(slot);
    CallStoreIC();

  } else if (var->mode() == LET && op != Token::INIT) {
    // Non-initializing assignment to let variable needs a write barrier.
    DCHECK(!var->IsLookupSlot());
    DCHECK(var->IsStackAllocated() || var->IsContextSlot());
    Label assign;
    MemOperand location = VarOperand(var, rcx);
    __ movp(rdx, location);
    __ CompareRoot(rdx, Heap::kTheHoleValueRootIndex);
    __ j(not_equal, &assign, Label::kNear);
    __ Push(var->name());
    __ CallRuntime(Runtime::kThrowReferenceError);
    __ bind(&assign);
    EmitStoreToStackLocalOrContextSlot(var, location);

  } else if (var->mode() == CONST && op != Token::INIT) {
    // Assignment to const variable needs a write barrier.
    DCHECK(!var->IsLookupSlot());
    DCHECK(var->IsStackAllocated() || var->IsContextSlot());
    Label const_error;
    MemOperand location = VarOperand(var, rcx);
    __ movp(rdx, location);
    __ CompareRoot(rdx, Heap::kTheHoleValueRootIndex);
    __ j(not_equal, &const_error, Label::kNear);
    __ Push(var->name());
    __ CallRuntime(Runtime::kThrowReferenceError);
    __ bind(&const_error);
    __ CallRuntime(Runtime::kThrowConstAssignError);

  } else if (var->is_this() && var->mode() == CONST && op == Token::INIT) {
    // Initializing assignment to const {this} needs a write barrier.
    DCHECK(var->IsStackAllocated() || var->IsContextSlot());
    Label uninitialized_this;
    MemOperand location = VarOperand(var, rcx);
    __ movp(rdx, location);
    __ CompareRoot(rdx, Heap::kTheHoleValueRootIndex);
    __ j(equal, &uninitialized_this);
    __ Push(var->name());
    __ CallRuntime(Runtime::kThrowReferenceError);
    __ bind(&uninitialized_this);
    EmitStoreToStackLocalOrContextSlot(var, location);

  } else if (!var->is_const_mode() || op == Token::INIT) {
    if (var->IsLookupSlot()) {
      // Assignment to var.
      __ Push(var->name());
      __ Push(rax);
      __ CallRuntime(is_strict(language_mode())
                         ? Runtime::kStoreLookupSlot_Strict
                         : Runtime::kStoreLookupSlot_Sloppy);
    } else {
      // Assignment to var or initializing assignment to let/const in harmony
      // mode.
      DCHECK(var->IsStackAllocated() || var->IsContextSlot());
      MemOperand location = VarOperand(var, rcx);
      if (FLAG_debug_code && var->mode() == LET && op == Token::INIT) {
        // Check for an uninitialized let binding.
        __ movp(rdx, location);
        __ CompareRoot(rdx, Heap::kTheHoleValueRootIndex);
        __ Check(equal, kLetBindingReInitialization);
      }
      EmitStoreToStackLocalOrContextSlot(var, location);
    }

  } else {
    DCHECK(var->mode() == CONST_LEGACY && op != Token::INIT);
    if (is_strict(language_mode())) {
      __ CallRuntime(Runtime::kThrowConstAssignError);
    }
    // Silently ignore store in sloppy mode.
  }
}


void FullCodeGenerator::EmitNamedPropertyAssignment(Assignment* expr) {
  // Assignment to a property, using a named store IC.
  Property* prop = expr->target()->AsProperty();
  DCHECK(prop != NULL);
  DCHECK(prop->key()->IsLiteral());

  __ Move(StoreDescriptor::NameRegister(), prop->key()->AsLiteral()->value());
  PopOperand(StoreDescriptor::ReceiverRegister());
  EmitLoadStoreICSlot(expr->AssignmentSlot());
  CallStoreIC();

  PrepareForBailoutForId(expr->AssignmentId(), BailoutState::TOS_REGISTER);
  context()->Plug(rax);
}


void FullCodeGenerator::EmitNamedSuperPropertyStore(Property* prop) {
  // Assignment to named property of super.
  // rax : value
  // stack : receiver ('this'), home_object
  DCHECK(prop != NULL);
  Literal* key = prop->key()->AsLiteral();
  DCHECK(key != NULL);

  PushOperand(key->value());
  PushOperand(rax);
  CallRuntimeWithOperands(is_strict(language_mode())
                              ? Runtime::kStoreToSuper_Strict
                              : Runtime::kStoreToSuper_Sloppy);
}


void FullCodeGenerator::EmitKeyedSuperPropertyStore(Property* prop) {
  // Assignment to keyed property of super.
  // rax : value
  // stack : receiver ('this'), home_object, key
  DCHECK(prop != NULL);

  PushOperand(rax);
  CallRuntimeWithOperands(is_strict(language_mode())
                              ? Runtime::kStoreKeyedToSuper_Strict
                              : Runtime::kStoreKeyedToSuper_Sloppy);
}


void FullCodeGenerator::EmitKeyedPropertyAssignment(Assignment* expr) {
  // Assignment to a property, using a keyed store IC.
  PopOperand(StoreDescriptor::NameRegister());  // Key.
  PopOperand(StoreDescriptor::ReceiverRegister());
  DCHECK(StoreDescriptor::ValueRegister().is(rax));
  Handle<Code> ic =
      CodeFactory::KeyedStoreIC(isolate(), language_mode()).code();
  EmitLoadStoreICSlot(expr->AssignmentSlot());
  CallIC(ic);

  PrepareForBailoutForId(expr->AssignmentId(), BailoutState::TOS_REGISTER);
  context()->Plug(rax);
}


void FullCodeGenerator::CallIC(Handle<Code> code,
                               TypeFeedbackId ast_id) {
  ic_total_count_++;
  __ call(code, RelocInfo::CODE_TARGET, ast_id);
}


// Code common for calls using the IC.
void FullCodeGenerator::EmitCallWithLoadIC(Call* expr) {
  Expression* callee = expr->expression();

  // Get the target function.
  ConvertReceiverMode convert_mode;
  if (callee->IsVariableProxy()) {
    { StackValueContext context(this);
      EmitVariableLoad(callee->AsVariableProxy());
      PrepareForBailout(callee, BailoutState::NO_REGISTERS);
    }
    // Push undefined as receiver. This is patched in the Call builtin if it
    // is a sloppy mode method.
    PushOperand(isolate()->factory()->undefined_value());
    convert_mode = ConvertReceiverMode::kNullOrUndefined;
  } else {
    // Load the function from the receiver.
    DCHECK(callee->IsProperty());
    DCHECK(!callee->AsProperty()->IsSuperAccess());
    __ movp(LoadDescriptor::ReceiverRegister(), Operand(rsp, 0));
    EmitNamedPropertyLoad(callee->AsProperty());
    PrepareForBailoutForId(callee->AsProperty()->LoadId(),
                           BailoutState::TOS_REGISTER);
    // Push the target function under the receiver.
    PushOperand(Operand(rsp, 0));
    __ movp(Operand(rsp, kPointerSize), rax);
    convert_mode = ConvertReceiverMode::kNotNullOrUndefined;
  }

  EmitCall(expr, convert_mode);
}


void FullCodeGenerator::EmitSuperCallWithLoadIC(Call* expr) {
  Expression* callee = expr->expression();
  DCHECK(callee->IsProperty());
  Property* prop = callee->AsProperty();
  DCHECK(prop->IsSuperAccess());
  SetExpressionPosition(prop);

  Literal* key = prop->key()->AsLiteral();
  DCHECK(!key->value()->IsSmi());
  // Load the function from the receiver.
  SuperPropertyReference* super_ref = prop->obj()->AsSuperPropertyReference();
  VisitForStackValue(super_ref->home_object());
  VisitForAccumulatorValue(super_ref->this_var());
  PushOperand(rax);
  PushOperand(rax);
  PushOperand(Operand(rsp, kPointerSize * 2));
  PushOperand(key->value());

  // Stack here:
  //  - home_object
  //  - this (receiver)
  //  - this (receiver) <-- LoadFromSuper will pop here and below.
  //  - home_object
  //  - key
  CallRuntimeWithOperands(Runtime::kLoadFromSuper);
  PrepareForBailoutForId(prop->LoadId(), BailoutState::TOS_REGISTER);

  // Replace home_object with target function.
  __ movp(Operand(rsp, kPointerSize), rax);

  // Stack here:
  //  - target function
  //  - this (receiver)
  EmitCall(expr);
}


// Common code for calls using the IC.
void FullCodeGenerator::EmitKeyedCallWithLoadIC(Call* expr,
                                                Expression* key) {
  // Load the key.
  VisitForAccumulatorValue(key);

  Expression* callee = expr->expression();

  // Load the function from the receiver.
  DCHECK(callee->IsProperty());
  __ movp(LoadDescriptor::ReceiverRegister(), Operand(rsp, 0));
  __ Move(LoadDescriptor::NameRegister(), rax);
  EmitKeyedPropertyLoad(callee->AsProperty());
  PrepareForBailoutForId(callee->AsProperty()->LoadId(),
                         BailoutState::TOS_REGISTER);

  // Push the target function under the receiver.
  PushOperand(Operand(rsp, 0));
  __ movp(Operand(rsp, kPointerSize), rax);

  EmitCall(expr, ConvertReceiverMode::kNotNullOrUndefined);
}


void FullCodeGenerator::EmitKeyedSuperCallWithLoadIC(Call* expr) {
  Expression* callee = expr->expression();
  DCHECK(callee->IsProperty());
  Property* prop = callee->AsProperty();
  DCHECK(prop->IsSuperAccess());

  SetExpressionPosition(prop);
  // Load the function from the receiver.
  SuperPropertyReference* super_ref = prop->obj()->AsSuperPropertyReference();
  VisitForStackValue(super_ref->home_object());
  VisitForAccumulatorValue(super_ref->this_var());
  PushOperand(rax);
  PushOperand(rax);
  PushOperand(Operand(rsp, kPointerSize * 2));
  VisitForStackValue(prop->key());

  // Stack here:
  //  - home_object
  //  - this (receiver)
  //  - this (receiver) <-- LoadKeyedFromSuper will pop here and below.
  //  - home_object
  //  - key
  CallRuntimeWithOperands(Runtime::kLoadKeyedFromSuper);
  PrepareForBailoutForId(prop->LoadId(), BailoutState::TOS_REGISTER);

  // Replace home_object with target function.
  __ movp(Operand(rsp, kPointerSize), rax);

  // Stack here:
  //  - target function
  //  - this (receiver)
  EmitCall(expr);
}


void FullCodeGenerator::EmitCall(Call* expr, ConvertReceiverMode mode) {
  // Load the arguments.
  ZoneList<Expression*>* args = expr->arguments();
  int arg_count = args->length();
  for (int i = 0; i < arg_count; i++) {
    VisitForStackValue(args->at(i));
  }

  PrepareForBailoutForId(expr->CallId(), BailoutState::NO_REGISTERS);
  SetCallPosition(expr, expr->tail_call_mode());
  if (expr->tail_call_mode() == TailCallMode::kAllow) {
    if (FLAG_trace) {
      __ CallRuntime(Runtime::kTraceTailCall);
    }
    // Update profiling counters before the tail call since we will
    // not return to this function.
    EmitProfilingCounterHandlingForReturnSequence(true);
  }
  Handle<Code> ic =
      CodeFactory::CallIC(isolate(), arg_count, mode, expr->tail_call_mode())
          .code();
  __ Move(rdx, SmiFromSlot(expr->CallFeedbackICSlot()));
  __ movp(rdi, Operand(rsp, (arg_count + 1) * kPointerSize));
  // Don't assign a type feedback id to the IC, since type feedback is provided
  // by the vector above.
  CallIC(ic);
  OperandStackDepthDecrement(arg_count + 1);

  RecordJSReturnSite(expr);
  RestoreContext();
  // Discard the function left on TOS.
  context()->DropAndPlug(1, rax);
}

void FullCodeGenerator::EmitResolvePossiblyDirectEval(Call* expr) {
  int arg_count = expr->arguments()->length();
  // Push copy of the first argument or undefined if it doesn't exist.
  if (arg_count > 0) {
    __ Push(Operand(rsp, arg_count * kPointerSize));
  } else {
    __ PushRoot(Heap::kUndefinedValueRootIndex);
  }

  // Push the enclosing function.
  __ Push(Operand(rbp, JavaScriptFrameConstants::kFunctionOffset));

  // Push the language mode.
  __ Push(Smi::FromInt(language_mode()));

  // Push the start position of the scope the call resides in.
  __ Push(Smi::FromInt(scope()->start_position()));

  // Push the source position of the eval call.
  __ Push(Smi::FromInt(expr->position()));

  // Do the runtime call.
  __ CallRuntime(Runtime::kResolvePossiblyDirectEval);
}


// See http://www.ecma-international.org/ecma-262/6.0/#sec-function-calls.
void FullCodeGenerator::PushCalleeAndWithBaseObject(Call* expr) {
  VariableProxy* callee = expr->expression()->AsVariableProxy();
  if (callee->var()->IsLookupSlot()) {
    Label slow, done;
    SetExpressionPosition(callee);
    // Generate code for loading from variables potentially shadowed by
    // eval-introduced variables.
    EmitDynamicLookupFastCase(callee, NOT_INSIDE_TYPEOF, &slow, &done);
    __ bind(&slow);
    // Call the runtime to find the function to call (returned in rax) and
    // the object holding it (returned in rdx).
    __ Push(callee->name());
    __ CallRuntime(Runtime::kLoadLookupSlotForCall);
    PushOperand(rax);  // Function.
    PushOperand(rdx);  // Receiver.
    PrepareForBailoutForId(expr->LookupId(), BailoutState::NO_REGISTERS);

    // If fast case code has been generated, emit code to push the function
    // and receiver and have the slow path jump around this code.
    if (done.is_linked()) {
      Label call;
      __ jmp(&call, Label::kNear);
      __ bind(&done);
      // Push function.
      __ Push(rax);
      // Pass undefined as the receiver, which is the WithBaseObject of a
      // non-object environment record. If the callee is sloppy, it will patch
      // it up to be the global receiver.
      __ PushRoot(Heap::kUndefinedValueRootIndex);
      __ bind(&call);
    }
  } else {
    VisitForStackValue(callee);
    // refEnv.WithBaseObject()
    OperandStackDepthIncrement(1);
    __ PushRoot(Heap::kUndefinedValueRootIndex);
  }
}


void FullCodeGenerator::EmitPossiblyEvalCall(Call* expr) {
  // In a call to eval, we first call Runtime_ResolvePossiblyDirectEval
  // to resolve the function we need to call. Then we call the resolved
  // function using the given arguments.
  ZoneList<Expression*>* args = expr->arguments();
  int arg_count = args->length();
  PushCalleeAndWithBaseObject(expr);

  // Push the arguments.
  for (int i = 0; i < arg_count; i++) {
    VisitForStackValue(args->at(i));
  }

  // Push a copy of the function (found below the arguments) and resolve
  // eval.
  __ Push(Operand(rsp, (arg_count + 1) * kPointerSize));
  EmitResolvePossiblyDirectEval(expr);

  // Touch up the callee.
  __ movp(Operand(rsp, (arg_count + 1) * kPointerSize), rax);

  PrepareForBailoutForId(expr->EvalId(), BailoutState::NO_REGISTERS);

  SetCallPosition(expr);
  __ movp(rdi, Operand(rsp, (arg_count + 1) * kPointerSize));
  __ Set(rax, arg_count);
  __ Call(isolate()->builtins()->Call(ConvertReceiverMode::kAny,
                                      expr->tail_call_mode()),
          RelocInfo::CODE_TARGET);
  OperandStackDepthDecrement(arg_count + 1);
  RecordJSReturnSite(expr);
  RestoreContext();
  context()->DropAndPlug(1, rax);
}


void FullCodeGenerator::VisitCallNew(CallNew* expr) {
  Comment cmnt(masm_, "[ CallNew");
  // According to ECMA-262, section 11.2.2, page 44, the function
  // expression in new calls must be evaluated before the
  // arguments.

  // Push the constructor on the stack. If it's not a function, it's used as
  // the receiver for CALL_NON_FUNCTION; otherwise, the value on the stack is
  // ignored.
  DCHECK(!expr->expression()->IsSuperPropertyReference());
  VisitForStackValue(expr->expression());

  // Push the arguments ("left-to-right") on the stack.
  ZoneList<Expression*>* args = expr->arguments();
  int arg_count = args->length();
  for (int i = 0; i < arg_count; i++) {
    VisitForStackValue(args->at(i));
  }

  // Call the construct call builtin that handles allocation and
  // constructor invocation.
  SetConstructCallPosition(expr);

  // Load function and argument count into rdi and rax.
  __ Set(rax, arg_count);
  __ movp(rdi, Operand(rsp, arg_count * kPointerSize));

  // Record call targets in unoptimized code, but not in the snapshot.
  __ EmitLoadTypeFeedbackVector(rbx);
  __ Move(rdx, SmiFromSlot(expr->CallNewFeedbackSlot()));

  CallConstructStub stub(isolate());
  __ Call(stub.GetCode(), RelocInfo::CODE_TARGET);
  OperandStackDepthDecrement(arg_count + 1);
  PrepareForBailoutForId(expr->ReturnId(), BailoutState::TOS_REGISTER);
  RestoreContext();
  context()->Plug(rax);
}


void FullCodeGenerator::EmitSuperConstructorCall(Call* expr) {
  SuperCallReference* super_call_ref =
      expr->expression()->AsSuperCallReference();
  DCHECK_NOT_NULL(super_call_ref);

  // Push the super constructor target on the stack (may be null,
  // but the Construct builtin can deal with that properly).
  VisitForAccumulatorValue(super_call_ref->this_function_var());
  __ AssertFunction(result_register());
  __ movp(result_register(),
          FieldOperand(result_register(), HeapObject::kMapOffset));
  PushOperand(FieldOperand(result_register(), Map::kPrototypeOffset));

  // Push the arguments ("left-to-right") on the stack.
  ZoneList<Expression*>* args = expr->arguments();
  int arg_count = args->length();
  for (int i = 0; i < arg_count; i++) {
    VisitForStackValue(args->at(i));
  }

  // Call the construct call builtin that handles allocation and
  // constructor invocation.
  SetConstructCallPosition(expr);

  // Load new target into rdx.
  VisitForAccumulatorValue(super_call_ref->new_target_var());
  __ movp(rdx, result_register());

  // Load function and argument count into rdi and rax.
  __ Set(rax, arg_count);
  __ movp(rdi, Operand(rsp, arg_count * kPointerSize));

  __ Call(isolate()->builtins()->Construct(), RelocInfo::CODE_TARGET);
  OperandStackDepthDecrement(arg_count + 1);

  RecordJSReturnSite(expr);
  RestoreContext();
  context()->Plug(rax);
}


void FullCodeGenerator::EmitIsSmi(CallRuntime* expr) {
  ZoneList<Expression*>* args = expr->arguments();
  DCHECK(args->length() == 1);

  VisitForAccumulatorValue(args->at(0));

  Label materialize_true, materialize_false;
  Label* if_true = NULL;
  Label* if_false = NULL;
  Label* fall_through = NULL;
  context()->PrepareTest(&materialize_true, &materialize_false,
                         &if_true, &if_false, &fall_through);

  PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
  __ JumpIfSmi(rax, if_true);
  __ jmp(if_false);

  context()->Plug(if_true, if_false);
}


void FullCodeGenerator::EmitIsJSReceiver(CallRuntime* expr) {
  ZoneList<Expression*>* args = expr->arguments();
  DCHECK(args->length() == 1);

  VisitForAccumulatorValue(args->at(0));

  Label materialize_true, materialize_false;
  Label* if_true = NULL;
  Label* if_false = NULL;
  Label* fall_through = NULL;
  context()->PrepareTest(&materialize_true, &materialize_false,
                         &if_true, &if_false, &fall_through);

  __ JumpIfSmi(rax, if_false);
  __ CmpObjectType(rax, FIRST_JS_RECEIVER_TYPE, rbx);
  PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
  Split(above_equal, if_true, if_false, fall_through);

  context()->Plug(if_true, if_false);
}


void FullCodeGenerator::EmitIsArray(CallRuntime* expr) {
  ZoneList<Expression*>* args = expr->arguments();
  DCHECK(args->length() == 1);

  VisitForAccumulatorValue(args->at(0));

  Label materialize_true, materialize_false;
  Label* if_true = NULL;
  Label* if_false = NULL;
  Label* fall_through = NULL;
  context()->PrepareTest(&materialize_true, &materialize_false,
                         &if_true, &if_false, &fall_through);

  __ JumpIfSmi(rax, if_false);
  __ CmpObjectType(rax, JS_ARRAY_TYPE, rbx);
  PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
  Split(equal, if_true, if_false, fall_through);

  context()->Plug(if_true, if_false);
}


void FullCodeGenerator::EmitIsTypedArray(CallRuntime* expr) {
  ZoneList<Expression*>* args = expr->arguments();
  DCHECK(args->length() == 1);

  VisitForAccumulatorValue(args->at(0));

  Label materialize_true, materialize_false;
  Label* if_true = NULL;
  Label* if_false = NULL;
  Label* fall_through = NULL;
  context()->PrepareTest(&materialize_true, &materialize_false, &if_true,
                         &if_false, &fall_through);

  __ JumpIfSmi(rax, if_false);
  __ CmpObjectType(rax, JS_TYPED_ARRAY_TYPE, rbx);
  PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
  Split(equal, if_true, if_false, fall_through);

  context()->Plug(if_true, if_false);
}


void FullCodeGenerator::EmitIsRegExp(CallRuntime* expr) {
  ZoneList<Expression*>* args = expr->arguments();
  DCHECK(args->length() == 1);

  VisitForAccumulatorValue(args->at(0));

  Label materialize_true, materialize_false;
  Label* if_true = NULL;
  Label* if_false = NULL;
  Label* fall_through = NULL;
  context()->PrepareTest(&materialize_true, &materialize_false,
                         &if_true, &if_false, &fall_through);

  __ JumpIfSmi(rax, if_false);
  __ CmpObjectType(rax, JS_REGEXP_TYPE, rbx);
  PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
  Split(equal, if_true, if_false, fall_through);

  context()->Plug(if_true, if_false);
}


void FullCodeGenerator::EmitIsJSProxy(CallRuntime* expr) {
  ZoneList<Expression*>* args = expr->arguments();
  DCHECK(args->length() == 1);

  VisitForAccumulatorValue(args->at(0));

  Label materialize_true, materialize_false;
  Label* if_true = NULL;
  Label* if_false = NULL;
  Label* fall_through = NULL;
  context()->PrepareTest(&materialize_true, &materialize_false, &if_true,
                         &if_false, &fall_through);

  __ JumpIfSmi(rax, if_false);
  __ CmpObjectType(rax, JS_PROXY_TYPE, rbx);
  PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
  Split(equal, if_true, if_false, fall_through);

  context()->Plug(if_true, if_false);
}


void FullCodeGenerator::EmitClassOf(CallRuntime* expr) {
  ZoneList<Expression*>* args = expr->arguments();
  DCHECK(args->length() == 1);
  Label done, null, function, non_function_constructor;

  VisitForAccumulatorValue(args->at(0));

  // If the object is not a JSReceiver, we return null.
  __ JumpIfSmi(rax, &null, Label::kNear);
  STATIC_ASSERT(LAST_JS_RECEIVER_TYPE == LAST_TYPE);
  __ CmpObjectType(rax, FIRST_JS_RECEIVER_TYPE, rax);
  __ j(below, &null, Label::kNear);

  // Return 'Function' for JSFunction and JSBoundFunction objects.
  __ CmpInstanceType(rax, FIRST_FUNCTION_TYPE);
  STATIC_ASSERT(LAST_FUNCTION_TYPE == LAST_TYPE);
  __ j(above_equal, &function, Label::kNear);

  // Check if the constructor in the map is a JS function.
  __ GetMapConstructor(rax, rax, rbx);
  __ CmpInstanceType(rbx, JS_FUNCTION_TYPE);
  __ j(not_equal, &non_function_constructor, Label::kNear);

  // rax now contains the constructor function. Grab the
  // instance class name from there.
  __ movp(rax, FieldOperand(rax, JSFunction::kSharedFunctionInfoOffset));
  __ movp(rax, FieldOperand(rax, SharedFunctionInfo::kInstanceClassNameOffset));
  __ jmp(&done, Label::kNear);

  // Non-JS objects have class null.
  __ bind(&null);
  __ LoadRoot(rax, Heap::kNullValueRootIndex);
  __ jmp(&done, Label::kNear);

  // Functions have class 'Function'.
  __ bind(&function);
  __ LoadRoot(rax, Heap::kFunction_stringRootIndex);
  __ jmp(&done, Label::kNear);

  // Objects with a non-function constructor have class 'Object'.
  __ bind(&non_function_constructor);
  __ LoadRoot(rax, Heap::kObject_stringRootIndex);

  // All done.
  __ bind(&done);

  context()->Plug(rax);
}


void FullCodeGenerator::EmitValueOf(CallRuntime* expr) {
  ZoneList<Expression*>* args = expr->arguments();
  DCHECK(args->length() == 1);

  VisitForAccumulatorValue(args->at(0));  // Load the object.

  Label done;
  // If the object is a smi return the object.
  __ JumpIfSmi(rax, &done);
  // If the object is not a value type, return the object.
  __ CmpObjectType(rax, JS_VALUE_TYPE, rbx);
  __ j(not_equal, &done);
  __ movp(rax, FieldOperand(rax, JSValue::kValueOffset));

  __ bind(&done);
  context()->Plug(rax);
}


void FullCodeGenerator::EmitOneByteSeqStringSetChar(CallRuntime* expr) {
  ZoneList<Expression*>* args = expr->arguments();
  DCHECK_EQ(3, args->length());

  Register string = rax;
  Register index = rbx;
  Register value = rcx;

  VisitForStackValue(args->at(0));        // index
  VisitForStackValue(args->at(1));        // value
  VisitForAccumulatorValue(args->at(2));  // string
  PopOperand(value);
  PopOperand(index);

  if (FLAG_debug_code) {
    __ Check(__ CheckSmi(value), kNonSmiValue);
    __ Check(__ CheckSmi(index), kNonSmiValue);
  }

  __ SmiToInteger32(value, value);
  __ SmiToInteger32(index, index);

  if (FLAG_debug_code) {
    static const uint32_t one_byte_seq_type = kSeqStringTag | kOneByteStringTag;
    __ EmitSeqStringSetCharCheck(string, index, value, one_byte_seq_type);
  }

  __ movb(FieldOperand(string, index, times_1, SeqOneByteString::kHeaderSize),
          value);
  context()->Plug(string);
}


void FullCodeGenerator::EmitTwoByteSeqStringSetChar(CallRuntime* expr) {
  ZoneList<Expression*>* args = expr->arguments();
  DCHECK_EQ(3, args->length());

  Register string = rax;
  Register index = rbx;
  Register value = rcx;

  VisitForStackValue(args->at(0));        // index
  VisitForStackValue(args->at(1));        // value
  VisitForAccumulatorValue(args->at(2));  // string
  PopOperand(value);
  PopOperand(index);

  if (FLAG_debug_code) {
    __ Check(__ CheckSmi(value), kNonSmiValue);
    __ Check(__ CheckSmi(index), kNonSmiValue);
  }

  __ SmiToInteger32(value, value);
  __ SmiToInteger32(index, index);

  if (FLAG_debug_code) {
    static const uint32_t two_byte_seq_type = kSeqStringTag | kTwoByteStringTag;
    __ EmitSeqStringSetCharCheck(string, index, value, two_byte_seq_type);
  }

  __ movw(FieldOperand(string, index, times_2, SeqTwoByteString::kHeaderSize),
          value);
  context()->Plug(rax);
}


void FullCodeGenerator::EmitStringCharFromCode(CallRuntime* expr) {
  ZoneList<Expression*>* args = expr->arguments();
  DCHECK(args->length() == 1);

  VisitForAccumulatorValue(args->at(0));

  Label done;
  StringCharFromCodeGenerator generator(rax, rbx);
  generator.GenerateFast(masm_);
  __ jmp(&done);

  NopRuntimeCallHelper call_helper;
  generator.GenerateSlow(masm_, call_helper);

  __ bind(&done);
  context()->Plug(rbx);
}


void FullCodeGenerator::EmitStringCharCodeAt(CallRuntime* expr) {
  ZoneList<Expression*>* args = expr->arguments();
  DCHECK(args->length() == 2);

  VisitForStackValue(args->at(0));
  VisitForAccumulatorValue(args->at(1));

  Register object = rbx;
  Register index = rax;
  Register result = rdx;

  PopOperand(object);

  Label need_conversion;
  Label index_out_of_range;
  Label done;
  StringCharCodeAtGenerator generator(object,
                                      index,
                                      result,
                                      &need_conversion,
                                      &need_conversion,
                                      &index_out_of_range,
                                      STRING_INDEX_IS_NUMBER);
  generator.GenerateFast(masm_);
  __ jmp(&done);

  __ bind(&index_out_of_range);
  // When the index is out of range, the spec requires us to return
  // NaN.
  __ LoadRoot(result, Heap::kNanValueRootIndex);
  __ jmp(&done);

  __ bind(&need_conversion);
  // Move the undefined value into the result register, which will
  // trigger conversion.
  __ LoadRoot(result, Heap::kUndefinedValueRootIndex);
  __ jmp(&done);

  NopRuntimeCallHelper call_helper;
  generator.GenerateSlow(masm_, NOT_PART_OF_IC_HANDLER, call_helper);

  __ bind(&done);
  context()->Plug(result);
}


void FullCodeGenerator::EmitStringCharAt(CallRuntime* expr) {
  ZoneList<Expression*>* args = expr->arguments();
  DCHECK(args->length() == 2);

  VisitForStackValue(args->at(0));
  VisitForAccumulatorValue(args->at(1));

  Register object = rbx;
  Register index = rax;
  Register scratch = rdx;
  Register result = rax;

  PopOperand(object);

  Label need_conversion;
  Label index_out_of_range;
  Label done;
  StringCharAtGenerator generator(object,
                                  index,
                                  scratch,
                                  result,
                                  &need_conversion,
                                  &need_conversion,
                                  &index_out_of_range,
                                  STRING_INDEX_IS_NUMBER);
  generator.GenerateFast(masm_);
  __ jmp(&done);

  __ bind(&index_out_of_range);
  // When the index is out of range, the spec requires us to return
  // the empty string.
  __ LoadRoot(result, Heap::kempty_stringRootIndex);
  __ jmp(&done);

  __ bind(&need_conversion);
  // Move smi zero into the result register, which will trigger
  // conversion.
  __ Move(result, Smi::FromInt(0));
  __ jmp(&done);

  NopRuntimeCallHelper call_helper;
  generator.GenerateSlow(masm_, NOT_PART_OF_IC_HANDLER, call_helper);

  __ bind(&done);
  context()->Plug(result);
}


void FullCodeGenerator::EmitCall(CallRuntime* expr) {
  ZoneList<Expression*>* args = expr->arguments();
  DCHECK_LE(2, args->length());
  // Push target, receiver and arguments onto the stack.
  for (Expression* const arg : *args) {
    VisitForStackValue(arg);
  }
  PrepareForBailoutForId(expr->CallId(), BailoutState::NO_REGISTERS);
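  // The stack now holds the target, the receiver and the call arguments,
  // so the argument count passed to the Call builtin excludes the first two.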
  // Move target to rdi.
  int const argc = args->length() - 2;
  __ movp(rdi, Operand(rsp, (argc + 1) * kPointerSize));
  // Call the target.
  __ Set(rax, argc);
  __ Call(isolate()->builtins()->Call(), RelocInfo::CODE_TARGET);
  OperandStackDepthDecrement(argc + 1);
  RestoreContext();
  // Discard the function left on TOS.
  context()->DropAndPlug(1, rax);
}


void FullCodeGenerator::EmitHasCachedArrayIndex(CallRuntime* expr) {
  ZoneList<Expression*>* args = expr->arguments();
  DCHECK(args->length() == 1);

  VisitForAccumulatorValue(args->at(0));

  Label materialize_true, materialize_false;
  Label* if_true = NULL;
  Label* if_false = NULL;
  Label* fall_through = NULL;
  context()->PrepareTest(&materialize_true, &materialize_false,
                         &if_true, &if_false, &fall_through);

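  // A string's hash field caches an array index exactly when the
  // kContainsCachedArrayIndexMask bits are all clear.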
  __ testl(FieldOperand(rax, String::kHashFieldOffset),
           Immediate(String::kContainsCachedArrayIndexMask));
  PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
  __ j(zero, if_true);
  __ jmp(if_false);

  context()->Plug(if_true, if_false);
}


void FullCodeGenerator::EmitGetCachedArrayIndex(CallRuntime* expr) {
  ZoneList<Expression*>* args = expr->arguments();
  DCHECK(args->length() == 1);
  VisitForAccumulatorValue(args->at(0));

  __ AssertString(rax);

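  // Decode the array index cached in the string's hash field.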
  __ movl(rax, FieldOperand(rax, String::kHashFieldOffset));
  DCHECK(String::kHashShift >= kSmiTagSize);
  __ IndexFromHash(rax, rax);

  context()->Plug(rax);
}


void FullCodeGenerator::EmitGetSuperConstructor(CallRuntime* expr) {
  ZoneList<Expression*>* args = expr->arguments();
  DCHECK_EQ(1, args->length());
  VisitForAccumulatorValue(args->at(0));
  __ AssertFunction(rax);
  __ movp(rax, FieldOperand(rax, HeapObject::kMapOffset));
  __ movp(rax, FieldOperand(rax, Map::kPrototypeOffset));
  context()->Plug(rax);
}

void FullCodeGenerator::EmitDebugIsActive(CallRuntime* expr) {
  DCHECK(expr->arguments()->length() == 0);
  ExternalReference debug_is_active =
      ExternalReference::debug_is_active_address(isolate());
  __ Move(kScratchRegister, debug_is_active);
  __ movzxbp(rax, Operand(kScratchRegister, 0));
  __ Integer32ToSmi(rax, rax);
  context()->Plug(rax);
}


void FullCodeGenerator::EmitCreateIterResultObject(CallRuntime* expr) {
  ZoneList<Expression*>* args = expr->arguments();
  DCHECK_EQ(2, args->length());
  VisitForStackValue(args->at(0));
  VisitForStackValue(args->at(1));

  Label runtime, done;

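  // A JSIteratorResult is a plain JSObject whose two in-object properties
  // (value and done) make it exactly five words: map, properties, elements,
  // value, done.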
  __ Allocate(JSIteratorResult::kSize, rax, rcx, rdx, &runtime,
              NO_ALLOCATION_FLAGS);
  __ LoadNativeContextSlot(Context::ITERATOR_RESULT_MAP_INDEX, rbx);
  __ movp(FieldOperand(rax, HeapObject::kMapOffset), rbx);
  __ LoadRoot(rbx, Heap::kEmptyFixedArrayRootIndex);
  __ movp(FieldOperand(rax, JSObject::kPropertiesOffset), rbx);
  __ movp(FieldOperand(rax, JSObject::kElementsOffset), rbx);
  __ Pop(FieldOperand(rax, JSIteratorResult::kDoneOffset));
  __ Pop(FieldOperand(rax, JSIteratorResult::kValueOffset));
  STATIC_ASSERT(JSIteratorResult::kSize == 5 * kPointerSize);
  __ jmp(&done, Label::kNear);

  __ bind(&runtime);
  CallRuntimeWithOperands(Runtime::kCreateIterResultObject);

  __ bind(&done);
  context()->Plug(rax);
}


void FullCodeGenerator::EmitLoadJSRuntimeFunction(CallRuntime* expr) {
  // Push function.
  __ LoadNativeContextSlot(expr->context_index(), rax);
  PushOperand(rax);

  // Push undefined as receiver.
  OperandStackDepthIncrement(1);
  __ PushRoot(Heap::kUndefinedValueRootIndex);
}


void FullCodeGenerator::EmitCallJSRuntimeFunction(CallRuntime* expr) {
  ZoneList<Expression*>* args = expr->arguments();
  int arg_count = args->length();

  SetCallPosition(expr);
  __ movp(rdi, Operand(rsp, (arg_count + 1) * kPointerSize));
  __ Set(rax, arg_count);
  __ Call(isolate()->builtins()->Call(ConvertReceiverMode::kNullOrUndefined),
          RelocInfo::CODE_TARGET);
  OperandStackDepthDecrement(arg_count + 1);
  RestoreContext();
}


void FullCodeGenerator::VisitUnaryOperation(UnaryOperation* expr) {
  switch (expr->op()) {
    case Token::DELETE: {
      Comment cmnt(masm_, "[ UnaryOperation (DELETE)");
      Property* property = expr->expression()->AsProperty();
      VariableProxy* proxy = expr->expression()->AsVariableProxy();

      if (property != NULL) {
        VisitForStackValue(property->obj());
        VisitForStackValue(property->key());
        CallRuntimeWithOperands(is_strict(language_mode())
                                    ? Runtime::kDeleteProperty_Strict
                                    : Runtime::kDeleteProperty_Sloppy);
        context()->Plug(rax);
      } else if (proxy != NULL) {
        Variable* var = proxy->var();
        // Delete of an unqualified identifier is disallowed in strict mode
        // but "delete this" is allowed.
        bool is_this = var->HasThisName(isolate());
        DCHECK(is_sloppy(language_mode()) || is_this);
        if (var->IsUnallocatedOrGlobalSlot()) {
          __ movp(rax, NativeContextOperand());
          __ Push(ContextOperand(rax, Context::EXTENSION_INDEX));
          __ Push(var->name());
          __ CallRuntime(Runtime::kDeleteProperty_Sloppy);
          context()->Plug(rax);
        } else if (var->IsStackAllocated() || var->IsContextSlot()) {
          // Result of deleting non-global variables is false. 'this' is
          // not really a variable, though we implement it as one. The
          // subexpression does not have side effects.
          context()->Plug(is_this);
        } else {
          // Non-global variable. Call the runtime to try to delete from the
          // context where the variable was introduced.
          __ Push(var->name());
          __ CallRuntime(Runtime::kDeleteLookupSlot);
          context()->Plug(rax);
        }
      } else {
        // Result of deleting non-property, non-variable reference is true.
        // The subexpression may have side effects.
        VisitForEffect(expr->expression());
        context()->Plug(true);
      }
      break;
    }

    case Token::VOID: {
      Comment cmnt(masm_, "[ UnaryOperation (VOID)");
      VisitForEffect(expr->expression());
      context()->Plug(Heap::kUndefinedValueRootIndex);
      break;
    }

    case Token::NOT: {
      Comment cmnt(masm_, "[ UnaryOperation (NOT)");
      if (context()->IsEffect()) {
        // Unary NOT has no side effects so it's only necessary to visit the
        // subexpression. Match the optimizing compiler by not branching.
        VisitForEffect(expr->expression());
      } else if (context()->IsTest()) {
        const TestContext* test = TestContext::cast(context());
        // The labels are swapped for the recursive call.
        VisitForControl(expr->expression(),
                        test->false_label(),
                        test->true_label(),
                        test->fall_through());
        context()->Plug(test->true_label(), test->false_label());
      } else {
        // We handle value contexts explicitly rather than simply visiting
        // for control and plugging the control flow into the context,
        // because we need to prepare a pair of extra administrative AST ids
        // for the optimizing compiler.
        DCHECK(context()->IsAccumulatorValue() || context()->IsStackValue());
        Label materialize_true, materialize_false, done;
        VisitForControl(expr->expression(),
                        &materialize_false,
                        &materialize_true,
                        &materialize_true);
        if (!context()->IsAccumulatorValue()) OperandStackDepthIncrement(1);
        __ bind(&materialize_true);
        PrepareForBailoutForId(expr->MaterializeTrueId(),
                               BailoutState::NO_REGISTERS);
        if (context()->IsAccumulatorValue()) {
          __ LoadRoot(rax, Heap::kTrueValueRootIndex);
        } else {
          __ PushRoot(Heap::kTrueValueRootIndex);
        }
        __ jmp(&done, Label::kNear);
        __ bind(&materialize_false);
        PrepareForBailoutForId(expr->MaterializeFalseId(),
                               BailoutState::NO_REGISTERS);
        if (context()->IsAccumulatorValue()) {
          __ LoadRoot(rax, Heap::kFalseValueRootIndex);
        } else {
          __ PushRoot(Heap::kFalseValueRootIndex);
        }
        __ bind(&done);
      }
      break;
    }

    case Token::TYPEOF: {
      Comment cmnt(masm_, "[ UnaryOperation (TYPEOF)");
      {
        AccumulatorValueContext context(this);
        VisitForTypeofValue(expr->expression());
      }
      __ movp(rbx, rax);
      TypeofStub typeof_stub(isolate());
      __ CallStub(&typeof_stub);
      context()->Plug(rax);
      break;
    }

    default:
      UNREACHABLE();
  }
}


void FullCodeGenerator::VisitCountOperation(CountOperation* expr) {
  DCHECK(expr->expression()->IsValidReferenceExpressionOrThis());

  Comment cmnt(masm_, "[ CountOperation");

  Property* prop = expr->expression()->AsProperty();
  LhsKind assign_type = Property::GetAssignType(prop);

  // Evaluate expression and get value.
  if (assign_type == VARIABLE) {
    DCHECK(expr->expression()->AsVariableProxy()->var() != NULL);
    AccumulatorValueContext context(this);
    EmitVariableLoad(expr->expression()->AsVariableProxy());
  } else {
    // Reserve space for result of postfix operation.
    if (expr->is_postfix() && !context()->IsEffect()) {
      PushOperand(Smi::FromInt(0));
    }
    switch (assign_type) {
      case NAMED_PROPERTY: {
        VisitForStackValue(prop->obj());
        __ movp(LoadDescriptor::ReceiverRegister(), Operand(rsp, 0));
        EmitNamedPropertyLoad(prop);
        break;
      }

      case NAMED_SUPER_PROPERTY: {
        VisitForStackValue(prop->obj()->AsSuperPropertyReference()->this_var());
        VisitForAccumulatorValue(
            prop->obj()->AsSuperPropertyReference()->home_object());
        PushOperand(result_register());
        PushOperand(MemOperand(rsp, kPointerSize));
        PushOperand(result_register());
        EmitNamedSuperPropertyLoad(prop);
        break;
      }

      case KEYED_SUPER_PROPERTY: {
        VisitForStackValue(prop->obj()->AsSuperPropertyReference()->this_var());
        VisitForStackValue(
            prop->obj()->AsSuperPropertyReference()->home_object());
        VisitForAccumulatorValue(prop->key());
        PushOperand(result_register());
        PushOperand(MemOperand(rsp, 2 * kPointerSize));
        PushOperand(MemOperand(rsp, 2 * kPointerSize));
        PushOperand(result_register());
        EmitKeyedSuperPropertyLoad(prop);
        break;
      }

      case KEYED_PROPERTY: {
        VisitForStackValue(prop->obj());
        VisitForStackValue(prop->key());
        // Leave receiver on stack
        __ movp(LoadDescriptor::ReceiverRegister(), Operand(rsp, kPointerSize));
        // Copy of key, needed for later store.
        __ movp(LoadDescriptor::NameRegister(), Operand(rsp, 0));
        EmitKeyedPropertyLoad(prop);
        break;
      }

      case VARIABLE:
        UNREACHABLE();
    }
  }

  // We need a second deoptimization point after loading the value
  // in case evaluating the property load has a side effect.
  if (assign_type == VARIABLE) {
    PrepareForBailout(expr->expression(), BailoutState::TOS_REGISTER);
  } else {
    PrepareForBailoutForId(prop->LoadId(), BailoutState::TOS_REGISTER);
  }

  // Inline smi case if we are in a loop.
  Label done, stub_call;
  JumpPatchSite patch_site(masm_);
  if (ShouldInlineSmiCase(expr->op())) {
    Label slow;
    patch_site.EmitJumpIfNotSmi(rax, &slow, Label::kNear);

    // Save result for postfix expressions.
    if (expr->is_postfix()) {
      if (!context()->IsEffect()) {
        // Save the result on the stack. If we have a named or keyed property
        // we store the result under the receiver that is currently on top
        // of the stack.
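        // Each store below targets the slot reserved before the operands
        // were pushed; the offset skips the receiver, key and home object
        // still sitting on top of it.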
        switch (assign_type) {
          case VARIABLE:
            __ Push(rax);
            break;
          case NAMED_PROPERTY:
            __ movp(Operand(rsp, kPointerSize), rax);
            break;
          case NAMED_SUPER_PROPERTY:
            __ movp(Operand(rsp, 2 * kPointerSize), rax);
            break;
          case KEYED_PROPERTY:
            __ movp(Operand(rsp, 2 * kPointerSize), rax);
            break;
          case KEYED_SUPER_PROPERTY:
            __ movp(Operand(rsp, 3 * kPointerSize), rax);
            break;
        }
      }
    }

    SmiOperationConstraints constraints =
        SmiOperationConstraint::kPreserveSourceRegister |
        SmiOperationConstraint::kBailoutOnNoOverflow;
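    // With these constraints the smi operation jumps to &done on success
    // (no overflow); on overflow rax is restored and control falls through
    // to the stub call below.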
    if (expr->op() == Token::INC) {
      __ SmiAddConstant(rax, rax, Smi::FromInt(1), constraints, &done,
                        Label::kNear);
    } else {
      __ SmiSubConstant(rax, rax, Smi::FromInt(1), constraints, &done,
                        Label::kNear);
    }
    __ jmp(&stub_call, Label::kNear);
    __ bind(&slow);
  }

  // Convert old value into a number.
  ToNumberStub convert_stub(isolate());
  __ CallStub(&convert_stub);
  PrepareForBailoutForId(expr->ToNumberId(), BailoutState::TOS_REGISTER);

  // Save result for postfix expressions.
  if (expr->is_postfix()) {
    if (!context()->IsEffect()) {
      // Save the result on the stack. If we have a named or keyed property
      // we store the result under the receiver that is currently on top
      // of the stack.
      switch (assign_type) {
        case VARIABLE:
          PushOperand(rax);
          break;
        case NAMED_PROPERTY:
          __ movp(Operand(rsp, kPointerSize), rax);
          break;
        case NAMED_SUPER_PROPERTY:
          __ movp(Operand(rsp, 2 * kPointerSize), rax);
          break;
        case KEYED_PROPERTY:
          __ movp(Operand(rsp, 2 * kPointerSize), rax);
          break;
        case KEYED_SUPER_PROPERTY:
          __ movp(Operand(rsp, 3 * kPointerSize), rax);
          break;
      }
    }
  }

  SetExpressionPosition(expr);

  // Call stub for +1/-1.
  __ bind(&stub_call);
  __ movp(rdx, rax);
  __ Move(rax, Smi::FromInt(1));
  Handle<Code> code =
      CodeFactory::BinaryOpIC(isolate(), expr->binary_op()).code();
  CallIC(code, expr->CountBinOpFeedbackId());
  patch_site.EmitPatchInfo();
  __ bind(&done);

  // Store the value returned in rax.
  switch (assign_type) {
    case VARIABLE:
      if (expr->is_postfix()) {
        // Perform the assignment as if via '='.
        { EffectContext context(this);
          EmitVariableAssignment(expr->expression()->AsVariableProxy()->var(),
                                 Token::ASSIGN, expr->CountSlot());
          PrepareForBailoutForId(expr->AssignmentId(),
                                 BailoutState::TOS_REGISTER);
          context.Plug(rax);
        }
        // For all contexts except kEffect: We have the result on
        // top of the stack.
        if (!context()->IsEffect()) {
          context()->PlugTOS();
        }
      } else {
        // Perform the assignment as if via '='.
        EmitVariableAssignment(expr->expression()->AsVariableProxy()->var(),
                               Token::ASSIGN, expr->CountSlot());
        PrepareForBailoutForId(expr->AssignmentId(),
                               BailoutState::TOS_REGISTER);
        context()->Plug(rax);
      }
      break;
    case NAMED_PROPERTY: {
      __ Move(StoreDescriptor::NameRegister(),
              prop->key()->AsLiteral()->value());
      PopOperand(StoreDescriptor::ReceiverRegister());
      EmitLoadStoreICSlot(expr->CountSlot());
      CallStoreIC();
      PrepareForBailoutForId(expr->AssignmentId(), BailoutState::TOS_REGISTER);
      if (expr->is_postfix()) {
        if (!context()->IsEffect()) {
          context()->PlugTOS();
        }
      } else {
        context()->Plug(rax);
      }
      break;
    }
    case NAMED_SUPER_PROPERTY: {
      EmitNamedSuperPropertyStore(prop);
      if (expr->is_postfix()) {
        if (!context()->IsEffect()) {
          context()->PlugTOS();
        }
      } else {
        context()->Plug(rax);
      }
      break;
    }
    case KEYED_SUPER_PROPERTY: {
      EmitKeyedSuperPropertyStore(prop);
      if (expr->is_postfix()) {
        if (!context()->IsEffect()) {
          context()->PlugTOS();
        }
      } else {
        context()->Plug(rax);
      }
      break;
    }
    case KEYED_PROPERTY: {
      PopOperand(StoreDescriptor::NameRegister());
      PopOperand(StoreDescriptor::ReceiverRegister());
      Handle<Code> ic =
          CodeFactory::KeyedStoreIC(isolate(), language_mode()).code();
      EmitLoadStoreICSlot(expr->CountSlot());
      CallIC(ic);
      PrepareForBailoutForId(expr->AssignmentId(), BailoutState::TOS_REGISTER);
      if (expr->is_postfix()) {
        if (!context()->IsEffect()) {
          context()->PlugTOS();
        }
      } else {
        context()->Plug(rax);
      }
      break;
    }
  }
}


void FullCodeGenerator::EmitLiteralCompareTypeof(Expression* expr,
                                                 Expression* sub_expr,
                                                 Handle<String> check) {
  Label materialize_true, materialize_false;
  Label* if_true = NULL;
  Label* if_false = NULL;
  Label* fall_through = NULL;
  context()->PrepareTest(&materialize_true, &materialize_false,
                         &if_true, &if_false, &fall_through);

  { AccumulatorValueContext context(this);
    VisitForTypeofValue(sub_expr);
  }
  PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);

  Factory* factory = isolate()->factory();
  if (String::Equals(check, factory->number_string())) {
    __ JumpIfSmi(rax, if_true);
    __ movp(rax, FieldOperand(rax, HeapObject::kMapOffset));
    __ CompareRoot(rax, Heap::kHeapNumberMapRootIndex);
    Split(equal, if_true, if_false, fall_through);
  } else if (String::Equals(check, factory->string_string())) {
    __ JumpIfSmi(rax, if_false);
    __ CmpObjectType(rax, FIRST_NONSTRING_TYPE, rdx);
    Split(below, if_true, if_false, fall_through);
  } else if (String::Equals(check, factory->symbol_string())) {
    __ JumpIfSmi(rax, if_false);
    __ CmpObjectType(rax, SYMBOL_TYPE, rdx);
    Split(equal, if_true, if_false, fall_through);
  } else if (String::Equals(check, factory->boolean_string())) {
    __ CompareRoot(rax, Heap::kTrueValueRootIndex);
    __ j(equal, if_true);
    __ CompareRoot(rax, Heap::kFalseValueRootIndex);
    Split(equal, if_true, if_false, fall_through);
  } else if (String::Equals(check, factory->undefined_string())) {
    __ CompareRoot(rax, Heap::kNullValueRootIndex);
    __ j(equal, if_false);
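    // typeof null is "object", not "undefined", so null must not reach the
    // undetectable check below.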
    __ JumpIfSmi(rax, if_false);
    // Check for undetectable objects => true.
    __ movp(rdx, FieldOperand(rax, HeapObject::kMapOffset));
    __ testb(FieldOperand(rdx, Map::kBitFieldOffset),
             Immediate(1 << Map::kIsUndetectable));
    Split(not_zero, if_true, if_false, fall_through);
  } else if (String::Equals(check, factory->function_string())) {
    __ JumpIfSmi(rax, if_false);
    // Check for callable and not undetectable objects => true.
    __ movp(rdx, FieldOperand(rax, HeapObject::kMapOffset));
    __ movzxbl(rdx, FieldOperand(rdx, Map::kBitFieldOffset));
    __ andb(rdx,
            Immediate((1 << Map::kIsCallable) | (1 << Map::kIsUndetectable)));
    __ cmpb(rdx, Immediate(1 << Map::kIsCallable));
    Split(equal, if_true, if_false, fall_through);
  } else if (String::Equals(check, factory->object_string())) {
    __ JumpIfSmi(rax, if_false);
    __ CompareRoot(rax, Heap::kNullValueRootIndex);
    __ j(equal, if_true);
    STATIC_ASSERT(LAST_JS_RECEIVER_TYPE == LAST_TYPE);
    __ CmpObjectType(rax, FIRST_JS_RECEIVER_TYPE, rdx);
    __ j(below, if_false);
    // Check for callable or undetectable objects => false.
    __ testb(FieldOperand(rdx, Map::kBitFieldOffset),
             Immediate((1 << Map::kIsCallable) | (1 << Map::kIsUndetectable)));
    Split(zero, if_true, if_false, fall_through);
// clang-format off
#define SIMD128_TYPE(TYPE, Type, type, lane_count, lane_type) \
  } else if (String::Equals(check, factory->type##_string())) { \
    __ JumpIfSmi(rax, if_false); \
    __ movp(rax, FieldOperand(rax, HeapObject::kMapOffset)); \
    __ CompareRoot(rax, Heap::k##Type##MapRootIndex); \
    Split(equal, if_true, if_false, fall_through);
  SIMD128_TYPES(SIMD128_TYPE)
#undef SIMD128_TYPE
  // clang-format on
  } else {
    if (if_false != fall_through) __ jmp(if_false);
  }
  context()->Plug(if_true, if_false);
}


void FullCodeGenerator::VisitCompareOperation(CompareOperation* expr) {
  Comment cmnt(masm_, "[ CompareOperation");

  // First we try a fast inlined version of the compare when one of
  // the operands is a literal.
  if (TryLiteralCompare(expr)) return;

  // Always perform the comparison for its control flow. Pack the result
  // into the expression's context after the comparison is performed.
  Label materialize_true, materialize_false;
  Label* if_true = NULL;
  Label* if_false = NULL;
  Label* fall_through = NULL;
  context()->PrepareTest(&materialize_true, &materialize_false,
                         &if_true, &if_false, &fall_through);

  Token::Value op = expr->op();
  VisitForStackValue(expr->left());
  switch (op) {
    case Token::IN:
      VisitForStackValue(expr->right());
      SetExpressionPosition(expr);
      EmitHasProperty();
      PrepareForBailoutBeforeSplit(expr, false, NULL, NULL);
      __ CompareRoot(rax, Heap::kTrueValueRootIndex);
      Split(equal, if_true, if_false, fall_through);
      break;

    case Token::INSTANCEOF: {
      VisitForAccumulatorValue(expr->right());
      SetExpressionPosition(expr);
      PopOperand(rdx);
      InstanceOfStub stub(isolate());
      __ CallStub(&stub);
      PrepareForBailoutBeforeSplit(expr, false, NULL, NULL);
      __ CompareRoot(rax, Heap::kTrueValueRootIndex);
      Split(equal, if_true, if_false, fall_through);
      break;
    }

    default: {
      VisitForAccumulatorValue(expr->right());
      SetExpressionPosition(expr);
      Condition cc = CompareIC::ComputeCondition(op);
      PopOperand(rdx);

      bool inline_smi_code = ShouldInlineSmiCase(op);
      JumpPatchSite patch_site(masm_);
      if (inline_smi_code) {
        Label slow_case;
        __ movp(rcx, rdx);
        __ orp(rcx, rax);
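        // Smis have a clear tag bit, so OR-ing the operands lets a single
        // smi check on rcx test both at once.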
        patch_site.EmitJumpIfNotSmi(rcx, &slow_case, Label::kNear);
        __ cmpp(rdx, rax);
        Split(cc, if_true, if_false, NULL);
        __ bind(&slow_case);
      }

      Handle<Code> ic = CodeFactory::CompareIC(isolate(), op).code();
      CallIC(ic, expr->CompareOperationFeedbackId());
      patch_site.EmitPatchInfo();

      PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
      __ testp(rax, rax);
      Split(cc, if_true, if_false, fall_through);
    }
  }

  // Convert the result of the comparison into one expected for this
  // expression's context.
  context()->Plug(if_true, if_false);
}


void FullCodeGenerator::EmitLiteralCompareNil(CompareOperation* expr,
                                              Expression* sub_expr,
                                              NilValue nil) {
  Label materialize_true, materialize_false;
  Label* if_true = NULL;
  Label* if_false = NULL;
  Label* fall_through = NULL;
  context()->PrepareTest(&materialize_true, &materialize_false,
                         &if_true, &if_false, &fall_through);

  VisitForAccumulatorValue(sub_expr);
  PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
  if (expr->op() == Token::EQ_STRICT) {
    Heap::RootListIndex nil_value = nil == kNullValue ?
        Heap::kNullValueRootIndex :
        Heap::kUndefinedValueRootIndex;
    __ CompareRoot(rax, nil_value);
    Split(equal, if_true, if_false, fall_through);
  } else {
    __ JumpIfSmi(rax, if_false);
    __ movp(rax, FieldOperand(rax, HeapObject::kMapOffset));
    __ testb(FieldOperand(rax, Map::kBitFieldOffset),
             Immediate(1 << Map::kIsUndetectable));
    Split(not_zero, if_true, if_false, fall_through);
  }
  context()->Plug(if_true, if_false);
}


Register FullCodeGenerator::result_register() {
  return rax;
}


Register FullCodeGenerator::context_register() {
  return rsi;
}

void FullCodeGenerator::LoadFromFrameField(int frame_offset, Register value) {
  DCHECK(IsAligned(frame_offset, kPointerSize));
  __ movp(value, Operand(rbp, frame_offset));
}

void FullCodeGenerator::StoreToFrameField(int frame_offset, Register value) {
  DCHECK(IsAligned(frame_offset, kPointerSize));
  __ movp(Operand(rbp, frame_offset), value);
}


void FullCodeGenerator::LoadContextField(Register dst, int context_index) {
  __ movp(dst, ContextOperand(rsi, context_index));
}


void FullCodeGenerator::PushFunctionArgumentForContextAllocation() {
  Scope* closure_scope = scope()->ClosureScope();
  if (closure_scope->is_script_scope() ||
      closure_scope->is_module_scope()) {
    // Contexts nested in the native context have a canonical empty function
    // as their closure, not the anonymous closure containing the global
    // code.
    __ movp(rax, NativeContextOperand());
    PushOperand(ContextOperand(rax, Context::CLOSURE_INDEX));
  } else if (closure_scope->is_eval_scope()) {
    // Contexts created by a call to eval have the same closure as the
    // context calling eval, not the anonymous closure containing the eval
    // code. Fetch it from the context.
    PushOperand(ContextOperand(rsi, Context::CLOSURE_INDEX));
  } else {
    DCHECK(closure_scope->is_function_scope());
    PushOperand(Operand(rbp, JavaScriptFrameConstants::kFunctionOffset));
  }
}


// ----------------------------------------------------------------------------
// Non-local control flow support.


void FullCodeGenerator::EnterFinallyBlock() {
  DCHECK(!result_register().is(rdx));

  // Store pending message while executing finally block.
  ExternalReference pending_message_obj =
      ExternalReference::address_of_pending_message_obj(isolate());
  __ Load(rdx, pending_message_obj);
  PushOperand(rdx);

  ClearPendingMessage();
}


void FullCodeGenerator::ExitFinallyBlock() {
  DCHECK(!result_register().is(rdx));
  // Restore pending message from stack.
  PopOperand(rdx);
  ExternalReference pending_message_obj =
      ExternalReference::address_of_pending_message_obj(isolate());
  __ Store(pending_message_obj, rdx);
}


void FullCodeGenerator::ClearPendingMessage() {
  DCHECK(!result_register().is(rdx));
  ExternalReference pending_message_obj =
      ExternalReference::address_of_pending_message_obj(isolate());
  __ LoadRoot(rdx, Heap::kTheHoleValueRootIndex);
  __ Store(pending_message_obj, rdx);
}


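// Replay the command (return, rethrow, break or continue) that was deferred
// while a finally block ran; the dispatch token and the saved accumulator
// are popped off the stack below.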
void FullCodeGenerator::DeferredCommands::EmitCommands() {
  __ Pop(result_register());  // Restore the accumulator.
  __ Pop(rdx);                // Get the token.
  for (DeferredCommand cmd : commands_) {
    Label skip;
    __ SmiCompare(rdx, Smi::FromInt(cmd.token));
    __ j(not_equal, &skip);
    switch (cmd.command) {
      case kReturn:
        codegen_->EmitUnwindAndReturn();
        break;
      case kThrow:
        __ Push(result_register());
        __ CallRuntime(Runtime::kReThrow);
        break;
      case kContinue:
        codegen_->EmitContinue(cmd.target);
        break;
      case kBreak:
        codegen_->EmitBreak(cmd.target);
        break;
    }
    __ bind(&skip);
  }
}

#undef __


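// 0x79 is the opcode of the short jns instruction; 0x66 0x90 is the
// two-byte nop that overwrites it when a back edge is patched for
// on-stack replacement.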
static const byte kJnsInstruction = 0x79;
static const byte kNopByteOne = 0x66;
static const byte kNopByteTwo = 0x90;
#ifdef DEBUG
static const byte kCallInstruction = 0xe8;
#endif


void BackEdgeTable::PatchAt(Code* unoptimized_code,
                            Address pc,
                            BackEdgeState target_state,
                            Code* replacement_code) {
  Address call_target_address = pc - kIntSize;
  Address jns_instr_address = call_target_address - 3;
  Address jns_offset_address = call_target_address - 2;

  switch (target_state) {
    case INTERRUPT:
      //   sub <profiling_counter>, <delta>  ;; Not changed
      //   jns ok
      //   call <interrupt stub>
      //   ok:
      *jns_instr_address = kJnsInstruction;
      *jns_offset_address = kJnsOffset;
      break;
    case ON_STACK_REPLACEMENT:
      //   sub <profiling_counter>, <delta>  ;; Not changed
      //   nop
      //   nop
      //   call <on-stack replacement>
      //   ok:
      *jns_instr_address = kNopByteOne;
      *jns_offset_address = kNopByteTwo;
      break;
  }

  Assembler::set_target_address_at(unoptimized_code->GetIsolate(),
                                   call_target_address, unoptimized_code,
                                   replacement_code->entry());
  unoptimized_code->GetHeap()->incremental_marking()->RecordCodeTargetPatch(
      unoptimized_code, call_target_address, replacement_code);
}


BackEdgeTable::BackEdgeState BackEdgeTable::GetBackEdgeState(
    Isolate* isolate,
    Code* unoptimized_code,
    Address pc) {
  Address call_target_address = pc - kIntSize;
  Address jns_instr_address = call_target_address - 3;
  DCHECK_EQ(kCallInstruction, *(call_target_address - 1));

  if (*jns_instr_address == kJnsInstruction) {
    DCHECK_EQ(kJnsOffset, *(call_target_address - 2));
    DCHECK_EQ(isolate->builtins()->InterruptCheck()->entry(),
              Assembler::target_address_at(call_target_address,
                                           unoptimized_code));
    return INTERRUPT;
  }

  DCHECK_EQ(kNopByteOne, *jns_instr_address);
  DCHECK_EQ(kNopByteTwo, *(call_target_address - 2));

  DCHECK_EQ(
      isolate->builtins()->OnStackReplacement()->entry(),
      Assembler::target_address_at(call_target_address, unoptimized_code));
  return ON_STACK_REPLACEMENT;
}

}  // namespace internal
}  // namespace v8

#endif  // V8_TARGET_ARCH_X64