blob: d7403fa42761452642953df46cb1e39aae25a916 [file] [log] [blame]
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00001// Copyright 2012 the V8 project authors. All rights reserved.
2// Use of this source code is governed by a BSD-style license that can be
3// found in the LICENSE file.
4
5#if V8_TARGET_ARCH_X87
6
7#include "src/ast/scopes.h"
8#include "src/code-factory.h"
9#include "src/code-stubs.h"
10#include "src/codegen.h"
11#include "src/debug/debug.h"
12#include "src/full-codegen/full-codegen.h"
13#include "src/ic/ic.h"
14#include "src/parsing/parser.h"
15#include "src/x87/frames-x87.h"
16
17namespace v8 {
18namespace internal {
19
Ben Murdoch097c5b22016-05-18 11:27:45 +010020#define __ ACCESS_MASM(masm())
Ben Murdoch4a90d5f2016-03-22 12:00:34 +000021
// Records the location of a patchable smi-check jump. The jump is emitted
// as jc/jnc on the carry flag produced by `test reg, kSmiTagMask`; the IC
// machinery later patches it (jc -> jz, jnc -> jnz) to flip which path the
// inlined smi code takes.
class JumpPatchSite BASE_EMBEDDED {
 public:
  explicit JumpPatchSite(MacroAssembler* masm) : masm_(masm) {
#ifdef DEBUG
    info_emitted_ = false;
#endif
  }

  ~JumpPatchSite() {
    // If a patch site was bound, EmitPatchInfo() must have run (and vice
    // versa) — otherwise the patcher could not locate the site.
    DCHECK(patch_site_.is_bound() == info_emitted_);
  }

  // Jump to |target| if |reg| is not a smi.
  void EmitJumpIfNotSmi(Register reg,
                        Label* target,
                        Label::Distance distance = Label::kFar) {
    __ test(reg, Immediate(kSmiTagMask));
    EmitJump(not_carry, target, distance);  // Always taken before patched.
  }

  // Jump to |target| if |reg| is a smi.
  void EmitJumpIfSmi(Register reg,
                     Label* target,
                     Label::Distance distance = Label::kFar) {
    __ test(reg, Immediate(kSmiTagMask));
    EmitJump(carry, target, distance);  // Never taken before patched.
  }

  // Emit a marker `test eax, imm8` whose immediate encodes the distance
  // back to the patch site (must fit in a byte), or a nop when no inlined
  // smi code was generated for this site.
  void EmitPatchInfo() {
    if (patch_site_.is_bound()) {
      int delta_to_patch_site = masm_->SizeOfCodeGeneratedSince(&patch_site_);
      DCHECK(is_uint8(delta_to_patch_site));
      __ test(eax, Immediate(delta_to_patch_site));
#ifdef DEBUG
      info_emitted_ = true;
#endif
    } else {
      __ nop();  // Signals no inlined code.
    }
  }

 private:
  // jc will be patched with jz, jnc will become jnz.
  void EmitJump(Condition cc, Label* target, Label::Distance distance) {
    DCHECK(!patch_site_.is_bound() && !info_emitted_);
    DCHECK(cc == carry || cc == not_carry);
    __ bind(&patch_site_);
    __ j(cc, target, distance);
  }

  MacroAssembler* masm() { return masm_; }
  MacroAssembler* masm_;
  Label patch_site_;  // Location of the patchable jump instruction.
#ifdef DEBUG
  bool info_emitted_;  // DCHECK bookkeeping: whether EmitPatchInfo() ran.
#endif
};
77
78
// Generate code for a JS function. On entry to the function the receiver
// and arguments have been pushed on the stack left to right, with the
// return address on top of them. The actual argument count matches the
// formal parameter count expected by the function.
//
// The live registers are:
//   o edi: the JS function object being called (i.e. ourselves)
//   o edx: the new target value
//   o esi: our context
//   o ebp: our caller's frame pointer
//   o esp: stack pointer (pointing to return address)
//
// The function builds a JS frame. Please see JavaScriptFrameConstants in
// frames-x87.h for its layout.
void FullCodeGenerator::Generate() {
  CompilationInfo* info = info_;
  // Heap cell holding the interrupt budget; decremented on back edges and
  // returns to trigger interrupt checks / optimization decisions.
  profiling_counter_ = isolate()->factory()->NewCell(
      Handle<Smi>(Smi::FromInt(FLAG_interrupt_budget), isolate()));
  SetFunctionPosition(literal());
  Comment cmnt(masm_, "[ function compiled by full code generator");

  ProfileEntryHookStub::MaybeCallEntryHook(masm_);

  // In debug builds, verify the receiver is a JSReceiver when required.
  if (FLAG_debug_code && info->ExpectsJSReceiverAsReceiver()) {
    int receiver_offset = (info->scope()->num_parameters() + 1) * kPointerSize;
    __ mov(ecx, Operand(esp, receiver_offset));
    __ AssertNotSmi(ecx);
    __ CmpObjectType(ecx, FIRST_JS_RECEIVER_TYPE, ecx);
    __ Assert(above_equal, kSloppyFunctionExpectsJSReceiverReceiver);
  }

  // Open a frame scope to indicate that there is a frame on the stack.  The
  // MANUAL indicates that the scope shouldn't actually generate code to set up
  // the frame (that is done below).
  FrameScope frame_scope(masm_, StackFrame::MANUAL);

  info->set_prologue_offset(masm_->pc_offset());
  __ Prologue(info->GeneratePreagedPrologue());

  { Comment cmnt(masm_, "[ Allocate locals");
    int locals_count = info->scope()->num_stack_slots();
    // Generators allocate locals, if any, in context slots.
    DCHECK(!IsGeneratorFunction(literal()->kind()) || locals_count == 0);
    OperandStackDepthIncrement(locals_count);
    if (locals_count == 1) {
      __ push(Immediate(isolate()->factory()->undefined_value()));
    } else if (locals_count > 1) {
      // For many locals, first check that pushing them all would not run
      // past the real stack limit.
      if (locals_count >= 128) {
        Label ok;
        __ mov(ecx, esp);
        __ sub(ecx, Immediate(locals_count * kPointerSize));
        ExternalReference stack_limit =
            ExternalReference::address_of_real_stack_limit(isolate());
        __ cmp(ecx, Operand::StaticVariable(stack_limit));
        __ j(above_equal, &ok, Label::kNear);
        __ CallRuntime(Runtime::kThrowStackOverflow);
        __ bind(&ok);
      }
      __ mov(eax, Immediate(isolate()->factory()->undefined_value()));
      // Push undefined in batches of kMaxPushes inside a counted loop, then
      // emit the leftover pushes straight-line.
      const int kMaxPushes = 32;
      if (locals_count >= kMaxPushes) {
        int loop_iterations = locals_count / kMaxPushes;
        __ mov(ecx, loop_iterations);
        Label loop_header;
        __ bind(&loop_header);
        // Do pushes.
        for (int i = 0; i < kMaxPushes; i++) {
          __ push(eax);
        }
        __ dec(ecx);
        __ j(not_zero, &loop_header, Label::kNear);
      }
      int remaining = locals_count % kMaxPushes;
      // Emit the remaining pushes.
      for (int i = 0; i < remaining; i++) {
        __ push(eax);
      }
    }
  }

  bool function_in_register = true;

  // Possibly allocate a local context.
  if (info->scope()->num_heap_slots() > 0) {
    Comment cmnt(masm_, "[ Allocate context");
    bool need_write_barrier = true;
    int slots = info->scope()->num_heap_slots() - Context::MIN_CONTEXT_SLOTS;
    // Argument to NewContext is the function, which is still in edi.
    if (info->scope()->is_script_scope()) {
      __ push(edi);
      __ Push(info->scope()->GetScopeInfo(info->isolate()));
      __ CallRuntime(Runtime::kNewScriptContext);
      PrepareForBailoutForId(BailoutId::ScriptContext(),
                             BailoutState::TOS_REGISTER);
      // The new target value is not used, clobbering is safe.
      DCHECK_NULL(info->scope()->new_target_var());
    } else {
      if (info->scope()->new_target_var() != nullptr) {
        __ push(edx);  // Preserve new target.
      }
      if (slots <= FastNewContextStub::kMaximumSlots) {
        FastNewContextStub stub(isolate(), slots);
        __ CallStub(&stub);
        // Result of FastNewContextStub is always in new space.
        need_write_barrier = false;
      } else {
        __ push(edi);
        __ CallRuntime(Runtime::kNewFunctionContext);
      }
      if (info->scope()->new_target_var() != nullptr) {
        __ pop(edx);  // Restore new target.
      }
    }
    function_in_register = false;
    // Context is returned in eax.  It replaces the context passed to us.
    // It's saved in the stack and kept live in esi.
    __ mov(esi, eax);
    __ mov(Operand(ebp, StandardFrameConstants::kContextOffset), eax);

    // Copy parameters into context if necessary.
    int num_parameters = info->scope()->num_parameters();
    int first_parameter = info->scope()->has_this_declaration() ? -1 : 0;
    for (int i = first_parameter; i < num_parameters; i++) {
      // Slot -1 is the receiver ("this") when it is context-allocated.
      Variable* var = (i == -1) ? scope()->receiver() : scope()->parameter(i);
      if (var->IsContextSlot()) {
        int parameter_offset = StandardFrameConstants::kCallerSPOffset +
            (num_parameters - 1 - i) * kPointerSize;
        // Load parameter from stack.
        __ mov(eax, Operand(ebp, parameter_offset));
        // Store it in the context.
        int context_offset = Context::SlotOffset(var->index());
        __ mov(Operand(esi, context_offset), eax);
        // Update the write barrier. This clobbers eax and ebx.
        if (need_write_barrier) {
          __ RecordWriteContextSlot(esi, context_offset, eax, ebx,
                                    kDontSaveFPRegs);
        } else if (FLAG_debug_code) {
          Label done;
          __ JumpIfInNewSpace(esi, eax, &done, Label::kNear);
          __ Abort(kExpectedNewSpaceObject);
          __ bind(&done);
        }
      }
    }
  }

  // Register holding this function and new target are both trashed in case we
  // bailout here. But since that can happen only when new target is not used
  // and we allocate a context, the value of |function_in_register| is correct.
  PrepareForBailoutForId(BailoutId::FunctionContext(),
                         BailoutState::NO_REGISTERS);

  // Possibly set up a local binding to the this function which is used in
  // derived constructors with super calls.
  Variable* this_function_var = scope()->this_function_var();
  if (this_function_var != nullptr) {
    Comment cmnt(masm_, "[ This function");
    if (!function_in_register) {
      __ mov(edi, Operand(ebp, JavaScriptFrameConstants::kFunctionOffset));
      // The write barrier clobbers register again, keep it marked as such.
    }
    SetVar(this_function_var, edi, ebx, ecx);
  }

  // Possibly set up a local binding to the new target value.
  Variable* new_target_var = scope()->new_target_var();
  if (new_target_var != nullptr) {
    Comment cmnt(masm_, "[ new.target");
    SetVar(new_target_var, edx, ebx, ecx);
  }

  // Possibly allocate RestParameters
  int rest_index;
  Variable* rest_param = scope()->rest_parameter(&rest_index);
  if (rest_param) {
    Comment cmnt(masm_, "[ Allocate rest parameter array");
    if (!function_in_register) {
      __ mov(edi, Operand(ebp, JavaScriptFrameConstants::kFunctionOffset));
    }
    FastNewRestParameterStub stub(isolate());
    __ CallStub(&stub);
    function_in_register = false;  // Stub call may clobber edi.
    SetVar(rest_param, eax, ebx, edx);
  }

  Variable* arguments = scope()->arguments();
  if (arguments != NULL) {
    // Arguments object must be allocated after the context object, in
    // case the "arguments" or ".arguments" variables are in the context.
    Comment cmnt(masm_, "[ Allocate arguments object");
    if (!function_in_register) {
      __ mov(edi, Operand(ebp, JavaScriptFrameConstants::kFunctionOffset));
    }
    if (is_strict(language_mode()) || !has_simple_parameters()) {
      FastNewStrictArgumentsStub stub(isolate());
      __ CallStub(&stub);
    } else if (literal()->has_duplicate_parameters()) {
      __ Push(edi);
      __ CallRuntime(Runtime::kNewSloppyArguments_Generic);
    } else {
      FastNewSloppyArgumentsStub stub(isolate());
      __ CallStub(&stub);
    }

    SetVar(arguments, eax, ebx, edx);
  }

  if (FLAG_trace) {
    __ CallRuntime(Runtime::kTraceEnter);
  }

  // Visit the declarations and body.
  PrepareForBailoutForId(BailoutId::FunctionEntry(),
                         BailoutState::NO_REGISTERS);
  {
    Comment cmnt(masm_, "[ Declarations");
    VisitDeclarations(scope()->declarations());
  }

  // Assert that the declarations do not use ICs. Otherwise the debugger
  // won't be able to redirect a PC at an IC to the correct IC in newly
  // recompiled code.
  DCHECK_EQ(0, ic_total_count_);

  {
    Comment cmnt(masm_, "[ Stack check");
    PrepareForBailoutForId(BailoutId::Declarations(),
                           BailoutState::NO_REGISTERS);
    Label ok;
    ExternalReference stack_limit =
        ExternalReference::address_of_stack_limit(isolate());
    __ cmp(esp, Operand::StaticVariable(stack_limit));
    __ j(above_equal, &ok, Label::kNear);
    __ call(isolate()->builtins()->StackCheck(), RelocInfo::CODE_TARGET);
    __ bind(&ok);
  }

  {
    Comment cmnt(masm_, "[ Body");
    DCHECK(loop_depth() == 0);
    VisitStatements(literal()->body());
    DCHECK(loop_depth() == 0);
  }

  // Always emit a 'return undefined' in case control fell off the end of
  // the body.
  { Comment cmnt(masm_, "[ return <undefined>;");
    __ mov(eax, isolate()->factory()->undefined_value());
    EmitReturnSequence();
  }
}
330
331
// Load the smi zero into the accumulator register (eax).
void FullCodeGenerator::ClearAccumulator() {
  __ Move(eax, Immediate(Smi::FromInt(0)));
}
335
336
// Subtract |delta| (as a smi) from the profiling counter cell's value.
// Clobbers ebx.
void FullCodeGenerator::EmitProfilingCounterDecrement(int delta) {
  __ mov(ebx, Immediate(profiling_counter_));
  __ sub(FieldOperand(ebx, Cell::kValueOffset),
         Immediate(Smi::FromInt(delta)));
}
342
343
// Reset the profiling counter cell back to the full interrupt budget.
// Clobbers ebx.
void FullCodeGenerator::EmitProfilingCounterReset() {
  int reset_value = FLAG_interrupt_budget;
  __ mov(ebx, Immediate(profiling_counter_));
  __ mov(FieldOperand(ebx, Cell::kValueOffset),
         Immediate(Smi::FromInt(reset_value)));
}
350
351
// Emit back-edge bookkeeping for a loop: decrement the profiling counter by
// a weight proportional to the loop body's code size and call the interrupt
// check builtin when the counter goes negative. Also records the bailout
// points needed for on-stack replacement (OSR).
void FullCodeGenerator::EmitBackEdgeBookkeeping(IterationStatement* stmt,
                                                Label* back_edge_target) {
  Comment cmnt(masm_, "[ Back edge bookkeeping");
  Label ok;

  DCHECK(back_edge_target->is_bound());
  // Weight the decrement by the distance spanned by the back edge, clamped
  // to [1, kMaxBackEdgeWeight].
  int distance = masm_->SizeOfCodeGeneratedSince(back_edge_target);
  int weight = Min(kMaxBackEdgeWeight,
                   Max(1, distance / kCodeSizeMultiplier));
  EmitProfilingCounterDecrement(weight);
  __ j(positive, &ok, Label::kNear);
  __ call(isolate()->builtins()->InterruptCheck(), RelocInfo::CODE_TARGET);

  // Record a mapping of this PC offset to the OSR id.  This is used to find
  // the AST id from the unoptimized code in order to use it as a key into
  // the deoptimization input data found in the optimized code.
  RecordBackEdge(stmt->OsrEntryId());

  EmitProfilingCounterReset();

  __ bind(&ok);
  PrepareForBailoutForId(stmt->EntryId(), BailoutState::NO_REGISTERS);
  // Record a mapping of the OSR id to this PC.  This is used if the OSR
  // entry becomes the target of a bailout.  We don't expect it to be, but
  // we want it to work if it is.
  PrepareForBailoutForId(stmt->OsrEntryId(), BailoutState::NO_REGISTERS);
}
379
// Decrement the profiling counter on function exit and call the interrupt
// check builtin if the budget is exhausted. The return exit is treated as a
// backwards jump to the function entry for weighting purposes. When
// |is_tail_call| is false the result register (eax) is preserved around the
// builtin call.
void FullCodeGenerator::EmitProfilingCounterHandlingForReturnSequence(
    bool is_tail_call) {
  // Pretend that the exit is a backwards jump to the entry.
  int weight = 1;
  if (info_->ShouldSelfOptimize()) {
    weight = FLAG_interrupt_budget / FLAG_self_opt_count;
  } else {
    int distance = masm_->pc_offset();
    weight = Min(kMaxBackEdgeWeight, Max(1, distance / kCodeSizeMultiplier));
  }
  EmitProfilingCounterDecrement(weight);
  Label ok;
  __ j(positive, &ok, Label::kNear);
  // Don't need to save result register if we are going to do a tail call.
  if (!is_tail_call) {
    __ push(eax);
  }
  __ call(isolate()->builtins()->InterruptCheck(), RelocInfo::CODE_TARGET);
  if (!is_tail_call) {
    __ pop(eax);
  }
  EmitProfilingCounterReset();
  __ bind(&ok);
}
Ben Murdoch4a90d5f2016-03-22 12:00:34 +0000404
// Emit the function's return sequence. All returns share one common label:
// once it is bound, later returns simply jump to it. The sequence handles
// trace output, profiling-counter bookkeeping, frame teardown, and popping
// the receiver plus arguments from the caller's stack.
void FullCodeGenerator::EmitReturnSequence() {
  Comment cmnt(masm_, "[ Return sequence");
  if (return_label_.is_bound()) {
    __ jmp(&return_label_);
  } else {
    // Common return label
    __ bind(&return_label_);
    if (FLAG_trace) {
      __ push(eax);  // Preserve the return value across the runtime call.
      __ CallRuntime(Runtime::kTraceExit);
    }
    EmitProfilingCounterHandlingForReturnSequence(false);

    SetReturnPosition(literal());
    __ leave();  // Tear down the frame (restore esp/ebp).

    // Pop receiver + parameters; ecx is used as a scratch for the
    // return address.
    int arg_count = info_->scope()->num_parameters() + 1;
    int arguments_bytes = arg_count * kPointerSize;
    __ Ret(arguments_bytes, ecx);
  }
}
426
// Reload the context register (esi) from the slot saved in our frame.
void FullCodeGenerator::RestoreContext() {
  __ mov(esi, Operand(ebp, StandardFrameConstants::kContextOffset));
}
Ben Murdoch4a90d5f2016-03-22 12:00:34 +0000430
// Plug a variable into a stack-value context: push its current value.
void FullCodeGenerator::StackValueContext::Plug(Variable* var) const {
  DCHECK(var->IsStackAllocated() || var->IsContextSlot());
  MemOperand operand = codegen()->VarOperand(var, result_register());
  // Memory operands can be pushed directly.
  codegen()->PushOperand(operand);
}
437
438
// Root-list plugging is not used by the x87 port.
void FullCodeGenerator::EffectContext::Plug(Heap::RootListIndex index) const {
  UNREACHABLE();  // Not used on X87.
}
442
443
// Root-list plugging is not used by the x87 port.
void FullCodeGenerator::AccumulatorValueContext::Plug(
    Heap::RootListIndex index) const {
  UNREACHABLE();  // Not used on X87.
}
448
449
// Root-list plugging is not used by the x87 port.
void FullCodeGenerator::StackValueContext::Plug(
    Heap::RootListIndex index) const {
  UNREACHABLE();  // Not used on X87.
}
454
455
// Root-list plugging is not used by the x87 port.
void FullCodeGenerator::TestContext::Plug(Heap::RootListIndex index) const {
  UNREACHABLE();  // Not used on X87.
}
459
460
// In effect context a literal's value is discarded, so nothing is emitted.
void FullCodeGenerator::EffectContext::Plug(Handle<Object> lit) const {
}
463
464
// Plug a literal into the accumulator (eax). SafeMove is used for smis —
// presumably to avoid embedding raw smi bits directly in the instruction
// stream; confirm against MacroAssembler::SafeMove.
void FullCodeGenerator::AccumulatorValueContext::Plug(
    Handle<Object> lit) const {
  if (lit->IsSmi()) {
    __ SafeMove(result_register(), Immediate(lit));
  } else {
    __ Move(result_register(), Immediate(lit));
  }
}
473
474
// Plug a literal into a stack-value context: push it, tracking the operand
// stack depth. SafePush mirrors SafeMove for smi literals.
void FullCodeGenerator::StackValueContext::Plug(Handle<Object> lit) const {
  codegen()->OperandStackDepthIncrement(1);
  if (lit->IsSmi()) {
    __ SafePush(Immediate(lit));
  } else {
    __ push(Immediate(lit));
  }
}
483
484
// Plug a literal into a test context: when the literal's truthiness is known
// statically, jump straight to the matching branch target; otherwise load it
// into the accumulator and perform the generic ToBoolean test.
void FullCodeGenerator::TestContext::Plug(Handle<Object> lit) const {
  codegen()->PrepareForBailoutBeforeSplit(condition(),
                                          true,
                                          true_label_,
                                          false_label_);
  // Undetectable objects other than null/undefined must not reach the
  // static cases below.
  DCHECK(lit->IsNull() || lit->IsUndefined() || !lit->IsUndetectable());
  if (lit->IsUndefined() || lit->IsNull() || lit->IsFalse()) {
    if (false_label_ != fall_through_) __ jmp(false_label_);
  } else if (lit->IsTrue() || lit->IsJSObject()) {
    if (true_label_ != fall_through_) __ jmp(true_label_);
  } else if (lit->IsString()) {
    // Only the empty string is falsy.
    if (String::cast(*lit)->length() == 0) {
      if (false_label_ != fall_through_) __ jmp(false_label_);
    } else {
      if (true_label_ != fall_through_) __ jmp(true_label_);
    }
  } else if (lit->IsSmi()) {
    // Only the smi zero is falsy.
    if (Smi::cast(*lit)->value() == 0) {
      if (false_label_ != fall_through_) __ jmp(false_label_);
    } else {
      if (true_label_ != fall_through_) __ jmp(true_label_);
    }
  } else {
    // For simplicity we always test the accumulator register.
    __ mov(result_register(), lit);
    codegen()->DoTest(this);
  }
}
513
514
// Drop |count| operands and leave |reg| as the new top of stack: pop
// count-1 slots, then overwrite the remaining slot in place.
void FullCodeGenerator::StackValueContext::DropAndPlug(int count,
                                                       Register reg) const {
  DCHECK(count > 0);
  if (count > 1) codegen()->DropOperands(count - 1);
  __ mov(Operand(esp, 0), reg);
}
521
522
// In effect context both outcomes are equivalent; just bind the (shared)
// label so control flow continues.
void FullCodeGenerator::EffectContext::Plug(Label* materialize_true,
                                            Label* materialize_false) const {
  DCHECK(materialize_true == materialize_false);
  __ bind(materialize_true);
}
528
529
// Materialize a boolean result into the accumulator: true_value at the
// true label, false_value at the false label, converging at |done|.
void FullCodeGenerator::AccumulatorValueContext::Plug(
    Label* materialize_true,
    Label* materialize_false) const {
  Label done;
  __ bind(materialize_true);
  __ mov(result_register(), isolate()->factory()->true_value());
  __ jmp(&done, Label::kNear);
  __ bind(materialize_false);
  __ mov(result_register(), isolate()->factory()->false_value());
  __ bind(&done);
}
541
542
// Materialize a boolean result onto the operand stack: push true_value or
// false_value depending on which label was reached.
void FullCodeGenerator::StackValueContext::Plug(
    Label* materialize_true,
    Label* materialize_false) const {
  codegen()->OperandStackDepthIncrement(1);
  Label done;
  __ bind(materialize_true);
  __ push(Immediate(isolate()->factory()->true_value()));
  __ jmp(&done, Label::kNear);
  __ bind(materialize_false);
  __ push(Immediate(isolate()->factory()->false_value()));
  __ bind(&done);
}
555
556
// In test context the materialization labels must already be the branch
// targets; nothing to emit.
void FullCodeGenerator::TestContext::Plug(Label* materialize_true,
                                          Label* materialize_false) const {
  DCHECK(materialize_true == true_label_);
  DCHECK(materialize_false == false_label_);
}
562
563
// Plug a compile-time boolean into the accumulator.
void FullCodeGenerator::AccumulatorValueContext::Plug(bool flag) const {
  Handle<Object> value = flag
      ? isolate()->factory()->true_value()
      : isolate()->factory()->false_value();
  __ mov(result_register(), value);
}
570
571
// Plug a compile-time boolean onto the operand stack.
void FullCodeGenerator::StackValueContext::Plug(bool flag) const {
  codegen()->OperandStackDepthIncrement(1);
  Handle<Object> value = flag
      ? isolate()->factory()->true_value()
      : isolate()->factory()->false_value();
  __ push(Immediate(value));
}
579
580
// Plug a compile-time boolean into a test context: jump unconditionally to
// the known branch target (unless it is the fall-through).
void FullCodeGenerator::TestContext::Plug(bool flag) const {
  codegen()->PrepareForBailoutBeforeSplit(condition(),
                                          true,
                                          true_label_,
                                          false_label_);
  if (flag) {
    if (true_label_ != fall_through_) __ jmp(true_label_);
  } else {
    if (false_label_ != fall_through_) __ jmp(false_label_);
  }
}
592
593
// Emit a generic truthiness test of the accumulator via the ToBoolean IC,
// then branch on whether the IC produced the true value.
void FullCodeGenerator::DoTest(Expression* condition,
                               Label* if_true,
                               Label* if_false,
                               Label* fall_through) {
  Handle<Code> ic = ToBooleanICStub::GetUninitialized(isolate());
  CallIC(ic, condition->test_id());
  __ CompareRoot(result_register(), Heap::kTrueValueRootIndex);
  Split(equal, if_true, if_false, fall_through);
}
603
604
// Emit a conditional split on |cc|, avoiding a jump to whichever target is
// the fall-through: jump only where control would not naturally arrive.
void FullCodeGenerator::Split(Condition cc,
                              Label* if_true,
                              Label* if_false,
                              Label* fall_through) {
  if (if_false == fall_through) {
    __ j(cc, if_true);
  } else if (if_true == fall_through) {
    __ j(NegateCondition(cc), if_false);
  } else {
    __ j(cc, if_true);
    __ jmp(if_false);
  }
}
618
619
// Compute the ebp-relative operand for a stack-allocated variable
// (parameter above the frame, local below it).
MemOperand FullCodeGenerator::StackOperand(Variable* var) {
  DCHECK(var->IsStackAllocated());
  // Offset is negative because higher indexes are at lower addresses.
  int offset = -var->index() * kPointerSize;
  // Adjust by a (parameter or local) base offset.
  if (var->IsParameter()) {
    offset += (info_->scope()->num_parameters() + 1) * kPointerSize;
  } else {
    offset += JavaScriptFrameConstants::kLocal0Offset;
  }
  return Operand(ebp, offset);
}
632
633
// Compute the operand for a variable. For a context slot, walks the context
// chain into |scratch| first (so the returned operand is scratch-relative);
// for a stack slot it is ebp-relative and |scratch| is untouched.
MemOperand FullCodeGenerator::VarOperand(Variable* var, Register scratch) {
  DCHECK(var->IsContextSlot() || var->IsStackAllocated());
  if (var->IsContextSlot()) {
    int context_chain_length = scope()->ContextChainLength(var->scope());
    __ LoadContext(scratch, context_chain_length);
    return ContextOperand(scratch, var->index());
  } else {
    return StackOperand(var);
  }
}
644
645
// Load a variable's value into |dest| (which doubles as the scratch for
// context-chain traversal).
void FullCodeGenerator::GetVar(Register dest, Variable* var) {
  DCHECK(var->IsContextSlot() || var->IsStackAllocated());
  MemOperand location = VarOperand(var, dest);
  __ mov(dest, location);
}
651
652
// Store |src| into a variable's slot, emitting the write barrier when the
// slot lives in a (heap-allocated) context. |scratch0| and |scratch1| must
// be distinct from each other, from |src|, and from esi.
void FullCodeGenerator::SetVar(Variable* var,
                               Register src,
                               Register scratch0,
                               Register scratch1) {
  DCHECK(var->IsContextSlot() || var->IsStackAllocated());
  DCHECK(!scratch0.is(src));
  DCHECK(!scratch0.is(scratch1));
  DCHECK(!scratch1.is(src));
  MemOperand location = VarOperand(var, scratch0);
  __ mov(location, src);

  // Emit the write barrier code if the location is in the heap.
  if (var->IsContextSlot()) {
    int offset = Context::SlotOffset(var->index());
    DCHECK(!scratch0.is(esi) && !src.is(esi) && !scratch1.is(esi));
    __ RecordWriteContextSlot(scratch0, offset, src, scratch1, kDontSaveFPRegs);
  }
}
671
672
// Record a bailout point just before a test-context split. When
// |should_normalize| the bailout expects a normalized boolean in eax, so we
// re-derive the branch from a comparison against true_value; the |skip|
// label keeps that normalization code out of the normal (non-bailout) path.
void FullCodeGenerator::PrepareForBailoutBeforeSplit(Expression* expr,
                                                     bool should_normalize,
                                                     Label* if_true,
                                                     Label* if_false) {
  // Only prepare for bailouts before splits if we're in a test
  // context. Otherwise, we let the Visit function deal with the
  // preparation to avoid preparing with the same AST id twice.
  if (!context()->IsTest()) return;

  Label skip;
  if (should_normalize) __ jmp(&skip, Label::kNear);
  PrepareForBailout(expr, BailoutState::TOS_REGISTER);
  if (should_normalize) {
    __ cmp(eax, isolate()->factory()->true_value());
    Split(equal, if_true, if_false, NULL);
    __ bind(&skip);
  }
}
691
692
// Debug-only sanity check that a declaration is being emitted against the
// function's own context, not a with/catch context.
void FullCodeGenerator::EmitDebugCheckDeclarationContext(Variable* variable) {
  // The variable in the declaration always resides in the current context.
  DCHECK_EQ(0, scope()->ContextChainLength(variable->scope()));
  if (FLAG_debug_code) {
    // Check that we're not inside a with or catch context.
    __ mov(ebx, FieldOperand(esi, HeapObject::kMapOffset));
    __ cmp(ebx, isolate()->factory()->with_context_map());
    __ Check(not_equal, kDeclarationInWithContext);
    __ cmp(ebx, isolate()->factory()->catch_context_map());
    __ Check(not_equal, kDeclarationInCatchContext);
  }
}
705
706
// Emit code (or record global-declaration data) for a variable declaration.
// let/const bindings are hole-initialized so the runtime can detect use
// before initialization (TDZ).
void FullCodeGenerator::VisitVariableDeclaration(
    VariableDeclaration* declaration) {
  // If it was not possible to allocate the variable at compile time, we
  // need to "declare" it at runtime to make sure it actually exists in the
  // local context.
  VariableProxy* proxy = declaration->proxy();
  VariableMode mode = declaration->mode();
  Variable* variable = proxy->var();
  bool hole_init = mode == LET || mode == CONST;
  switch (variable->location()) {
    case VariableLocation::GLOBAL:
    case VariableLocation::UNALLOCATED:
      // Globals are declared in bulk later via DeclareGlobals; record the
      // name/value pair (undefined for plain variables) now.
      DCHECK(!variable->binding_needs_init());
      globals_->Add(variable->name(), zone());
      globals_->Add(isolate()->factory()->undefined_value(), zone());
      break;

    case VariableLocation::PARAMETER:
    case VariableLocation::LOCAL:
      if (hole_init) {
        Comment cmnt(masm_, "[ VariableDeclaration");
        __ mov(StackOperand(variable),
               Immediate(isolate()->factory()->the_hole_value()));
      }
      break;

    case VariableLocation::CONTEXT:
      if (hole_init) {
        Comment cmnt(masm_, "[ VariableDeclaration");
        EmitDebugCheckDeclarationContext(variable);
        __ mov(ContextOperand(esi, variable->index()),
               Immediate(isolate()->factory()->the_hole_value()));
        // No write barrier since the hole value is in old space.
        PrepareForBailoutForId(proxy->id(), BailoutState::NO_REGISTERS);
      }
      break;

    case VariableLocation::LOOKUP: {
      Comment cmnt(masm_, "[ VariableDeclaration");
      __ push(Immediate(variable->name()));
      // VariableDeclaration nodes are always introduced in one of four modes.
      DCHECK(IsDeclaredVariableMode(mode));
      // Push initial value, if any.
      // Note: For variables we must not push an initial value (such as
      // 'undefined') because we may have a (legal) redeclaration and we
      // must not destroy the current value.
      if (hole_init) {
        __ push(Immediate(isolate()->factory()->the_hole_value()));
      } else {
        __ push(Immediate(Smi::FromInt(0)));  // Indicates no initial value.
      }
      __ push(
          Immediate(Smi::FromInt(variable->DeclarationPropertyAttributes())));
      __ CallRuntime(Runtime::kDeclareLookupSlot);
      PrepareForBailoutForId(proxy->id(), BailoutState::NO_REGISTERS);
      break;
    }
  }
}
766
// Emit code (or record global-declaration data) for a function declaration.
// Unlike plain variables, the function value itself is evaluated and stored
// eagerly.
void FullCodeGenerator::VisitFunctionDeclaration(
    FunctionDeclaration* declaration) {
  VariableProxy* proxy = declaration->proxy();
  Variable* variable = proxy->var();
  switch (variable->location()) {
    case VariableLocation::GLOBAL:
    case VariableLocation::UNALLOCATED: {
      // Record name + SharedFunctionInfo for the bulk DeclareGlobals call.
      globals_->Add(variable->name(), zone());
      Handle<SharedFunctionInfo> function =
          Compiler::GetSharedFunctionInfo(declaration->fun(), script(), info_);
      // Check for stack-overflow exception.
      if (function.is_null()) return SetStackOverflow();
      globals_->Add(function, zone());
      break;
    }

    case VariableLocation::PARAMETER:
    case VariableLocation::LOCAL: {
      Comment cmnt(masm_, "[ FunctionDeclaration");
      VisitForAccumulatorValue(declaration->fun());
      __ mov(StackOperand(variable), result_register());
      break;
    }

    case VariableLocation::CONTEXT: {
      Comment cmnt(masm_, "[ FunctionDeclaration");
      EmitDebugCheckDeclarationContext(variable);
      VisitForAccumulatorValue(declaration->fun());
      __ mov(ContextOperand(esi, variable->index()), result_register());
      // We know that we have written a function, which is not a smi.
      __ RecordWriteContextSlot(esi, Context::SlotOffset(variable->index()),
                                result_register(), ecx, kDontSaveFPRegs,
                                EMIT_REMEMBERED_SET, OMIT_SMI_CHECK);
      PrepareForBailoutForId(proxy->id(), BailoutState::NO_REGISTERS);
      break;
    }

    case VariableLocation::LOOKUP: {
      Comment cmnt(masm_, "[ FunctionDeclaration");
      PushOperand(variable->name());
      VisitForStackValue(declaration->fun());
      PushOperand(Smi::FromInt(variable->DeclarationPropertyAttributes()));
      CallRuntimeWithOperands(Runtime::kDeclareLookupSlot);
      PrepareForBailoutForId(proxy->id(), BailoutState::NO_REGISTERS);
      break;
    }
  }
}
815
816
// Declare all recorded global name/value pairs in one runtime call.
void FullCodeGenerator::DeclareGlobals(Handle<FixedArray> pairs) {
  // Call the runtime to declare the globals.
  __ Push(pairs);
  __ Push(Smi::FromInt(DeclareGlobalsFlags()));
  __ CallRuntime(Runtime::kDeclareGlobals);
  // Return value is ignored.
}
824
825
// Declare all module descriptions in one runtime call.
void FullCodeGenerator::DeclareModules(Handle<FixedArray> descriptions) {
  // Call the runtime to declare the modules.
  __ Push(descriptions);
  __ CallRuntime(Runtime::kDeclareModules);
  // Return value is ignored.
}
832
833
// Generates code for a JavaScript `switch` statement. The tag value is kept
// on the operand stack while the case labels are tested one by one with
// strict equality (===). Case tests and case bodies are emitted in two
// separate passes so that fall-through between bodies works regardless of
// the order in which tests match.
void FullCodeGenerator::VisitSwitchStatement(SwitchStatement* stmt) {
  Comment cmnt(masm_, "[ SwitchStatement");
  Breakable nested_statement(this, stmt);
  SetStatementPosition(stmt);

  // Keep the switch value on the stack until a case matches.
  VisitForStackValue(stmt->tag());
  PrepareForBailoutForId(stmt->EntryId(), BailoutState::NO_REGISTERS);

  ZoneList<CaseClause*>* clauses = stmt->cases();
  CaseClause* default_clause = NULL;  // Can occur anywhere in the list.

  Label next_test;  // Recycled for each test.
  // Compile all the tests with branches to their bodies.
  for (int i = 0; i < clauses->length(); i++) {
    CaseClause* clause = clauses->at(i);
    clause->body_target()->Unuse();

    // The default is not a test, but remember it as final fall through.
    if (clause->is_default()) {
      default_clause = clause;
      continue;
    }

    Comment cmnt(masm_, "[ Case comparison");
    __ bind(&next_test);
    next_test.Unuse();

    // Compile the label expression.
    VisitForAccumulatorValue(clause->label());

    // Perform the comparison as if via '==='.
    __ mov(edx, Operand(esp, 0));  // Switch value.
    bool inline_smi_code = ShouldInlineSmiCase(Token::EQ_STRICT);
    JumpPatchSite patch_site(masm_);
    if (inline_smi_code) {
      // Fast path: if both the tag (edx) and the label (eax) are smis,
      // compare them directly and skip the CompareIC call.
      Label slow_case;
      __ mov(ecx, edx);
      __ or_(ecx, eax);
      patch_site.EmitJumpIfNotSmi(ecx, &slow_case, Label::kNear);

      __ cmp(edx, eax);
      __ j(not_equal, &next_test);
      __ Drop(1);  // Switch value is no longer needed.
      __ jmp(clause->body_target());
      __ bind(&slow_case);
    }

    // Slow path: call the strict-equality CompareIC; the patch site lets the
    // IC machinery rewrite the smi check after the call site.
    SetExpressionPosition(clause);
    Handle<Code> ic =
        CodeFactory::CompareIC(isolate(), Token::EQ_STRICT).code();
    CallIC(ic, clause->CompareId());
    patch_site.EmitPatchInfo();

    // The `skip` block is only reached via deopt/bailout at this clause;
    // straight-line execution jumps over it and tests the IC's flag result.
    Label skip;
    __ jmp(&skip, Label::kNear);
    PrepareForBailout(clause, BailoutState::TOS_REGISTER);
    __ cmp(eax, isolate()->factory()->true_value());
    __ j(not_equal, &next_test);
    __ Drop(1);
    __ jmp(clause->body_target());
    __ bind(&skip);

    // CompareIC returns zero in eax when the operands compare equal.
    __ test(eax, eax);
    __ j(not_equal, &next_test);
    __ Drop(1);  // Switch value is no longer needed.
    __ jmp(clause->body_target());
  }

  // Discard the test value and jump to the default if present, otherwise to
  // the end of the statement.
  __ bind(&next_test);
  DropOperands(1);  // Switch value is no longer needed.
  if (default_clause == NULL) {
    __ jmp(nested_statement.break_label());
  } else {
    __ jmp(default_clause->body_target());
  }

  // Compile all the case bodies.
  for (int i = 0; i < clauses->length(); i++) {
    Comment cmnt(masm_, "[ Case body");
    CaseClause* clause = clauses->at(i);
    __ bind(clause->body_target());
    PrepareForBailoutForId(clause->EntryId(), BailoutState::NO_REGISTERS);
    VisitStatements(clause->statements());
  }

  __ bind(nested_statement.break_label());
  PrepareForBailoutForId(stmt->ExitId(), BailoutState::NO_REGISTERS);
}
925
926
// Generates code for a JavaScript `for (each in enumerable)` loop
// (ECMA-262 v5, section 12.6.4). The loop state lives in five operand-stack
// slots, top to bottom:
//   [0] current index (smi)
//   [1] number of entries / array length (smi)
//   [2] enum cache array or fixed array of names
//   [3] expected map (or Smi(1) in the permanent slow case)
//   [4] the enumerable object itself
void FullCodeGenerator::VisitForInStatement(ForInStatement* stmt) {
  Comment cmnt(masm_, "[ ForInStatement");
  SetStatementPosition(stmt, SKIP_BREAK);

  FeedbackVectorSlot slot = stmt->ForInFeedbackSlot();

  // Get the object to enumerate over.
  SetExpressionAsStatementPosition(stmt->enumerable());
  VisitForAccumulatorValue(stmt->enumerable());
  // Account up-front for the five stack slots pushed below.
  OperandStackDepthIncrement(5);

  Label loop, exit;
  Iteration loop_statement(this, stmt);
  increment_loop_depth();

  // If the object is null or undefined, skip over the loop, otherwise convert
  // it to a JS receiver.  See ECMA-262 version 5, section 12.6.4.
  Label convert, done_convert;
  __ JumpIfSmi(eax, &convert, Label::kNear);
  __ CmpObjectType(eax, FIRST_JS_RECEIVER_TYPE, ecx);
  __ j(above_equal, &done_convert, Label::kNear);
  __ cmp(eax, isolate()->factory()->undefined_value());
  __ j(equal, &exit);
  __ cmp(eax, isolate()->factory()->null_value());
  __ j(equal, &exit);
  __ bind(&convert);
  ToObjectStub stub(isolate());
  __ CallStub(&stub);
  __ bind(&done_convert);
  PrepareForBailoutForId(stmt->ToObjectId(), BailoutState::TOS_REGISTER);
  __ push(eax);  // Slot [4]: the enumerable.

  // Check cache validity in generated code. If we cannot guarantee cache
  // validity, call the runtime system to check cache validity or get the
  // property names in a fixed array. Note: Proxies never have an enum cache,
  // so will always take the slow path.
  Label call_runtime, use_cache, fixed_array;
  __ CheckEnumCache(&call_runtime);

  __ mov(eax, FieldOperand(eax, HeapObject::kMapOffset));
  __ jmp(&use_cache, Label::kNear);

  // Get the set of properties to enumerate.
  __ bind(&call_runtime);
  __ push(eax);
  __ CallRuntime(Runtime::kForInEnumerate);
  PrepareForBailoutForId(stmt->EnumId(), BailoutState::TOS_REGISTER);
  // The runtime returns either a map (fast path via the enum cache) or a
  // FixedArray of names; distinguish them by checking for the meta map.
  __ cmp(FieldOperand(eax, HeapObject::kMapOffset),
         isolate()->factory()->meta_map());
  __ j(not_equal, &fixed_array);


  // We got a map in register eax. Get the enumeration cache from it.
  Label no_descriptors;
  __ bind(&use_cache);

  __ EnumLength(edx, eax);
  __ cmp(edx, Immediate(Smi::FromInt(0)));
  __ j(equal, &no_descriptors);

  __ LoadInstanceDescriptors(eax, ecx);
  __ mov(ecx, FieldOperand(ecx, DescriptorArray::kEnumCacheOffset));
  __ mov(ecx, FieldOperand(ecx, DescriptorArray::kEnumCacheBridgeCacheOffset));

  // Set up the four remaining stack slots.
  __ push(eax);  // Map.
  __ push(ecx);  // Enumeration cache.
  __ push(edx);  // Number of valid entries for the map in the enum cache.
  __ push(Immediate(Smi::FromInt(0)));  // Initial index.
  __ jmp(&loop);

  // Enum cache is empty: nothing to iterate. Pop the enumerable and exit.
  __ bind(&no_descriptors);
  __ add(esp, Immediate(kPointerSize));
  __ jmp(&exit);

  // We got a fixed array in register eax. Iterate through that.
  __ bind(&fixed_array);

  __ push(Immediate(Smi::FromInt(1)));  // Smi(1) indicates slow check
  __ push(eax);  // Array
  __ mov(eax, FieldOperand(eax, FixedArray::kLengthOffset));
  __ push(eax);  // Fixed array length (as smi).
  PrepareForBailoutForId(stmt->PrepareId(), BailoutState::NO_REGISTERS);
  __ push(Immediate(Smi::FromInt(0)));  // Initial index.

  // Generate code for doing the condition check.
  __ bind(&loop);
  SetExpressionAsStatementPosition(stmt->each());

  __ mov(eax, Operand(esp, 0 * kPointerSize));  // Get the current index.
  __ cmp(eax, Operand(esp, 1 * kPointerSize));  // Compare to the array length.
  __ j(above_equal, loop_statement.break_label());

  // Get the current entry of the array into register ebx.
  __ mov(ebx, Operand(esp, 2 * kPointerSize));
  __ mov(ebx, FieldOperand(ebx, eax, times_2, FixedArray::kHeaderSize));

  // Get the expected map from the stack or a smi in the
  // permanent slow case into register edx.
  __ mov(edx, Operand(esp, 3 * kPointerSize));

  // Check if the expected map still matches that of the enumerable.
  // If not, we may have to filter the key.
  Label update_each;
  __ mov(ecx, Operand(esp, 4 * kPointerSize));
  __ cmp(edx, FieldOperand(ecx, HeapObject::kMapOffset));
  __ j(equal, &update_each, Label::kNear);

  // We need to filter the key, record slow-path here.
  // Mark the feedback slot megamorphic so later tiers know this for-in
  // took the generic path.
  int const vector_index = SmiFromSlot(slot)->value();
  __ EmitLoadTypeFeedbackVector(edx);
  __ mov(FieldOperand(edx, FixedArray::OffsetOfElementAt(vector_index)),
         Immediate(TypeFeedbackVector::MegamorphicSentinel(isolate())));

  // Convert the entry to a string or null if it isn't a property
  // anymore. If the property has been removed while iterating, we
  // just skip it.
  __ push(ecx);  // Enumerable.
  __ push(ebx);  // Current entry.
  __ CallRuntime(Runtime::kForInFilter);
  PrepareForBailoutForId(stmt->FilterId(), BailoutState::TOS_REGISTER);
  __ cmp(eax, isolate()->factory()->undefined_value());
  __ j(equal, loop_statement.continue_label());
  __ mov(ebx, eax);

  // Update the 'each' property or variable from the possibly filtered
  // entry in register ebx.
  __ bind(&update_each);
  __ mov(result_register(), ebx);
  // Perform the assignment as if via '='.
  { EffectContext context(this);
    EmitAssignment(stmt->each(), stmt->EachFeedbackSlot());
    PrepareForBailoutForId(stmt->AssignmentId(), BailoutState::NO_REGISTERS);
  }

  // Both Crankshaft and Turbofan expect BodyId to be right before stmt->body().
  PrepareForBailoutForId(stmt->BodyId(), BailoutState::NO_REGISTERS);
  // Generate code for the body of the loop.
  Visit(stmt->body());

  // Generate code for going to the next element by incrementing the
  // index (smi) stored on top of the stack.
  __ bind(loop_statement.continue_label());
  __ add(Operand(esp, 0 * kPointerSize), Immediate(Smi::FromInt(1)));

  EmitBackEdgeBookkeeping(stmt, &loop);
  __ jmp(&loop);

  // Remove the pointers stored on the stack.
  __ bind(loop_statement.break_label());
  DropOperands(5);

  // Exit and decrement the loop depth.
  PrepareForBailoutForId(stmt->ExitId(), BailoutState::NO_REGISTERS);
  __ bind(&exit);
  decrement_loop_depth();
}
1084
1085
// Stores the [[HomeObject]] (the receiver at the top of the operand stack)
// on the closure found at `offset` slots down the stack, via a named
// StoreIC on the home_object_symbol. `slot` is the feedback-vector slot
// used by the IC.
void FullCodeGenerator::EmitSetHomeObject(Expression* initializer, int offset,
                                          FeedbackVectorSlot slot) {
  DCHECK(NeedsHomeObject(initializer));
  __ mov(StoreDescriptor::ReceiverRegister(), Operand(esp, 0));
  __ mov(StoreDescriptor::NameRegister(),
         Immediate(isolate()->factory()->home_object_symbol()));
  __ mov(StoreDescriptor::ValueRegister(), Operand(esp, offset * kPointerSize));
  EmitLoadStoreICSlot(slot);
  CallStoreIC();
}
1096
1097
// Same as EmitSetHomeObject, but the receiver is taken from the accumulator
// (eax) instead of the top of the operand stack. The value stored is still
// read from `offset` slots down the stack.
void FullCodeGenerator::EmitSetHomeObjectAccumulator(Expression* initializer,
                                                     int offset,
                                                     FeedbackVectorSlot slot) {
  DCHECK(NeedsHomeObject(initializer));
  __ mov(StoreDescriptor::ReceiverRegister(), eax);
  __ mov(StoreDescriptor::NameRegister(),
         Immediate(isolate()->factory()->home_object_symbol()));
  __ mov(StoreDescriptor::ValueRegister(), Operand(esp, offset * kPointerSize));
  EmitLoadStoreICSlot(slot);
  CallStoreIC();
}
1109
1110
// Emits the fast path for loading a DYNAMIC_GLOBAL variable: walk the
// context chain, verifying that no intervening context has an extension
// object (which a sloppy eval could have introduced). If any extension check
// fails, control jumps to `slow`; otherwise the variable is loaded with the
// normal global-load machinery.
void FullCodeGenerator::EmitLoadGlobalCheckExtensions(VariableProxy* proxy,
                                                      TypeofMode typeof_mode,
                                                      Label* slow) {
  Register context = esi;
  Register temp = edx;

  // Statically walk the scope chain, emitting an extension check for each
  // scope that allocates a heap context and calls sloppy eval.
  Scope* s = scope();
  while (s != NULL) {
    if (s->num_heap_slots() > 0) {
      if (s->calls_sloppy_eval()) {
        // Check that extension is "the hole".
        __ JumpIfNotRoot(ContextOperand(context, Context::EXTENSION_INDEX),
                         Heap::kTheHoleValueRootIndex, slow);
      }
      // Load next context in chain.
      __ mov(temp, ContextOperand(context, Context::PREVIOUS_INDEX));
      // Walk the rest of the chain without clobbering esi.
      context = temp;
    }
    // If no outer scope calls eval, we do not need to check more
    // context extensions. If we have reached an eval scope, we check
    // all extensions from this point.
    if (!s->outer_scope_calls_sloppy_eval() || s->is_eval_scope()) break;
    s = s->outer_scope();
  }

  if (s != NULL && s->is_eval_scope()) {
    // The remaining chain length is unknown statically, so emit a runtime
    // loop up the context chain. There is no frame effect so it is
    // safe to use raw labels here.
    Label next, fast;
    if (!context.is(temp)) {
      __ mov(temp, context);
    }
    __ bind(&next);
    // Terminate at native context.
    __ cmp(FieldOperand(temp, HeapObject::kMapOffset),
           Immediate(isolate()->factory()->native_context_map()));
    __ j(equal, &fast, Label::kNear);
    // Check that extension is "the hole".
    __ JumpIfNotRoot(ContextOperand(temp, Context::EXTENSION_INDEX),
                     Heap::kTheHoleValueRootIndex, slow);
    // Load next context in chain.
    __ mov(temp, ContextOperand(temp, Context::PREVIOUS_INDEX));
    __ jmp(&next);
    __ bind(&fast);
  }

  // All extension objects were empty and it is safe to use a normal global
  // load machinery.
  EmitGlobalVariableLoad(proxy, typeof_mode);
}
1162
1163
// Returns a MemOperand addressing the context slot for `var`, emitting
// checks that no context between the current scope and the variable's scope
// has an eval-introduced extension object. Jumps to `slow` if any extension
// check fails. Clobbers ebx when the chain is walked.
MemOperand FullCodeGenerator::ContextSlotOperandCheckExtensions(Variable* var,
                                                                Label* slow) {
  DCHECK(var->IsContextSlot());
  Register context = esi;
  Register temp = ebx;

  for (Scope* s = scope(); s != var->scope(); s = s->outer_scope()) {
    if (s->num_heap_slots() > 0) {
      if (s->calls_sloppy_eval()) {
        // Check that extension is "the hole".
        __ JumpIfNotRoot(ContextOperand(context, Context::EXTENSION_INDEX),
                         Heap::kTheHoleValueRootIndex, slow);
      }
      __ mov(temp, ContextOperand(context, Context::PREVIOUS_INDEX));
      // Walk the rest of the chain without clobbering esi.
      context = temp;
    }
  }
  // Check that last extension is "the hole".
  __ JumpIfNotRoot(ContextOperand(context, Context::EXTENSION_INDEX),
                   Heap::kTheHoleValueRootIndex, slow);

  // This function is used only for loads, not stores, so it's safe to
  // return an esi-based operand (the write barrier cannot be allowed to
  // destroy the esi register).
  return ContextOperand(context, var->index());
}
1191
1192
// Emits the fast path for loading a dynamically-scoped variable that might
// be shadowed by eval-introduced bindings. On success the value ends up in
// eax and control jumps to `done`; if the fast path cannot be taken, the
// emitted checks branch to `slow`.
void FullCodeGenerator::EmitDynamicLookupFastCase(VariableProxy* proxy,
                                                  TypeofMode typeof_mode,
                                                  Label* slow, Label* done) {
  // Generate fast-case code for variables that might be shadowed by
  // eval-introduced variables. Eval is used a lot without
  // introducing variables. In those cases, we do not want to
  // perform a runtime call for all variables in the scope
  // containing the eval.
  Variable* var = proxy->var();
  if (var->mode() == DYNAMIC_GLOBAL) {
    EmitLoadGlobalCheckExtensions(proxy, typeof_mode, slow);
    __ jmp(done);
  } else if (var->mode() == DYNAMIC_LOCAL) {
    Variable* local = var->local_if_not_shadowed();
    __ mov(eax, ContextSlotOperandCheckExtensions(local, slow));
    if (local->mode() == LET || local->mode() == CONST) {
      // let/const bindings hold the hole until initialized; reading the
      // hole means a use before initialization (TDZ) -> ReferenceError.
      __ cmp(eax, isolate()->factory()->the_hole_value());
      __ j(not_equal, done);
      __ push(Immediate(var->name()));
      __ CallRuntime(Runtime::kThrowReferenceError);
    }
    __ jmp(done);
  }
}
1217
1218
// Loads a global (or unallocated) variable via a LoadIC: the receiver is the
// global object taken from the native context's extension slot, the name and
// feedback slot come from the proxy. `typeof_mode` controls whether an
// unresolvable name throws (outside typeof) or yields undefined.
void FullCodeGenerator::EmitGlobalVariableLoad(VariableProxy* proxy,
                                               TypeofMode typeof_mode) {
  Variable* var = proxy->var();
  DCHECK(var->IsUnallocatedOrGlobalSlot() ||
         (var->IsLookupSlot() && var->mode() == DYNAMIC_GLOBAL));
  __ mov(LoadDescriptor::ReceiverRegister(), NativeContextOperand());
  __ mov(LoadDescriptor::ReceiverRegister(),
         ContextOperand(LoadDescriptor::ReceiverRegister(),
                        Context::EXTENSION_INDEX));
  __ mov(LoadDescriptor::NameRegister(), var->name());
  __ mov(LoadDescriptor::SlotRegister(),
         Immediate(SmiFromSlot(proxy->VariableFeedbackSlot())));
  CallLoadIC(typeof_mode);
}
1233
1234
// Emits a load of the variable referenced by `proxy` and plugs the result
// into the current expression context. Dispatches on the variable's
// location: global (LoadIC), parameter/local/context slot (direct load with
// an optional TDZ hole check), or dynamic lookup (fast path + runtime
// fallback).
void FullCodeGenerator::EmitVariableLoad(VariableProxy* proxy,
                                         TypeofMode typeof_mode) {
  SetExpressionPosition(proxy);
  PrepareForBailoutForId(proxy->BeforeId(), BailoutState::NO_REGISTERS);
  Variable* var = proxy->var();

  // Three cases: global variables, lookup variables, and all other types of
  // variables.
  switch (var->location()) {
    case VariableLocation::GLOBAL:
    case VariableLocation::UNALLOCATED: {
      Comment cmnt(masm_, "[ Global variable");
      EmitGlobalVariableLoad(proxy, typeof_mode);
      context()->Plug(eax);
      break;
    }

    case VariableLocation::PARAMETER:
    case VariableLocation::LOCAL:
    case VariableLocation::CONTEXT: {
      DCHECK_EQ(NOT_INSIDE_TYPEOF, typeof_mode);
      Comment cmnt(masm_, var->IsContextSlot() ? "[ Context variable"
                                               : "[ Stack variable");

      if (NeedsHoleCheckForLoad(proxy)) {
        // Let and const need a read barrier.
        Label done;
        GetVar(eax, var);
        __ cmp(eax, isolate()->factory()->the_hole_value());
        __ j(not_equal, &done, Label::kNear);
        if (var->mode() == LET || var->mode() == CONST) {
          // Throw a reference error when using an uninitialized let/const
          // binding in harmony mode.
          __ push(Immediate(var->name()));
          __ CallRuntime(Runtime::kThrowReferenceError);
        }
        __ bind(&done);
        context()->Plug(eax);
        break;
      }
      // No hole check needed: plug the variable itself so the context can
      // materialize it lazily.
      context()->Plug(var);
      break;
    }

    case VariableLocation::LOOKUP: {
      Comment cmnt(masm_, "[ Lookup variable");
      Label done, slow;
      // Generate code for loading from variables potentially shadowed
      // by eval-introduced variables.
      EmitDynamicLookupFastCase(proxy, typeof_mode, &slow, &done);
      __ bind(&slow);
      __ push(Immediate(var->name()));
      // Inside typeof, an unresolvable lookup must yield undefined rather
      // than throw, hence the separate runtime entry.
      Runtime::FunctionId function_id =
          typeof_mode == NOT_INSIDE_TYPEOF
              ? Runtime::kLoadLookupSlot
              : Runtime::kLoadLookupSlotInsideTypeof;
      __ CallRuntime(function_id);
      __ bind(&done);
      context()->Plug(eax);
      break;
    }
  }
}
1298
1299
// Generates code for a regexp literal by calling FastCloneRegExpStub with
// the enclosing closure, the literal's index in the literals array, the
// pattern, and the flags. The resulting JSRegExp is plugged from eax.
void FullCodeGenerator::VisitRegExpLiteral(RegExpLiteral* expr) {
  Comment cmnt(masm_, "[ RegExpLiteral");
  __ mov(edi, Operand(ebp, JavaScriptFrameConstants::kFunctionOffset));
  __ Move(eax, Immediate(Smi::FromInt(expr->literal_index())));
  __ Move(ecx, Immediate(expr->pattern()));
  __ Move(edx, Immediate(Smi::FromInt(expr->flags())));
  FastCloneRegExpStub stub(isolate());
  __ CallStub(&stub);
  context()->Plug(eax);
}
1310
1311
// Pushes the accessor function for an object-literal getter/setter slot onto
// the operand stack, or null when the accessor is absent (so getter/setter
// pairs can always be passed to the runtime as two values). Also wires up
// the [[HomeObject]] when the accessor body needs it.
void FullCodeGenerator::EmitAccessor(ObjectLiteralProperty* property) {
  Expression* expression = (property == NULL) ? NULL : property->value();
  if (expression == NULL) {
    PushOperand(isolate()->factory()->null_value());
  } else {
    VisitForStackValue(expression);
    if (NeedsHomeObject(expression)) {
      DCHECK(property->kind() == ObjectLiteral::Property::GETTER ||
             property->kind() == ObjectLiteral::Property::SETTER);
      // Stack layout at this point: getter is 2 slots below the home object,
      // setter is 3 (receiver, name, getter[, setter]).
      int offset = property->kind() == ObjectLiteral::Property::GETTER ? 2 : 3;
      EmitSetHomeObject(expression, offset, property->GetSlot());
    }
  }
}
1326
1327
// Generates code for an object literal. The boilerplate object is created
// first (via stub or runtime), then properties are initialized in two
// phases: the "static" prefix of non-computed names (stores via StoreIC /
// Runtime::kSetProperty, accessors batched per name), followed by the
// "dynamic" suffix starting at the first computed name (defined one by one
// with DefineProperty-style runtime calls to preserve insertion order).
void FullCodeGenerator::VisitObjectLiteral(ObjectLiteral* expr) {
  Comment cmnt(masm_, "[ ObjectLiteral");

  Handle<FixedArray> constant_properties = expr->constant_properties();
  int flags = expr->ComputeFlags();
  // If any of the keys would store to the elements array, then we shouldn't
  // allow it.
  if (MustCreateObjectLiteralWithRuntime(expr)) {
    __ push(Operand(ebp, JavaScriptFrameConstants::kFunctionOffset));
    __ push(Immediate(Smi::FromInt(expr->literal_index())));
    __ push(Immediate(constant_properties));
    __ push(Immediate(Smi::FromInt(flags)));
    __ CallRuntime(Runtime::kCreateObjectLiteral);
  } else {
    __ mov(eax, Operand(ebp, JavaScriptFrameConstants::kFunctionOffset));
    __ mov(ebx, Immediate(Smi::FromInt(expr->literal_index())));
    __ mov(ecx, Immediate(constant_properties));
    __ mov(edx, Immediate(Smi::FromInt(flags)));
    FastCloneShallowObjectStub stub(isolate(), expr->properties_count());
    __ CallStub(&stub);
    RestoreContext();
  }
  PrepareForBailoutForId(expr->CreateLiteralId(), BailoutState::TOS_REGISTER);

  // If result_saved is true the result is on top of the stack. If
  // result_saved is false the result is in eax.
  bool result_saved = false;

  // Accessors of the same name are merged so getter and setter are defined
  // with a single runtime call per name.
  AccessorTable accessor_table(zone());
  int property_index = 0;
  for (; property_index < expr->properties()->length(); property_index++) {
    ObjectLiteral::Property* property = expr->properties()->at(property_index);
    if (property->is_computed_name()) break;  // Phase 2 handles the rest.
    if (property->IsCompileTimeValue()) continue;

    Literal* key = property->key()->AsLiteral();
    Expression* value = property->value();
    if (!result_saved) {
      PushOperand(eax);  // Save result on the stack
      result_saved = true;
    }
    switch (property->kind()) {
      case ObjectLiteral::Property::CONSTANT:
        UNREACHABLE();
      case ObjectLiteral::Property::MATERIALIZED_LITERAL:
        DCHECK(!CompileTimeValue::IsCompileTimeValue(value));
        // Fall through.
      case ObjectLiteral::Property::COMPUTED:
        // It is safe to use [[Put]] here because the boilerplate already
        // contains computed properties with an uninitialized value.
        if (key->value()->IsInternalizedString()) {
          if (property->emit_store()) {
            VisitForAccumulatorValue(value);
            DCHECK(StoreDescriptor::ValueRegister().is(eax));
            __ mov(StoreDescriptor::NameRegister(), Immediate(key->value()));
            __ mov(StoreDescriptor::ReceiverRegister(), Operand(esp, 0));
            EmitLoadStoreICSlot(property->GetSlot(0));
            CallStoreIC();
            PrepareForBailoutForId(key->id(), BailoutState::NO_REGISTERS);
            if (NeedsHomeObject(value)) {
              EmitSetHomeObjectAccumulator(value, 0, property->GetSlot(1));
            }
          } else {
            VisitForEffect(value);
          }
          break;
        }
        PushOperand(Operand(esp, 0));  // Duplicate receiver.
        VisitForStackValue(key);
        VisitForStackValue(value);
        if (property->emit_store()) {
          if (NeedsHomeObject(value)) {
            EmitSetHomeObject(value, 2, property->GetSlot());
          }
          PushOperand(Smi::FromInt(SLOPPY));  // Language mode
          CallRuntimeWithOperands(Runtime::kSetProperty);
        } else {
          DropOperands(3);
        }
        break;
      case ObjectLiteral::Property::PROTOTYPE:
        PushOperand(Operand(esp, 0));  // Duplicate receiver.
        VisitForStackValue(value);
        DCHECK(property->emit_store());
        CallRuntimeWithOperands(Runtime::kInternalSetPrototype);
        PrepareForBailoutForId(expr->GetIdForPropertySet(property_index),
                               BailoutState::NO_REGISTERS);
        break;
      case ObjectLiteral::Property::GETTER:
        if (property->emit_store()) {
          accessor_table.lookup(key)->second->getter = property;
        }
        break;
      case ObjectLiteral::Property::SETTER:
        if (property->emit_store()) {
          accessor_table.lookup(key)->second->setter = property;
        }
        break;
    }
  }

  // Emit code to define accessors, using only a single call to the runtime for
  // each pair of corresponding getters and setters.
  for (AccessorTable::Iterator it = accessor_table.begin();
       it != accessor_table.end();
       ++it) {
    PushOperand(Operand(esp, 0));  // Duplicate receiver.
    VisitForStackValue(it->first);

    EmitAccessor(it->second->getter);
    EmitAccessor(it->second->setter);

    PushOperand(Smi::FromInt(NONE));
    CallRuntimeWithOperands(Runtime::kDefineAccessorPropertyUnchecked);
  }

  // Object literals have two parts. The "static" part on the left contains no
  // computed property names, and so we can compute its map ahead of time; see
  // runtime.cc::CreateObjectLiteralBoilerplate. The second "dynamic" part
  // starts with the first computed property name, and continues with all
  // properties to its right. All the code from above initializes the static
  // component of the object literal, and arranges for the map of the result to
  // reflect the static order in which the keys appear. For the dynamic
  // properties, we compile them into a series of "SetOwnProperty" runtime
  // calls. This will preserve insertion order.
  for (; property_index < expr->properties()->length(); property_index++) {
    ObjectLiteral::Property* property = expr->properties()->at(property_index);

    Expression* value = property->value();
    if (!result_saved) {
      PushOperand(eax);  // Save result on the stack
      result_saved = true;
    }

    PushOperand(Operand(esp, 0));  // Duplicate receiver.

    if (property->kind() == ObjectLiteral::Property::PROTOTYPE) {
      DCHECK(!property->is_computed_name());
      VisitForStackValue(value);
      DCHECK(property->emit_store());
      CallRuntimeWithOperands(Runtime::kInternalSetPrototype);
      PrepareForBailoutForId(expr->GetIdForPropertySet(property_index),
                             BailoutState::NO_REGISTERS);
    } else {
      EmitPropertyKey(property, expr->GetIdForPropertyName(property_index));
      VisitForStackValue(value);
      if (NeedsHomeObject(value)) {
        EmitSetHomeObject(value, 2, property->GetSlot());
      }

      switch (property->kind()) {
        case ObjectLiteral::Property::CONSTANT:
        case ObjectLiteral::Property::MATERIALIZED_LITERAL:
        case ObjectLiteral::Property::COMPUTED:
          if (property->emit_store()) {
            PushOperand(Smi::FromInt(NONE));
            PushOperand(Smi::FromInt(property->NeedsSetFunctionName()));
            CallRuntimeWithOperands(Runtime::kDefineDataPropertyInLiteral);
          } else {
            DropOperands(3);
          }
          break;

        case ObjectLiteral::Property::PROTOTYPE:
          UNREACHABLE();
          break;

        case ObjectLiteral::Property::GETTER:
          PushOperand(Smi::FromInt(NONE));
          CallRuntimeWithOperands(Runtime::kDefineGetterPropertyUnchecked);
          break;

        case ObjectLiteral::Property::SETTER:
          PushOperand(Smi::FromInt(NONE));
          CallRuntimeWithOperands(Runtime::kDefineSetterPropertyUnchecked);
          break;
      }
    }
  }

  if (result_saved) {
    context()->PlugTOS();
  } else {
    context()->Plug(eax);
  }
}
1514
1515
// Generates code for an array literal. The boilerplate array is cloned
// first (stub or runtime), then non-constant subexpressions are stored into
// it. Elements before the first spread use KeyedStoreIC at known indices;
// any elements from the first spread on are appended one by one via
// Runtime::kAppendElement, since a spread's length is unknown at compile
// time.
void FullCodeGenerator::VisitArrayLiteral(ArrayLiteral* expr) {
  Comment cmnt(masm_, "[ ArrayLiteral");

  Handle<FixedArray> constant_elements = expr->constant_elements();
  bool has_constant_fast_elements =
      IsFastObjectElementsKind(expr->constant_elements_kind());

  AllocationSiteMode allocation_site_mode = TRACK_ALLOCATION_SITE;
  if (has_constant_fast_elements && !FLAG_allocation_site_pretenuring) {
    // If the only customer of allocation sites is transitioning, then
    // we can turn it off if we don't have anywhere else to transition to.
    allocation_site_mode = DONT_TRACK_ALLOCATION_SITE;
  }

  if (MustCreateArrayLiteralWithRuntime(expr)) {
    __ push(Operand(ebp, JavaScriptFrameConstants::kFunctionOffset));
    __ push(Immediate(Smi::FromInt(expr->literal_index())));
    __ push(Immediate(constant_elements));
    __ push(Immediate(Smi::FromInt(expr->ComputeFlags())));
    __ CallRuntime(Runtime::kCreateArrayLiteral);
  } else {
    __ mov(eax, Operand(ebp, JavaScriptFrameConstants::kFunctionOffset));
    __ mov(ebx, Immediate(Smi::FromInt(expr->literal_index())));
    __ mov(ecx, Immediate(constant_elements));
    FastCloneShallowArrayStub stub(isolate(), allocation_site_mode);
    __ CallStub(&stub);
  }
  PrepareForBailoutForId(expr->CreateLiteralId(), BailoutState::TOS_REGISTER);

  bool result_saved = false;  // Is the result saved to the stack?
  ZoneList<Expression*>* subexprs = expr->values();
  int length = subexprs->length();

  // Emit code to evaluate all the non-constant subexpressions and to store
  // them into the newly cloned array.
  int array_index = 0;
  for (; array_index < length; array_index++) {
    Expression* subexpr = subexprs->at(array_index);
    DCHECK(!subexpr->IsSpread());

    // If the subexpression is a literal or a simple materialized literal it
    // is already set in the cloned array.
    if (CompileTimeValue::IsCompileTimeValue(subexpr)) continue;

    if (!result_saved) {
      PushOperand(eax);  // array literal.
      result_saved = true;
    }
    VisitForAccumulatorValue(subexpr);

    // Store the element at its known index via a KeyedStoreIC.
    __ mov(StoreDescriptor::NameRegister(),
           Immediate(Smi::FromInt(array_index)));
    __ mov(StoreDescriptor::ReceiverRegister(), Operand(esp, 0));
    EmitLoadStoreICSlot(expr->LiteralFeedbackSlot());
    Handle<Code> ic =
        CodeFactory::KeyedStoreIC(isolate(), language_mode()).code();
    CallIC(ic);
    PrepareForBailoutForId(expr->GetIdForElement(array_index),
                           BailoutState::NO_REGISTERS);
  }

  // In case the array literal contains spread expressions it has two parts. The
  // first part is the "static" array which has a literal index is handled
  // above. The second part is the part after the first spread expression
  // (inclusive) and these elements gets appended to the array. Note that the
  // number elements an iterable produces is unknown ahead of time.
  if (array_index < length && result_saved) {
    PopOperand(eax);
    result_saved = false;
  }
  for (; array_index < length; array_index++) {
    Expression* subexpr = subexprs->at(array_index);

    PushOperand(eax);
    DCHECK(!subexpr->IsSpread());
    VisitForStackValue(subexpr);
    CallRuntimeWithOperands(Runtime::kAppendElement);

    PrepareForBailoutForId(expr->GetIdForElement(array_index),
                           BailoutState::NO_REGISTERS);
  }

  if (result_saved) {
    context()->PlugTOS();
  } else {
    context()->Plug(eax);
  }
}
1604
1605
// Emits code for a (possibly compound) assignment expression, in three
// phases: (1) evaluate the LHS subexpressions (receiver / home_object /
// key) onto the operand stack according to the assignment kind; (2) for
// compound assignments, load the current value (registering a deopt
// bailout point after each load), evaluate the RHS and apply the binary
// op — via the inline smi fast path when ShouldInlineSmiCase allows —
// with another bailout after the op; (3) dispatch to the store emitter
// matching the assignment kind. The result value ends up in eax (or
// result_register()) and is plugged into the current expression context.
1606void FullCodeGenerator::VisitAssignment(Assignment* expr) {
1607 DCHECK(expr->target()->IsValidReferenceExpressionOrThis());
1608
1609 Comment cmnt(masm_, "[ Assignment");
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00001610
1611 Property* property = expr->target()->AsProperty();
1612 LhsKind assign_type = Property::GetAssignType(property);
1613
1614 // Evaluate LHS expression.
1615 switch (assign_type) {
1616 case VARIABLE:
1617 // Nothing to do here.
1618 break;
1619 case NAMED_SUPER_PROPERTY:
1620 VisitForStackValue(
1621 property->obj()->AsSuperPropertyReference()->this_var());
1622 VisitForAccumulatorValue(
1623 property->obj()->AsSuperPropertyReference()->home_object());
Ben Murdoch097c5b22016-05-18 11:27:45 +01001624 PushOperand(result_register());
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00001625 if (expr->is_compound()) {
// Compound assignment also needs this/home_object duplicated for the
// upcoming EmitNamedSuperPropertyLoad.
Ben Murdoch097c5b22016-05-18 11:27:45 +01001626 PushOperand(MemOperand(esp, kPointerSize));
1627 PushOperand(result_register());
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00001628 }
1629 break;
1630 case NAMED_PROPERTY:
1631 if (expr->is_compound()) {
1632 // We need the receiver both on the stack and in the register.
1633 VisitForStackValue(property->obj());
1634 __ mov(LoadDescriptor::ReceiverRegister(), Operand(esp, 0));
1635 } else {
1636 VisitForStackValue(property->obj());
1637 }
1638 break;
1639 case KEYED_SUPER_PROPERTY:
1640 VisitForStackValue(
1641 property->obj()->AsSuperPropertyReference()->this_var());
1642 VisitForStackValue(
1643 property->obj()->AsSuperPropertyReference()->home_object());
1644 VisitForAccumulatorValue(property->key());
Ben Murdoch097c5b22016-05-18 11:27:45 +01001645 PushOperand(result_register());
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00001646 if (expr->is_compound()) {
// Duplicate this/home_object/key for the keyed super load.
Ben Murdoch097c5b22016-05-18 11:27:45 +01001647 PushOperand(MemOperand(esp, 2 * kPointerSize));
1648 PushOperand(MemOperand(esp, 2 * kPointerSize));
1649 PushOperand(result_register());
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00001650 }
1651 break;
1652 case KEYED_PROPERTY: {
1653 if (expr->is_compound()) {
1654 VisitForStackValue(property->obj());
1655 VisitForStackValue(property->key());
// Receiver and key stay on the stack AND in the load registers.
1656 __ mov(LoadDescriptor::ReceiverRegister(), Operand(esp, kPointerSize));
1657 __ mov(LoadDescriptor::NameRegister(), Operand(esp, 0));
1658 } else {
1659 VisitForStackValue(property->obj());
1660 VisitForStackValue(property->key());
1661 }
1662 break;
1663 }
1664 }
1665
1666 // For compound assignments we need another deoptimization point after the
1667 // variable/property load.
1668 if (expr->is_compound()) {
1669 AccumulatorValueContext result_context(this);
1670 { AccumulatorValueContext left_operand_context(this);
1671 switch (assign_type) {
1672 case VARIABLE:
1673 EmitVariableLoad(expr->target()->AsVariableProxy());
Ben Murdochc5610432016-08-08 18:44:38 +01001674 PrepareForBailout(expr->target(), BailoutState::TOS_REGISTER);
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00001675 break;
1676 case NAMED_SUPER_PROPERTY:
1677 EmitNamedSuperPropertyLoad(property);
Ben Murdochc5610432016-08-08 18:44:38 +01001678 PrepareForBailoutForId(property->LoadId(),
1679 BailoutState::TOS_REGISTER);
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00001680 break;
1681 case NAMED_PROPERTY:
1682 EmitNamedPropertyLoad(property);
Ben Murdochc5610432016-08-08 18:44:38 +01001683 PrepareForBailoutForId(property->LoadId(),
1684 BailoutState::TOS_REGISTER);
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00001685 break;
1686 case KEYED_SUPER_PROPERTY:
1687 EmitKeyedSuperPropertyLoad(property);
Ben Murdochc5610432016-08-08 18:44:38 +01001688 PrepareForBailoutForId(property->LoadId(),
1689 BailoutState::TOS_REGISTER);
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00001690 break;
1691 case KEYED_PROPERTY:
1692 EmitKeyedPropertyLoad(property);
Ben Murdochc5610432016-08-08 18:44:38 +01001693 PrepareForBailoutForId(property->LoadId(),
1694 BailoutState::TOS_REGISTER);
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00001695 break;
1696 }
1697 }
1698
1699 Token::Value op = expr->binary_op();
Ben Murdoch097c5b22016-05-18 11:27:45 +01001700 PushOperand(eax); // Left operand goes on the stack.
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00001701 VisitForAccumulatorValue(expr->value());
1702
1703 if (ShouldInlineSmiCase(op)) {
1704 EmitInlineSmiBinaryOp(expr->binary_operation(),
1705 op,
1706 expr->target(),
1707 expr->value());
1708 } else {
1709 EmitBinaryOp(expr->binary_operation(), op);
1710 }
1711
1712 // Deoptimization point in case the binary operation may have side effects.
Ben Murdochc5610432016-08-08 18:44:38 +01001713 PrepareForBailout(expr->binary_operation(), BailoutState::TOS_REGISTER);
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00001714 } else {
1715 VisitForAccumulatorValue(expr->value());
1716 }
1717
1718 SetExpressionPosition(expr);
1719
1720 // Store the value.
1721 switch (assign_type) {
1722 case VARIABLE:
1723 EmitVariableAssignment(expr->target()->AsVariableProxy()->var(),
1724 expr->op(), expr->AssignmentSlot());
Ben Murdochc5610432016-08-08 18:44:38 +01001725 PrepareForBailoutForId(expr->AssignmentId(), BailoutState::TOS_REGISTER);
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00001726 context()->Plug(eax);
1727 break;
1728 case NAMED_PROPERTY:
1729 EmitNamedPropertyAssignment(expr);
1730 break;
1731 case NAMED_SUPER_PROPERTY:
1732 EmitNamedSuperPropertyStore(property);
1733 context()->Plug(result_register());
1734 break;
1735 case KEYED_SUPER_PROPERTY:
1736 EmitKeyedSuperPropertyStore(property);
1737 context()->Plug(result_register());
1738 break;
1739 case KEYED_PROPERTY:
1740 EmitKeyedPropertyAssignment(expr);
1741 break;
1742 }
1743}
1744
1745
// Emits a generator yield. The yielded value is evaluated onto the
// operand stack, the generator object's continuation offset and current
// context are stored into it (with a write barrier), and the frame is
// suspended through Runtime::kSuspendJSGeneratorObject — skipped when
// esp already equals the expression-stack base, i.e. there is nothing
// extra to save. At the recorded continuation point the resume mode read
// from the generator selects: < kReturn resumes normally, == kReturn
// builds a {value, done:true} iterator result and unwinds/returns,
// > kReturn (throw) calls Runtime::kThrow with the input value.
1746void FullCodeGenerator::VisitYield(Yield* expr) {
1747 Comment cmnt(masm_, "[ Yield");
1748 SetExpressionPosition(expr);
1749
1750 // Evaluate yielded value first; the initial iterator definition depends on
1751 // this. It stays on the stack while we update the iterator.
1752 VisitForStackValue(expr->expression());
1753
Ben Murdochc5610432016-08-08 18:44:38 +01001754 Label suspend, continuation, post_runtime, resume, exception;
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00001755
Ben Murdochda12d292016-06-02 14:46:10 +01001756 __ jmp(&suspend);
1757 __ bind(&continuation);
Ben Murdochc5610432016-08-08 18:44:38 +01001758 // When we arrive here, eax holds the generator object.
Ben Murdochda12d292016-06-02 14:46:10 +01001759 __ RecordGeneratorContinuation();
Ben Murdochc5610432016-08-08 18:44:38 +01001760 __ mov(ebx, FieldOperand(eax, JSGeneratorObject::kResumeModeOffset));
1761 __ mov(eax, FieldOperand(eax, JSGeneratorObject::kInputOffset));
// The ordering asserts below let a single three-way compare against
// kReturn distinguish next (<), return (==) and throw (>).
1762 STATIC_ASSERT(JSGeneratorObject::kNext < JSGeneratorObject::kReturn);
1763 STATIC_ASSERT(JSGeneratorObject::kThrow > JSGeneratorObject::kReturn);
1764 __ cmp(ebx, Immediate(Smi::FromInt(JSGeneratorObject::kReturn)));
1765 __ j(less, &resume);
1766 __ Push(result_register());
1767 __ j(greater, &exception);
Ben Murdochda12d292016-06-02 14:46:10 +01001768 EmitCreateIteratorResult(true);
1769 EmitUnwindAndReturn();
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00001770
Ben Murdochc5610432016-08-08 18:44:38 +01001771 __ bind(&exception);
1772 __ CallRuntime(Runtime::kThrow);
1773
Ben Murdochda12d292016-06-02 14:46:10 +01001774 __ bind(&suspend);
1775 OperandStackDepthIncrement(1); // Not popped on this path.
1776 VisitForAccumulatorValue(expr->generator_object());
1777 DCHECK(continuation.pos() > 0 && Smi::IsValid(continuation.pos()));
1778 __ mov(FieldOperand(eax, JSGeneratorObject::kContinuationOffset),
1779 Immediate(Smi::FromInt(continuation.pos())));
1780 __ mov(FieldOperand(eax, JSGeneratorObject::kContextOffset), esi);
1781 __ mov(ecx, esi);
1782 __ RecordWriteField(eax, JSGeneratorObject::kContextOffset, ecx, edx,
1783 kDontSaveFPRegs);
1784 __ lea(ebx, Operand(ebp, StandardFrameConstants::kExpressionsOffset));
1785 __ cmp(esp, ebx);
1786 __ j(equal, &post_runtime);
1787 __ push(eax); // generator object
1788 __ CallRuntime(Runtime::kSuspendJSGeneratorObject, 1);
Ben Murdochc5610432016-08-08 18:44:38 +01001789 RestoreContext();
Ben Murdochda12d292016-06-02 14:46:10 +01001790 __ bind(&post_runtime);
1791 PopOperand(result_register());
1792 EmitReturnSequence();
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00001793
Ben Murdochda12d292016-06-02 14:46:10 +01001794 __ bind(&resume);
1795 context()->Plug(result_register());
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00001796}
1797
// Pushes |operand| onto the machine stack and bumps the tracked operand
// stack depth by one so EmitOperandStackDepthCheck stays consistent.
Ben Murdoch097c5b22016-05-18 11:27:45 +01001798void FullCodeGenerator::PushOperand(MemOperand operand) {
1799 OperandStackDepthIncrement(1);
1800 __ Push(operand);
1801}
1802
// Debug-code-only sanity check: asserts that the tracked
// operand_stack_depth_ matches the actual ebp-esp distance (fixed frame
// plus one word per tracked operand). Clobbers eax; emits nothing unless
// --debug-code is on.
1803void FullCodeGenerator::EmitOperandStackDepthCheck() {
1804 if (FLAG_debug_code) {
1805 int expected_diff = StandardFrameConstants::kFixedFrameSizeFromFp +
1806 operand_stack_depth_ * kPointerSize;
1807 __ mov(eax, ebp);
1808 __ sub(eax, esp);
1809 __ cmp(eax, Immediate(expected_diff));
1810 __ Assert(equal, kUnexpectedStackDepth);
1811 }
1812}
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00001813
// Materializes a JSIteratorResult {value, done}. Tries an inline
// new-space allocation (ecx/edx as scratch) with a runtime fallback to
// Runtime::kAllocateInNewSpace, then writes the iterator-result map and
// empty properties/elements, pops the value off the operand stack
// directly into the 'value' field, and stores the compile-time |done|
// boolean. The result object is left in eax.
1814void FullCodeGenerator::EmitCreateIteratorResult(bool done) {
1815 Label allocate, done_allocate;
1816
Ben Murdochc5610432016-08-08 18:44:38 +01001817 __ Allocate(JSIteratorResult::kSize, eax, ecx, edx, &allocate,
1818 NO_ALLOCATION_FLAGS);
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00001819 __ jmp(&done_allocate, Label::kNear);
1820
1821 __ bind(&allocate);
1822 __ Push(Smi::FromInt(JSIteratorResult::kSize));
1823 __ CallRuntime(Runtime::kAllocateInNewSpace);
1824
1825 __ bind(&done_allocate);
1826 __ mov(ebx, NativeContextOperand());
1827 __ mov(ebx, ContextOperand(ebx, Context::ITERATOR_RESULT_MAP_INDEX));
1828 __ mov(FieldOperand(eax, HeapObject::kMapOffset), ebx);
1829 __ mov(FieldOperand(eax, JSObject::kPropertiesOffset),
1830 isolate()->factory()->empty_fixed_array());
1831 __ mov(FieldOperand(eax, JSObject::kElementsOffset),
1832 isolate()->factory()->empty_fixed_array());
// The value slot is consumed straight off the operand stack; the depth
// decrement below accounts for it.
1833 __ pop(FieldOperand(eax, JSIteratorResult::kValueOffset));
1834 __ mov(FieldOperand(eax, JSIteratorResult::kDoneOffset),
1835 isolate()->factory()->ToBoolean(done));
1836 STATIC_ASSERT(JSIteratorResult::kSize == 5 * kPointerSize);
Ben Murdochda12d292016-06-02 14:46:10 +01001837 OperandStackDepthDecrement(1);
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00001838}
1839
1840
// Inline smi fast path for a binary operation. On entry the left operand
// is on top of the operand stack (popped into edx) and the right operand
// is in eax. A combined smi-tag test on (left | right) guards the fast
// path; the test site is a JumpPatchSite so the IC machinery can later
// repatch the branch sense. Non-smi operands — and smi operations that
// overflow or would leave a non-smi result — fall back to a BinaryOpIC
// stub call (left in edx, right in eax via the ecx save). The result is
// left in eax and plugged into the current context.
1841void FullCodeGenerator::EmitInlineSmiBinaryOp(BinaryOperation* expr,
1842 Token::Value op,
1843 Expression* left,
1844 Expression* right) {
1845 // Do combined smi check of the operands. Left operand is on the
1846 // stack. Right operand is in eax.
1847 Label smi_case, done, stub_call;
Ben Murdoch097c5b22016-05-18 11:27:45 +01001848 PopOperand(edx);
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00001849 __ mov(ecx, eax);
1850 __ or_(eax, edx);
1851 JumpPatchSite patch_site(masm_);
1852 patch_site.EmitJumpIfSmi(eax, &smi_case, Label::kNear);
1853
1854 __ bind(&stub_call);
1855 __ mov(eax, ecx);
Ben Murdoch097c5b22016-05-18 11:27:45 +01001856 Handle<Code> code = CodeFactory::BinaryOpIC(isolate(), op).code();
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00001857 CallIC(code, expr->BinaryOperationFeedbackId());
1858 patch_site.EmitPatchInfo();
1859 __ jmp(&done, Label::kNear);
1860
1861 // Smi case.
1862 __ bind(&smi_case);
1863 __ mov(eax, edx); // Copy left operand in case of a stub call.
1864
1865 switch (op) {
1866 case Token::SAR:
1867 __ SmiUntag(ecx);
1868 __ sar_cl(eax); // No checks of result necessary
// Clear any tag bit shifted into the low bit.
1869 __ and_(eax, Immediate(~kSmiTagMask));
1870 break;
1871 case Token::SHL: {
1872 Label result_ok;
1873 __ SmiUntag(eax);
1874 __ SmiUntag(ecx);
1875 __ shl_cl(eax);
1876 // Check that the *signed* result fits in a smi.
1877 __ cmp(eax, 0xc0000000);
1878 __ j(positive, &result_ok);
// Re-tag the shift count before taking the stub path, which expects
// tagged operands.
1879 __ SmiTag(ecx);
1880 __ jmp(&stub_call);
1881 __ bind(&result_ok);
1882 __ SmiTag(eax);
1883 break;
1884 }
1885 case Token::SHR: {
1886 Label result_ok;
1887 __ SmiUntag(eax);
1888 __ SmiUntag(ecx);
1889 __ shr_cl(eax);
// Unsigned result must not touch the top two bits or it cannot be
// re-tagged as a positive smi.
1890 __ test(eax, Immediate(0xc0000000));
1891 __ j(zero, &result_ok);
1892 __ SmiTag(ecx);
1893 __ jmp(&stub_call);
1894 __ bind(&result_ok);
1895 __ SmiTag(eax);
1896 break;
1897 }
1898 case Token::ADD:
1899 __ add(eax, ecx);
1900 __ j(overflow, &stub_call);
1901 break;
1902 case Token::SUB:
1903 __ sub(eax, ecx);
1904 __ j(overflow, &stub_call);
1905 break;
1906 case Token::MUL: {
1907 __ SmiUntag(eax);
1908 __ imul(eax, ecx);
1909 __ j(overflow, &stub_call);
// Zero product: if either input was negative the exact result is -0,
// which a smi cannot represent, so fall back to the stub.
1910 __ test(eax, eax);
1911 __ j(not_zero, &done, Label::kNear);
1912 __ mov(ebx, edx);
1913 __ or_(ebx, ecx);
1914 __ j(negative, &stub_call);
1915 break;
1916 }
1917 case Token::BIT_OR:
1918 __ or_(eax, ecx);
1919 break;
1920 case Token::BIT_AND:
1921 __ and_(eax, ecx);
1922 break;
1923 case Token::BIT_XOR:
1924 __ xor_(eax, ecx);
1925 break;
1926 default:
1927 UNREACHABLE();
1928 }
1929
1930 __ bind(&done);
1931 context()->Plug(eax);
1932}
1933
1934
// Defines each class-literal member on its target object: the
// constructor (esp[kPointerSize]) for static members, the prototype
// (esp[0]) otherwise. Each property is installed non-enumerable via the
// matching DefineData/Getter/SetterProperty runtime entry; computed
// static keys are first vetted with Runtime::kThrowIfStaticPrototype
// (the read-only "prototype" case). Values that need a [[HomeObject]]
// get it set before installation.
1935void FullCodeGenerator::EmitClassDefineProperties(ClassLiteral* lit) {
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00001936 for (int i = 0; i < lit->properties()->length(); i++) {
1937 ObjectLiteral::Property* property = lit->properties()->at(i);
1938 Expression* value = property->value();
1939
1940 if (property->is_static()) {
Ben Murdoch097c5b22016-05-18 11:27:45 +01001941 PushOperand(Operand(esp, kPointerSize)); // constructor
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00001942 } else {
Ben Murdoch097c5b22016-05-18 11:27:45 +01001943 PushOperand(Operand(esp, 0)); // prototype
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00001944 }
1945 EmitPropertyKey(property, lit->GetIdForProperty(i));
1946
1947 // The static prototype property is read only. We handle the non computed
1948 // property name case in the parser. Since this is the only case where we
1949 // need to check for an own read only property we special case this so we do
1950 // not need to do this for every property.
1951 if (property->is_static() && property->is_computed_name()) {
1952 __ CallRuntime(Runtime::kThrowIfStaticPrototype);
1953 __ push(eax);
1954 }
1955
1956 VisitForStackValue(value);
1957 if (NeedsHomeObject(value)) {
1958 EmitSetHomeObject(value, 2, property->GetSlot());
1959 }
1960
1961 switch (property->kind()) {
1962 case ObjectLiteral::Property::CONSTANT:
1963 case ObjectLiteral::Property::MATERIALIZED_LITERAL:
1964 case ObjectLiteral::Property::PROTOTYPE:
1965 UNREACHABLE();
1966 case ObjectLiteral::Property::COMPUTED:
Ben Murdoch097c5b22016-05-18 11:27:45 +01001967 PushOperand(Smi::FromInt(DONT_ENUM));
1968 PushOperand(Smi::FromInt(property->NeedsSetFunctionName()));
1969 CallRuntimeWithOperands(Runtime::kDefineDataPropertyInLiteral);
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00001970 break;
1971
1972 case ObjectLiteral::Property::GETTER:
Ben Murdoch097c5b22016-05-18 11:27:45 +01001973 PushOperand(Smi::FromInt(DONT_ENUM));
1974 CallRuntimeWithOperands(Runtime::kDefineGetterPropertyUnchecked);
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00001975 break;
1976
1977 case ObjectLiteral::Property::SETTER:
Ben Murdoch097c5b22016-05-18 11:27:45 +01001978 PushOperand(Smi::FromInt(DONT_ENUM));
1979 CallRuntimeWithOperands(Runtime::kDefineSetterPropertyUnchecked);
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00001980 break;
1981 }
1982 }
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00001983}
1984
1985
// Generic (non-inlined) binary operation: pops the left operand into edx
// (right operand already in eax) and calls the BinaryOpIC stub. The
// JumpPatchSite stays unbound, so EmitPatchInfo emits the nop marker
// meaning "no inlined smi code to patch". Result plugged from eax.
1986void FullCodeGenerator::EmitBinaryOp(BinaryOperation* expr, Token::Value op) {
Ben Murdoch097c5b22016-05-18 11:27:45 +01001987 PopOperand(edx);
1988 Handle<Code> code = CodeFactory::BinaryOpIC(isolate(), op).code();
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00001989 JumpPatchSite patch_site(masm_); // unbound, signals no inlined smi code.
1990 CallIC(code, expr->BinaryOperationFeedbackId());
1991 patch_site.EmitPatchInfo();
1992 context()->Plug(eax);
1993}
1994
1995
// Stores the value currently in eax into the reference expression |expr|
// as a plain Token::ASSIGN store with feedback |slot|. For property
// stores the value is parked on the operand stack while receiver (and
// key) are evaluated; super-property stores additionally shuffle stack
// slots into the this/home_object[/key] layout the store helpers expect,
// with the value back in eax. The value is finally plugged into the
// current context from eax.
1996void FullCodeGenerator::EmitAssignment(Expression* expr,
1997 FeedbackVectorSlot slot) {
1998 DCHECK(expr->IsValidReferenceExpressionOrThis());
1999
2000 Property* prop = expr->AsProperty();
2001 LhsKind assign_type = Property::GetAssignType(prop);
2002
2003 switch (assign_type) {
2004 case VARIABLE: {
2005 Variable* var = expr->AsVariableProxy()->var();
2006 EffectContext context(this);
2007 EmitVariableAssignment(var, Token::ASSIGN, slot);
2008 break;
2009 }
2010 case NAMED_PROPERTY: {
Ben Murdoch097c5b22016-05-18 11:27:45 +01002011 PushOperand(eax); // Preserve value.
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00002012 VisitForAccumulatorValue(prop->obj());
2013 __ Move(StoreDescriptor::ReceiverRegister(), eax);
Ben Murdoch097c5b22016-05-18 11:27:45 +01002014 PopOperand(StoreDescriptor::ValueRegister()); // Restore value.
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00002015 __ mov(StoreDescriptor::NameRegister(),
2016 prop->key()->AsLiteral()->value());
2017 EmitLoadStoreICSlot(slot);
2018 CallStoreIC();
2019 break;
2020 }
2021 case NAMED_SUPER_PROPERTY: {
Ben Murdoch097c5b22016-05-18 11:27:45 +01002022 PushOperand(eax);
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00002023 VisitForStackValue(prop->obj()->AsSuperPropertyReference()->this_var());
2024 VisitForAccumulatorValue(
2025 prop->obj()->AsSuperPropertyReference()->home_object());
2026 // stack: value, this; eax: home_object
2027 Register scratch = ecx;
2028 Register scratch2 = edx;
2029 __ mov(scratch, result_register()); // home_object
2030 __ mov(eax, MemOperand(esp, kPointerSize)); // value
2031 __ mov(scratch2, MemOperand(esp, 0)); // this
2032 __ mov(MemOperand(esp, kPointerSize), scratch2); // this
2033 __ mov(MemOperand(esp, 0), scratch); // home_object
2034 // stack: this, home_object. eax: value
2035 EmitNamedSuperPropertyStore(prop);
2036 break;
2037 }
2038 case KEYED_SUPER_PROPERTY: {
Ben Murdoch097c5b22016-05-18 11:27:45 +01002039 PushOperand(eax);
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00002040 VisitForStackValue(prop->obj()->AsSuperPropertyReference()->this_var());
2041 VisitForStackValue(
2042 prop->obj()->AsSuperPropertyReference()->home_object());
2043 VisitForAccumulatorValue(prop->key());
2044 Register scratch = ecx;
2045 Register scratch2 = edx;
2046 __ mov(scratch2, MemOperand(esp, 2 * kPointerSize)); // value
2047 // stack: value, this, home_object; eax: key, edx: value
2048 __ mov(scratch, MemOperand(esp, kPointerSize)); // this
2049 __ mov(MemOperand(esp, 2 * kPointerSize), scratch);
2050 __ mov(scratch, MemOperand(esp, 0)); // home_object
2051 __ mov(MemOperand(esp, kPointerSize), scratch);
2052 __ mov(MemOperand(esp, 0), eax);
2053 __ mov(eax, scratch2);
2054 // stack: this, home_object, key; eax: value.
2055 EmitKeyedSuperPropertyStore(prop);
2056 break;
2057 }
2058 case KEYED_PROPERTY: {
Ben Murdoch097c5b22016-05-18 11:27:45 +01002059 PushOperand(eax); // Preserve value.
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00002060 VisitForStackValue(prop->obj());
2061 VisitForAccumulatorValue(prop->key());
2062 __ Move(StoreDescriptor::NameRegister(), eax);
Ben Murdoch097c5b22016-05-18 11:27:45 +01002063 PopOperand(StoreDescriptor::ReceiverRegister()); // Receiver.
2064 PopOperand(StoreDescriptor::ValueRegister()); // Restore value.
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00002065 EmitLoadStoreICSlot(slot);
2066 Handle<Code> ic =
2067 CodeFactory::KeyedStoreIC(isolate(), language_mode()).code();
2068 CallIC(ic);
2069 break;
2070 }
2071 }
2072 context()->Plug(eax);
2073}
2074
2075
// Writes eax into the variable's stack or context slot. Context slots
// additionally get a record-write barrier; ecx must hold the context
// object (callers in this file establish that via VarOperand(var, ecx)),
// and edx/ebx are clobbered as scratch.
2076void FullCodeGenerator::EmitStoreToStackLocalOrContextSlot(
2077 Variable* var, MemOperand location) {
2078 __ mov(location, eax);
2079 if (var->IsContextSlot()) {
2080 __ mov(edx, eax);
2081 int offset = Context::SlotOffset(var->index());
2082 __ RecordWriteContextSlot(ecx, offset, edx, ebx, kDontSaveFPRegs);
2083 }
2084}
2085
2086
// Assigns the value in eax to |var|, dispatching on location and mode:
// - unallocated (global) variables go through the StoreIC;
// - non-initializing stores to LET hole-check and throw ReferenceError
//   for use-before-init, then store;
// - non-initializing stores to CONST hole-check (ReferenceError) and
//   then unconditionally throw ConstAssignError;
// - initializing a const 'this' verifies the slot still holds the hole;
// - remaining legal stores use the StoreLookupSlot runtime for lookup
//   slots, or a direct stack/context-slot store (with a debug-only
//   re-initialization check for LET);
// - legacy const non-init stores throw only in strict mode and are
//   silently ignored in sloppy mode.
2087void FullCodeGenerator::EmitVariableAssignment(Variable* var, Token::Value op,
2088 FeedbackVectorSlot slot) {
2089 if (var->IsUnallocated()) {
2090 // Global var, const, or let.
2091 __ mov(StoreDescriptor::NameRegister(), var->name());
2092 __ mov(StoreDescriptor::ReceiverRegister(), NativeContextOperand());
2093 __ mov(StoreDescriptor::ReceiverRegister(),
2094 ContextOperand(StoreDescriptor::ReceiverRegister(),
2095 Context::EXTENSION_INDEX));
2096 EmitLoadStoreICSlot(slot);
2097 CallStoreIC();
2098
2099 } else if (var->mode() == LET && op != Token::INIT) {
2100 // Non-initializing assignment to let variable needs a write barrier.
2101 DCHECK(!var->IsLookupSlot());
2102 DCHECK(var->IsStackAllocated() || var->IsContextSlot());
2103 Label assign;
2104 MemOperand location = VarOperand(var, ecx);
2105 __ mov(edx, location);
2106 __ cmp(edx, isolate()->factory()->the_hole_value());
2107 __ j(not_equal, &assign, Label::kNear);
2108 __ push(Immediate(var->name()));
2109 __ CallRuntime(Runtime::kThrowReferenceError);
2110 __ bind(&assign);
2111 EmitStoreToStackLocalOrContextSlot(var, location);
2112
2113 } else if (var->mode() == CONST && op != Token::INIT) {
2114 // Assignment to const variable needs a write barrier.
2115 DCHECK(!var->IsLookupSlot());
2116 DCHECK(var->IsStackAllocated() || var->IsContextSlot());
2117 Label const_error;
2118 MemOperand location = VarOperand(var, ecx);
2119 __ mov(edx, location);
2120 __ cmp(edx, isolate()->factory()->the_hole_value());
2121 __ j(not_equal, &const_error, Label::kNear);
2122 __ push(Immediate(var->name()));
2123 __ CallRuntime(Runtime::kThrowReferenceError);
2124 __ bind(&const_error);
2125 __ CallRuntime(Runtime::kThrowConstAssignError);
2126
2127 } else if (var->is_this() && var->mode() == CONST && op == Token::INIT) {
2128 // Initializing assignment to const {this} needs a write barrier.
2129 DCHECK(var->IsStackAllocated() || var->IsContextSlot());
2130 Label uninitialized_this;
2131 MemOperand location = VarOperand(var, ecx);
2132 __ mov(edx, location);
2133 __ cmp(edx, isolate()->factory()->the_hole_value());
2134 __ j(equal, &uninitialized_this);
2135 __ push(Immediate(var->name()));
2136 __ CallRuntime(Runtime::kThrowReferenceError);
2137 __ bind(&uninitialized_this);
2138 EmitStoreToStackLocalOrContextSlot(var, location);
2139
Ben Murdochc5610432016-08-08 18:44:38 +01002140 } else if (!var->is_const_mode() || op == Token::INIT) {
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00002141 if (var->IsLookupSlot()) {
2142 // Assignment to var.
Ben Murdoch097c5b22016-05-18 11:27:45 +01002143 __ Push(Immediate(var->name()));
2144 __ Push(eax);
2145 __ CallRuntime(is_strict(language_mode())
2146 ? Runtime::kStoreLookupSlot_Strict
2147 : Runtime::kStoreLookupSlot_Sloppy);
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00002148 } else {
2149 // Assignment to var or initializing assignment to let/const in harmony
2150 // mode.
2151 DCHECK(var->IsStackAllocated() || var->IsContextSlot());
2152 MemOperand location = VarOperand(var, ecx);
Ben Murdoch097c5b22016-05-18 11:27:45 +01002153 if (FLAG_debug_code && var->mode() == LET && op == Token::INIT) {
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00002154 // Check for an uninitialized let binding.
2155 __ mov(edx, location);
2156 __ cmp(edx, isolate()->factory()->the_hole_value());
2157 __ Check(equal, kLetBindingReInitialization);
2158 }
2159 EmitStoreToStackLocalOrContextSlot(var, location);
2160 }
2161
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00002162 } else {
2163 DCHECK(var->mode() == CONST_LEGACY && op != Token::INIT);
2164 if (is_strict(language_mode())) {
2165 __ CallRuntime(Runtime::kThrowConstAssignError);
2166 }
2167 // Silently ignore store in sloppy mode.
2168 }
2169}
2170
2171
// Completes a named-property assignment: value in eax, receiver on top
// of the operand stack (popped here). Loads the literal key, calls the
// StoreIC with the assignment's feedback slot, records a TOS-register
// bailout at the assignment id, and plugs eax.
2172void FullCodeGenerator::EmitNamedPropertyAssignment(Assignment* expr) {
2173 // Assignment to a property, using a named store IC.
2174 // eax : value
2175 // esp[0] : receiver
2176 Property* prop = expr->target()->AsProperty();
2177 DCHECK(prop != NULL);
2178 DCHECK(prop->key()->IsLiteral());
2179
2180 __ mov(StoreDescriptor::NameRegister(), prop->key()->AsLiteral()->value());
Ben Murdoch097c5b22016-05-18 11:27:45 +01002181 PopOperand(StoreDescriptor::ReceiverRegister());
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00002182 EmitLoadStoreICSlot(expr->AssignmentSlot());
2183 CallStoreIC();
Ben Murdochc5610432016-08-08 18:44:38 +01002184 PrepareForBailoutForId(expr->AssignmentId(), BailoutState::TOS_REGISTER);
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00002185 context()->Plug(eax);
2186}
2187
2188
// Stores eax into a named super property. Expects this/home_object
// already on the operand stack; pushes the literal key and the value,
// then calls the language-mode-specific StoreToSuper runtime function,
// which consumes all four operands.
2189void FullCodeGenerator::EmitNamedSuperPropertyStore(Property* prop) {
2190 // Assignment to named property of super.
2191 // eax : value
2192 // stack : receiver ('this'), home_object
2193 DCHECK(prop != NULL);
2194 Literal* key = prop->key()->AsLiteral();
2195 DCHECK(key != NULL);
2196
Ben Murdoch097c5b22016-05-18 11:27:45 +01002197 PushOperand(key->value());
2198 PushOperand(eax);
2199 CallRuntimeWithOperands(is_strict(language_mode())
2200 ? Runtime::kStoreToSuper_Strict
2201 : Runtime::kStoreToSuper_Sloppy);
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00002202}
2203
2204
// Stores eax into a keyed super property. Expects this/home_object/key
// already on the operand stack; pushes the value and calls the
// language-mode-specific StoreKeyedToSuper runtime function.
// NOTE(review): the pre-existing comment below says "named property",
// apparently copied from the named variant — this is the keyed one.
2205void FullCodeGenerator::EmitKeyedSuperPropertyStore(Property* prop) {
2206 // Assignment to named property of super.
2207 // eax : value
2208 // stack : receiver ('this'), home_object, key
2209
Ben Murdoch097c5b22016-05-18 11:27:45 +01002210 PushOperand(eax);
2211 CallRuntimeWithOperands(is_strict(language_mode())
2212 ? Runtime::kStoreKeyedToSuper_Strict
2213 : Runtime::kStoreKeyedToSuper_Sloppy);
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00002214}
2215
2216
// Completes a keyed-property assignment: pops key then receiver from the
// operand stack into the store descriptor registers (the value stays in
// eax, which the KeyedStoreIC expects), calls the IC, records a
// TOS-register bailout at the assignment id, and plugs eax.
2217void FullCodeGenerator::EmitKeyedPropertyAssignment(Assignment* expr) {
2218 // Assignment to a property, using a keyed store IC.
2219 // eax : value
2220 // esp[0] : key
2221 // esp[kPointerSize] : receiver
2222
Ben Murdoch097c5b22016-05-18 11:27:45 +01002223 PopOperand(StoreDescriptor::NameRegister()); // Key.
2224 PopOperand(StoreDescriptor::ReceiverRegister());
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00002225 DCHECK(StoreDescriptor::ValueRegister().is(eax));
2226 Handle<Code> ic =
2227 CodeFactory::KeyedStoreIC(isolate(), language_mode()).code();
2228 EmitLoadStoreICSlot(expr->AssignmentSlot());
2229 CallIC(ic);
Ben Murdochc5610432016-08-08 18:44:38 +01002230 PrepareForBailoutForId(expr->AssignmentId(), BailoutState::TOS_REGISTER);
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00002231 context()->Plug(eax);
2232}
2233
2234
// Emits a call to an IC stub, tagging the call site with |ast_id| (type
// feedback id) and bumping the per-function IC counter.
2235void FullCodeGenerator::CallIC(Handle<Code> code,
2236 TypeFeedbackId ast_id) {
2237 ic_total_count_++;
2238 __ call(code, RelocInfo::CODE_TARGET, ast_id);
2239}
2240
2241
// Emits a call whose target is either a variable or a named property.
// Variable callees are loaded onto the stack and get undefined pushed as
// the receiver (patched to the global proxy in sloppy-mode prologues);
// property callees are loaded via the named-load IC with a bailout at
// the load id, and the function is inserted under the receiver. Finally
// delegates to EmitCall with the matching receiver-conversion mode.
2242// Code common for calls using the IC.
2243void FullCodeGenerator::EmitCallWithLoadIC(Call* expr) {
2244 Expression* callee = expr->expression();
2245
2246 // Get the target function.
2247 ConvertReceiverMode convert_mode;
2248 if (callee->IsVariableProxy()) {
2249 { StackValueContext context(this);
2250 EmitVariableLoad(callee->AsVariableProxy());
Ben Murdochc5610432016-08-08 18:44:38 +01002251 PrepareForBailout(callee, BailoutState::NO_REGISTERS);
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00002252 }
2253 // Push undefined as receiver. This is patched in the method prologue if it
2254 // is a sloppy mode method.
Ben Murdoch097c5b22016-05-18 11:27:45 +01002255 PushOperand(isolate()->factory()->undefined_value());
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00002256 convert_mode = ConvertReceiverMode::kNullOrUndefined;
2257 } else {
2258 // Load the function from the receiver.
2259 DCHECK(callee->IsProperty());
2260 DCHECK(!callee->AsProperty()->IsSuperAccess());
2261 __ mov(LoadDescriptor::ReceiverRegister(), Operand(esp, 0));
2262 EmitNamedPropertyLoad(callee->AsProperty());
Ben Murdochc5610432016-08-08 18:44:38 +01002263 PrepareForBailoutForId(callee->AsProperty()->LoadId(),
2264 BailoutState::TOS_REGISTER);
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00002265 // Push the target function under the receiver.
Ben Murdoch097c5b22016-05-18 11:27:45 +01002266 PushOperand(Operand(esp, 0));
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00002267 __ mov(Operand(esp, kPointerSize), eax);
2268 convert_mode = ConvertReceiverMode::kNotNullOrUndefined;
2269 }
2270
2271 EmitCall(expr, convert_mode);
2272}
2273
2274
// Emits a call to a named super property (e.g. super.foo(...)): pushes
// this twice plus home_object and the literal key, loads the method via
// Runtime::kLoadFromSuper (which consumes the top three operands), then
// overwrites the remaining home_object slot with the target function so
// the stack holds [function, this] for EmitCall.
2275void FullCodeGenerator::EmitSuperCallWithLoadIC(Call* expr) {
2276 SetExpressionPosition(expr);
2277 Expression* callee = expr->expression();
2278 DCHECK(callee->IsProperty());
2279 Property* prop = callee->AsProperty();
2280 DCHECK(prop->IsSuperAccess());
2281
2282 Literal* key = prop->key()->AsLiteral();
2283 DCHECK(!key->value()->IsSmi());
2284 // Load the function from the receiver.
2285 SuperPropertyReference* super_ref = prop->obj()->AsSuperPropertyReference();
2286 VisitForStackValue(super_ref->home_object());
2287 VisitForAccumulatorValue(super_ref->this_var());
Ben Murdoch097c5b22016-05-18 11:27:45 +01002288 PushOperand(eax);
2289 PushOperand(eax);
2290 PushOperand(Operand(esp, kPointerSize * 2));
2291 PushOperand(key->value());
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00002292 // Stack here:
2293 // - home_object
2294 // - this (receiver)
2295 // - this (receiver) <-- LoadFromSuper will pop here and below.
2296 // - home_object
2297 // - key
Ben Murdoch097c5b22016-05-18 11:27:45 +01002298 CallRuntimeWithOperands(Runtime::kLoadFromSuper);
Ben Murdochc5610432016-08-08 18:44:38 +01002299 PrepareForBailoutForId(prop->LoadId(), BailoutState::TOS_REGISTER);
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00002300
2301 // Replace home_object with target function.
2302 __ mov(Operand(esp, kPointerSize), eax);
2303
2304 // Stack here:
2305 // - target function
2306 // - this (receiver)
2307 EmitCall(expr);
2308}
2309
2310
// Emits a call whose target is a keyed property of the receiver at
// esp[0]: evaluates the key into eax, loads the function via the keyed
// load IC (with a bailout at the load id), inserts the function under
// the receiver, and calls with kNotNullOrUndefined receiver conversion.
2311// Code common for calls using the IC.
2312void FullCodeGenerator::EmitKeyedCallWithLoadIC(Call* expr,
2313 Expression* key) {
2314 // Load the key.
2315 VisitForAccumulatorValue(key);
2316
2317 Expression* callee = expr->expression();
2318
2319 // Load the function from the receiver.
2320 DCHECK(callee->IsProperty());
2321 __ mov(LoadDescriptor::ReceiverRegister(), Operand(esp, 0));
2322 __ mov(LoadDescriptor::NameRegister(), eax);
2323 EmitKeyedPropertyLoad(callee->AsProperty());
Ben Murdochc5610432016-08-08 18:44:38 +01002324 PrepareForBailoutForId(callee->AsProperty()->LoadId(),
2325 BailoutState::TOS_REGISTER);
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00002326
2327 // Push the target function under the receiver.
Ben Murdoch097c5b22016-05-18 11:27:45 +01002328 PushOperand(Operand(esp, 0));
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00002329 __ mov(Operand(esp, kPointerSize), eax);
2330
2331 EmitCall(expr, ConvertReceiverMode::kNotNullOrUndefined);
2332}
2333
2334
// Emits a call to a keyed super property (super[key](...)): mirrors
// EmitSuperCallWithLoadIC but evaluates the key expression onto the
// stack and loads the method via Runtime::kLoadKeyedFromSuper; the
// remaining home_object slot is then replaced by the target function so
// the stack holds [function, this] for EmitCall.
2335void FullCodeGenerator::EmitKeyedSuperCallWithLoadIC(Call* expr) {
2336 Expression* callee = expr->expression();
2337 DCHECK(callee->IsProperty());
2338 Property* prop = callee->AsProperty();
2339 DCHECK(prop->IsSuperAccess());
2340
2341 SetExpressionPosition(prop);
2342 // Load the function from the receiver.
2343 SuperPropertyReference* super_ref = prop->obj()->AsSuperPropertyReference();
2344 VisitForStackValue(super_ref->home_object());
2345 VisitForAccumulatorValue(super_ref->this_var());
Ben Murdoch097c5b22016-05-18 11:27:45 +01002346 PushOperand(eax);
2347 PushOperand(eax);
2348 PushOperand(Operand(esp, kPointerSize * 2));
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00002349 VisitForStackValue(prop->key());
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00002350 // Stack here:
2351 // - home_object
2352 // - this (receiver)
2353 // - this (receiver) <-- LoadKeyedFromSuper will pop here and below.
2354 // - home_object
2355 // - key
Ben Murdoch097c5b22016-05-18 11:27:45 +01002356 CallRuntimeWithOperands(Runtime::kLoadKeyedFromSuper);
Ben Murdochc5610432016-08-08 18:44:38 +01002357 PrepareForBailoutForId(prop->LoadId(), BailoutState::TOS_REGISTER);
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00002358
2359 // Replace home_object with target function.
2360 __ mov(Operand(esp, kPointerSize), eax);
2361
2362 // Stack here:
2363 // - target function
2364 // - this (receiver)
2365 EmitCall(expr);
2366}
2367
2368
// Common tail for the various call flavors: evaluates and pushes the
// arguments, then invokes the call IC. On entry the caller has already
// placed the target function and the receiver on the operand stack.
void FullCodeGenerator::EmitCall(Call* expr, ConvertReceiverMode mode) {
  // Load the arguments.
  ZoneList<Expression*>* args = expr->arguments();
  int arg_count = args->length();
  for (int i = 0; i < arg_count; i++) {
    VisitForStackValue(args->at(i));
  }

  PrepareForBailoutForId(expr->CallId(), BailoutState::NO_REGISTERS);
  SetCallPosition(expr, expr->tail_call_mode());
  if (expr->tail_call_mode() == TailCallMode::kAllow) {
    if (FLAG_trace) {
      __ CallRuntime(Runtime::kTraceTailCall);
    }
    // Update profiling counters before the tail call since we will
    // not return to this function.
    EmitProfilingCounterHandlingForReturnSequence(true);
  }
  Handle<Code> ic =
      CodeFactory::CallIC(isolate(), arg_count, mode, expr->tail_call_mode())
          .code();
  // edx: feedback slot (as a Smi); edi: the callee, located below the
  // arg_count arguments and the receiver on the stack.
  __ Move(edx, Immediate(SmiFromSlot(expr->CallFeedbackICSlot())));
  __ mov(edi, Operand(esp, (arg_count + 1) * kPointerSize));
  // Don't assign a type feedback id to the IC, since type feedback is provided
  // by the vector above.
  CallIC(ic);
  OperandStackDepthDecrement(arg_count + 1);

  RecordJSReturnSite(expr);
  RestoreContext();
  // Drop the receiver still on the stack and leave the result (eax) in the
  // expression context.
  context()->DropAndPlug(1, eax);
}
2401
// Pushes the five arguments of Runtime::kResolvePossiblyDirectEval — the
// first eval argument (or undefined), the enclosing function, the language
// mode, the scope start position, and the eval call position — and performs
// the runtime call. The resolved callee is left in eax by the runtime.
void FullCodeGenerator::EmitResolvePossiblyDirectEval(Call* expr) {
  int arg_count = expr->arguments()->length();
  // Push copy of the first argument or undefined if it doesn't exist.
  if (arg_count > 0) {
    __ push(Operand(esp, arg_count * kPointerSize));
  } else {
    __ push(Immediate(isolate()->factory()->undefined_value()));
  }

  // Push the enclosing function.
  __ push(Operand(ebp, JavaScriptFrameConstants::kFunctionOffset));

  // Push the language mode.
  __ push(Immediate(Smi::FromInt(language_mode())));

  // Push the start position of the scope the call resides in.
  __ push(Immediate(Smi::FromInt(scope()->start_position())));

  // Push the source position of the eval call.
  __ push(Immediate(Smi::FromInt(expr->position())));

  // Do the runtime call.
  __ CallRuntime(Runtime::kResolvePossiblyDirectEval);
}
2426
2427
// See http://www.ecma-international.org/ecma-262/6.0/#sec-function-calls.
// Pushes the callee and its "with" base object (the call receiver) for a
// possibly-eval call. Lookup-slot callees need a runtime lookup on the slow
// path; all other callees get undefined as the base object.
void FullCodeGenerator::PushCalleeAndWithBaseObject(Call* expr) {
  VariableProxy* callee = expr->expression()->AsVariableProxy();
  if (callee->var()->IsLookupSlot()) {
    Label slow, done;
    SetExpressionPosition(callee);
    // Generate code for loading from variables potentially shadowed by
    // eval-introduced variables.
    EmitDynamicLookupFastCase(callee, NOT_INSIDE_TYPEOF, &slow, &done);

    __ bind(&slow);
    // Call the runtime to find the function to call (returned in eax) and
    // the object holding it (returned in edx).
    __ Push(callee->name());
    __ CallRuntime(Runtime::kLoadLookupSlotForCall);
    PushOperand(eax);  // Function.
    PushOperand(edx);  // Receiver.
    PrepareForBailoutForId(expr->LookupId(), BailoutState::NO_REGISTERS);

    // If fast case code has been generated, emit code to push the function
    // and receiver and have the slow path jump around this code.
    if (done.is_linked()) {
      Label call;
      __ jmp(&call, Label::kNear);
      __ bind(&done);
      // Push function. (Raw pushes here: the operand-stack depth was already
      // accounted for by the PushOperand calls on the slow path above, and
      // both paths merge at `call` with the same layout.)
      __ push(eax);
      // The receiver is implicitly the global receiver. Indicate this by
      // passing undefined to the call function stub.
      __ push(Immediate(isolate()->factory()->undefined_value()));
      __ bind(&call);
    }
  } else {
    VisitForStackValue(callee);
    // refEnv.WithBaseObject()
    PushOperand(isolate()->factory()->undefined_value());
  }
}
2466
2467
// Emits a call that may be a direct eval: resolves the callee through the
// runtime first, patches it back into the stack, then calls it like a
// regular function call.
void FullCodeGenerator::EmitPossiblyEvalCall(Call* expr) {
  // In a call to eval, we first call Runtime_ResolvePossiblyDirectEval
  // to resolve the function we need to call. Then we call the resolved
  // function using the given arguments.
  ZoneList<Expression*>* args = expr->arguments();
  int arg_count = args->length();

  PushCalleeAndWithBaseObject(expr);

  // Push the arguments.
  for (int i = 0; i < arg_count; i++) {
    VisitForStackValue(args->at(i));
  }

  // Push a copy of the function (found below the arguments) and
  // resolve eval.
  __ push(Operand(esp, (arg_count + 1) * kPointerSize));
  EmitResolvePossiblyDirectEval(expr);

  // Touch up the stack with the resolved function (now in eax).
  __ mov(Operand(esp, (arg_count + 1) * kPointerSize), eax);

  PrepareForBailoutForId(expr->EvalId(), BailoutState::NO_REGISTERS);

  SetCallPosition(expr);
  // edi: callee; eax: argument count — the calling convention of the Call
  // builtin.
  __ mov(edi, Operand(esp, (arg_count + 1) * kPointerSize));
  __ Set(eax, arg_count);
  __ Call(isolate()->builtins()->Call(ConvertReceiverMode::kAny,
                                      expr->tail_call_mode()),
          RelocInfo::CODE_TARGET);
  OperandStackDepthDecrement(arg_count + 1);
  RecordJSReturnSite(expr);
  RestoreContext();
  context()->DropAndPlug(1, eax);
}
2503
2504
// Emits a `new` expression via the CallConstructStub, recording call-site
// type feedback in the feedback vector.
void FullCodeGenerator::VisitCallNew(CallNew* expr) {
  Comment cmnt(masm_, "[ CallNew");
  // According to ECMA-262, section 11.2.2, page 44, the function
  // expression in new calls must be evaluated before the
  // arguments.

  // Push constructor on the stack. If it's not a function it's used as
  // receiver for CALL_NON_FUNCTION, otherwise the value on the stack is
  // ignored.
  DCHECK(!expr->expression()->IsSuperPropertyReference());
  VisitForStackValue(expr->expression());

  // Push the arguments ("left-to-right") on the stack.
  ZoneList<Expression*>* args = expr->arguments();
  int arg_count = args->length();
  for (int i = 0; i < arg_count; i++) {
    VisitForStackValue(args->at(i));
  }

  // Call the construct call builtin that handles allocation and
  // constructor invocation.
  SetConstructCallPosition(expr);

  // Load function and argument count into edi and eax.
  __ Move(eax, Immediate(arg_count));
  __ mov(edi, Operand(esp, arg_count * kPointerSize));

  // Record call targets in unoptimized code: ebx holds the feedback vector,
  // edx the slot index (as a Smi).
  __ EmitLoadTypeFeedbackVector(ebx);
  __ mov(edx, Immediate(SmiFromSlot(expr->CallNewFeedbackSlot())));

  CallConstructStub stub(isolate());
  __ call(stub.GetCode(), RelocInfo::CODE_TARGET);
  OperandStackDepthDecrement(arg_count + 1);
  // The new object is live in eax (TOS register) at this bailout point.
  PrepareForBailoutForId(expr->ReturnId(), BailoutState::TOS_REGISTER);
  RestoreContext();
  context()->Plug(eax);
}
2543
2544
// Emits super(...) — loads the super constructor from the prototype of the
// current function's map, evaluates the arguments, loads new.target, and
// dispatches through the Construct builtin.
void FullCodeGenerator::EmitSuperConstructorCall(Call* expr) {
  SuperCallReference* super_call_ref =
      expr->expression()->AsSuperCallReference();
  DCHECK_NOT_NULL(super_call_ref);

  // Push the super constructor target on the stack (may be null,
  // but the Construct builtin can deal with that properly).
  VisitForAccumulatorValue(super_call_ref->this_function_var());
  __ AssertFunction(result_register());
  // The super constructor is the prototype of the current function's map.
  __ mov(result_register(),
         FieldOperand(result_register(), HeapObject::kMapOffset));
  PushOperand(FieldOperand(result_register(), Map::kPrototypeOffset));

  // Push the arguments ("left-to-right") on the stack.
  ZoneList<Expression*>* args = expr->arguments();
  int arg_count = args->length();
  for (int i = 0; i < arg_count; i++) {
    VisitForStackValue(args->at(i));
  }

  // Call the construct call builtin that handles allocation and
  // constructor invocation.
  SetConstructCallPosition(expr);

  // Load new target into edx.
  VisitForAccumulatorValue(super_call_ref->new_target_var());
  __ mov(edx, result_register());

  // Load function and argument count into edi and eax.
  __ Move(eax, Immediate(arg_count));
  __ mov(edi, Operand(esp, arg_count * kPointerSize));

  __ Call(isolate()->builtins()->Construct(), RelocInfo::CODE_TARGET);
  OperandStackDepthDecrement(arg_count + 1);

  RecordJSReturnSite(expr);
  RestoreContext();
  context()->Plug(eax);
}
2584
2585
// Inlined %_IsSmi intrinsic: tests the Smi tag bit of the single argument
// and plugs the boolean outcome into the current test/value context.
void FullCodeGenerator::EmitIsSmi(CallRuntime* expr) {
  ZoneList<Expression*>* args = expr->arguments();
  DCHECK(args->length() == 1);

  VisitForAccumulatorValue(args->at(0));

  Label materialize_true, materialize_false;
  Label* if_true = NULL;
  Label* if_false = NULL;
  Label* fall_through = NULL;
  context()->PrepareTest(&materialize_true, &materialize_false,
                         &if_true, &if_false, &fall_through);

  PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
  // A zero tag bit means the value is a Smi.
  __ test(eax, Immediate(kSmiTagMask));
  Split(zero, if_true, if_false, fall_through);

  context()->Plug(if_true, if_false);
}
2605
2606
// Inlined %_IsJSReceiver intrinsic: true iff the argument is a non-Smi heap
// object whose instance type is at or above FIRST_JS_RECEIVER_TYPE.
void FullCodeGenerator::EmitIsJSReceiver(CallRuntime* expr) {
  ZoneList<Expression*>* args = expr->arguments();
  DCHECK(args->length() == 1);

  VisitForAccumulatorValue(args->at(0));

  Label materialize_true, materialize_false;
  Label* if_true = NULL;
  Label* if_false = NULL;
  Label* fall_through = NULL;
  context()->PrepareTest(&materialize_true, &materialize_false,
                         &if_true, &if_false, &fall_through);

  __ JumpIfSmi(eax, if_false);
  __ CmpObjectType(eax, FIRST_JS_RECEIVER_TYPE, ebx);
  PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
  Split(above_equal, if_true, if_false, fall_through);

  context()->Plug(if_true, if_false);
}
2627
2628
// Inlined %_IsArray intrinsic: true iff the argument is a heap object with
// instance type JS_ARRAY_TYPE.
void FullCodeGenerator::EmitIsArray(CallRuntime* expr) {
  ZoneList<Expression*>* args = expr->arguments();
  DCHECK(args->length() == 1);

  VisitForAccumulatorValue(args->at(0));

  Label materialize_true, materialize_false;
  Label* if_true = NULL;
  Label* if_false = NULL;
  Label* fall_through = NULL;
  context()->PrepareTest(&materialize_true, &materialize_false,
                         &if_true, &if_false, &fall_through);

  __ JumpIfSmi(eax, if_false);
  __ CmpObjectType(eax, JS_ARRAY_TYPE, ebx);
  PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
  Split(equal, if_true, if_false, fall_through);

  context()->Plug(if_true, if_false);
}
2649
2650
// Inlined %_IsTypedArray intrinsic: true iff the argument is a heap object
// with instance type JS_TYPED_ARRAY_TYPE.
void FullCodeGenerator::EmitIsTypedArray(CallRuntime* expr) {
  ZoneList<Expression*>* args = expr->arguments();
  DCHECK(args->length() == 1);

  VisitForAccumulatorValue(args->at(0));

  Label materialize_true, materialize_false;
  Label* if_true = NULL;
  Label* if_false = NULL;
  Label* fall_through = NULL;
  context()->PrepareTest(&materialize_true, &materialize_false, &if_true,
                         &if_false, &fall_through);

  __ JumpIfSmi(eax, if_false);
  __ CmpObjectType(eax, JS_TYPED_ARRAY_TYPE, ebx);
  PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
  Split(equal, if_true, if_false, fall_through);

  context()->Plug(if_true, if_false);
}
2671
2672
// Inlined %_IsRegExp intrinsic: true iff the argument is a heap object with
// instance type JS_REGEXP_TYPE.
void FullCodeGenerator::EmitIsRegExp(CallRuntime* expr) {
  ZoneList<Expression*>* args = expr->arguments();
  DCHECK(args->length() == 1);

  VisitForAccumulatorValue(args->at(0));

  Label materialize_true, materialize_false;
  Label* if_true = NULL;
  Label* if_false = NULL;
  Label* fall_through = NULL;
  context()->PrepareTest(&materialize_true, &materialize_false,
                         &if_true, &if_false, &fall_through);

  __ JumpIfSmi(eax, if_false);
  __ CmpObjectType(eax, JS_REGEXP_TYPE, ebx);
  PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
  Split(equal, if_true, if_false, fall_through);

  context()->Plug(if_true, if_false);
}
2693
2694
// Inlined %_IsJSProxy intrinsic: true iff the argument is a heap object
// with instance type JS_PROXY_TYPE.
void FullCodeGenerator::EmitIsJSProxy(CallRuntime* expr) {
  ZoneList<Expression*>* args = expr->arguments();
  DCHECK(args->length() == 1);

  VisitForAccumulatorValue(args->at(0));

  Label materialize_true, materialize_false;
  Label* if_true = NULL;
  Label* if_false = NULL;
  Label* fall_through = NULL;
  context()->PrepareTest(&materialize_true, &materialize_false, &if_true,
                         &if_false, &fall_through);

  __ JumpIfSmi(eax, if_false);
  __ CmpObjectType(eax, JS_PROXY_TYPE, ebx);
  PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
  Split(equal, if_true, if_false, fall_through);

  context()->Plug(if_true, if_false);
}
2715
2716
// Inlined %_ClassOf intrinsic: computes the [[Class]]-style name of the
// argument — null for non-receivers, "Function" for function kinds,
// "Object" for receivers without a JSFunction constructor, and otherwise
// the constructor's instance class name.
void FullCodeGenerator::EmitClassOf(CallRuntime* expr) {
  ZoneList<Expression*>* args = expr->arguments();
  DCHECK(args->length() == 1);
  Label done, null, function, non_function_constructor;

  VisitForAccumulatorValue(args->at(0));

  // If the object is not a JSReceiver, we return null.
  __ JumpIfSmi(eax, &null, Label::kNear);
  STATIC_ASSERT(LAST_JS_RECEIVER_TYPE == LAST_TYPE);
  __ CmpObjectType(eax, FIRST_JS_RECEIVER_TYPE, eax);
  __ j(below, &null, Label::kNear);

  // Return 'Function' for JSFunction and JSBoundFunction objects.
  __ CmpInstanceType(eax, FIRST_FUNCTION_TYPE);
  STATIC_ASSERT(LAST_FUNCTION_TYPE == LAST_TYPE);
  __ j(above_equal, &function, Label::kNear);

  // Check if the constructor in the map is a JS function.
  __ GetMapConstructor(eax, eax, ebx);
  __ CmpInstanceType(ebx, JS_FUNCTION_TYPE);
  __ j(not_equal, &non_function_constructor, Label::kNear);

  // eax now contains the constructor function. Grab the
  // instance class name from there.
  __ mov(eax, FieldOperand(eax, JSFunction::kSharedFunctionInfoOffset));
  __ mov(eax, FieldOperand(eax, SharedFunctionInfo::kInstanceClassNameOffset));
  __ jmp(&done, Label::kNear);

  // Non-JS objects have class null.
  __ bind(&null);
  __ mov(eax, isolate()->factory()->null_value());
  __ jmp(&done, Label::kNear);

  // Functions have class 'Function'.
  __ bind(&function);
  __ mov(eax, isolate()->factory()->Function_string());
  __ jmp(&done, Label::kNear);

  // Objects with a non-function constructor have class 'Object'.
  __ bind(&non_function_constructor);
  __ mov(eax, isolate()->factory()->Object_string());

  // All done.
  __ bind(&done);

  context()->Plug(eax);
}
2765
2766
// Inlined %_ValueOf intrinsic: unwraps a JSValue wrapper to its primitive
// value; Smis and non-JSValue objects are returned unchanged.
void FullCodeGenerator::EmitValueOf(CallRuntime* expr) {
  ZoneList<Expression*>* args = expr->arguments();
  DCHECK(args->length() == 1);

  VisitForAccumulatorValue(args->at(0));  // Load the object.

  Label done;
  // If the object is a smi return the object.
  __ JumpIfSmi(eax, &done, Label::kNear);
  // If the object is not a value type, return the object.
  __ CmpObjectType(eax, JS_VALUE_TYPE, ebx);
  __ j(not_equal, &done, Label::kNear);
  __ mov(eax, FieldOperand(eax, JSValue::kValueOffset));

  __ bind(&done);
  context()->Plug(eax);
}
2784
2785
// Inlined %_OneByteSeqStringSetChar(index, value, string) intrinsic: stores
// a single byte into a sequential one-byte string, returning the string.
// Debug builds verify the Smi-ness of index/value and the string's type.
void FullCodeGenerator::EmitOneByteSeqStringSetChar(CallRuntime* expr) {
  ZoneList<Expression*>* args = expr->arguments();
  DCHECK_EQ(3, args->length());

  Register string = eax;
  Register index = ebx;
  Register value = ecx;

  VisitForStackValue(args->at(0));  // index
  VisitForStackValue(args->at(1));  // value
  VisitForAccumulatorValue(args->at(2));  // string
  // Pops in reverse push order: value was pushed last.
  PopOperand(value);
  PopOperand(index);

  if (FLAG_debug_code) {
    __ test(value, Immediate(kSmiTagMask));
    __ Check(zero, kNonSmiValue);
    __ test(index, Immediate(kSmiTagMask));
    __ Check(zero, kNonSmiValue);
  }

  __ SmiUntag(value);
  __ SmiUntag(index);

  if (FLAG_debug_code) {
    static const uint32_t one_byte_seq_type = kSeqStringTag | kOneByteStringTag;
    __ EmitSeqStringSetCharCheck(string, index, value, one_byte_seq_type);
  }

  // Untagged index scales by 1 for one-byte characters.
  __ mov_b(FieldOperand(string, index, times_1, SeqOneByteString::kHeaderSize),
           value);
  context()->Plug(string);
}
2820
2821
// Inlined %_TwoByteSeqStringSetChar(index, value, string) intrinsic: stores
// a 16-bit code unit into a sequential two-byte string, returning the
// string. The index stays Smi-tagged: the tag's *2 scaling doubles as the
// two-byte element stride (see comment below).
void FullCodeGenerator::EmitTwoByteSeqStringSetChar(CallRuntime* expr) {
  ZoneList<Expression*>* args = expr->arguments();
  DCHECK_EQ(3, args->length());

  Register string = eax;
  Register index = ebx;
  Register value = ecx;

  VisitForStackValue(args->at(0));  // index
  VisitForStackValue(args->at(1));  // value
  VisitForAccumulatorValue(args->at(2));  // string
  PopOperand(value);
  PopOperand(index);

  if (FLAG_debug_code) {
    __ test(value, Immediate(kSmiTagMask));
    __ Check(zero, kNonSmiValue);
    __ test(index, Immediate(kSmiTagMask));
    __ Check(zero, kNonSmiValue);
    // The check helper wants an untagged index; retag afterwards so the
    // release-mode code below sees the same state with or without checks.
    __ SmiUntag(index);
    static const uint32_t two_byte_seq_type = kSeqStringTag | kTwoByteStringTag;
    __ EmitSeqStringSetCharCheck(string, index, value, two_byte_seq_type);
    __ SmiTag(index);
  }

  __ SmiUntag(value);
  // No need to untag a smi for two-byte addressing.
  __ mov_w(FieldOperand(string, index, times_1, SeqTwoByteString::kHeaderSize),
           value);
  context()->Plug(string);
}
2853
2854
// Inlined %_StringCharFromCode intrinsic: converts a character code (eax)
// into a one-character string (result in ebx) via the shared
// StringCharFromCodeGenerator fast/slow paths.
void FullCodeGenerator::EmitStringCharFromCode(CallRuntime* expr) {
  ZoneList<Expression*>* args = expr->arguments();
  DCHECK(args->length() == 1);

  VisitForAccumulatorValue(args->at(0));

  Label done;
  StringCharFromCodeGenerator generator(eax, ebx);
  generator.GenerateFast(masm_);
  __ jmp(&done);

  // The slow path emitted here is jumped to from within GenerateFast.
  NopRuntimeCallHelper call_helper;
  generator.GenerateSlow(masm_, call_helper);

  __ bind(&done);
  context()->Plug(ebx);
}
2872
2873
// Inlined %_StringCharCodeAt(string, index) intrinsic: yields the character
// code at the given index, NaN when the index is out of range, or undefined
// (triggering conversion upstream) when the index is not a number.
void FullCodeGenerator::EmitStringCharCodeAt(CallRuntime* expr) {
  ZoneList<Expression*>* args = expr->arguments();
  DCHECK(args->length() == 2);

  VisitForStackValue(args->at(0));
  VisitForAccumulatorValue(args->at(1));

  Register object = ebx;
  Register index = eax;
  Register result = edx;

  PopOperand(object);

  Label need_conversion;
  Label index_out_of_range;
  Label done;
  StringCharCodeAtGenerator generator(object,
                                      index,
                                      result,
                                      &need_conversion,
                                      &need_conversion,
                                      &index_out_of_range,
                                      STRING_INDEX_IS_NUMBER);
  generator.GenerateFast(masm_);
  __ jmp(&done);

  __ bind(&index_out_of_range);
  // When the index is out of range, the spec requires us to return
  // NaN.
  __ Move(result, Immediate(isolate()->factory()->nan_value()));
  __ jmp(&done);

  __ bind(&need_conversion);
  // Move the undefined value into the result register, which will
  // trigger conversion.
  __ Move(result, Immediate(isolate()->factory()->undefined_value()));
  __ jmp(&done);

  NopRuntimeCallHelper call_helper;
  generator.GenerateSlow(masm_, NOT_PART_OF_IC_HANDLER, call_helper);

  __ bind(&done);
  context()->Plug(result);
}
2918
2919
// Inlined %_StringCharAt(string, index) intrinsic: yields a one-character
// string, the empty string when the index is out of range, or Smi zero
// (triggering conversion upstream) when the index is not a number.
void FullCodeGenerator::EmitStringCharAt(CallRuntime* expr) {
  ZoneList<Expression*>* args = expr->arguments();
  DCHECK(args->length() == 2);

  VisitForStackValue(args->at(0));
  VisitForAccumulatorValue(args->at(1));

  Register object = ebx;
  Register index = eax;
  Register scratch = edx;
  Register result = eax;

  PopOperand(object);

  Label need_conversion;
  Label index_out_of_range;
  Label done;
  StringCharAtGenerator generator(object,
                                  index,
                                  scratch,
                                  result,
                                  &need_conversion,
                                  &need_conversion,
                                  &index_out_of_range,
                                  STRING_INDEX_IS_NUMBER);
  generator.GenerateFast(masm_);
  __ jmp(&done);

  __ bind(&index_out_of_range);
  // When the index is out of range, the spec requires us to return
  // the empty string.
  __ Move(result, Immediate(isolate()->factory()->empty_string()));
  __ jmp(&done);

  __ bind(&need_conversion);
  // Move smi zero into the result register, which will trigger
  // conversion.
  __ Move(result, Immediate(Smi::FromInt(0)));
  __ jmp(&done);

  NopRuntimeCallHelper call_helper;
  generator.GenerateSlow(masm_, NOT_PART_OF_IC_HANDLER, call_helper);

  __ bind(&done);
  context()->Plug(result);
}
2966
2967
// Inlined %_Call(target, receiver, ...args) intrinsic: pushes everything and
// dispatches through the generic Call builtin.
void FullCodeGenerator::EmitCall(CallRuntime* expr) {
  ZoneList<Expression*>* args = expr->arguments();
  DCHECK_LE(2, args->length());
  // Push target, receiver and arguments onto the stack.
  for (Expression* const arg : *args) {
    VisitForStackValue(arg);
  }
  PrepareForBailoutForId(expr->CallId(), BailoutState::NO_REGISTERS);
  // Move target to edi. argc excludes the target and the receiver.
  int const argc = args->length() - 2;
  __ mov(edi, Operand(esp, (argc + 1) * kPointerSize));
  // Call the target.
  __ mov(eax, Immediate(argc));
  __ Call(isolate()->builtins()->Call(), RelocInfo::CODE_TARGET);
  OperandStackDepthDecrement(argc + 1);
  RestoreContext();
  // Discard the function left on TOS.
  context()->DropAndPlug(1, eax);
}
2987
2988
// Inlined %_HasCachedArrayIndex intrinsic: true iff the string argument's
// hash field contains a cached array index.
void FullCodeGenerator::EmitHasCachedArrayIndex(CallRuntime* expr) {
  ZoneList<Expression*>* args = expr->arguments();
  DCHECK(args->length() == 1);

  VisitForAccumulatorValue(args->at(0));

  __ AssertString(eax);

  Label materialize_true, materialize_false;
  Label* if_true = NULL;
  Label* if_false = NULL;
  Label* fall_through = NULL;
  context()->PrepareTest(&materialize_true, &materialize_false,
                         &if_true, &if_false, &fall_through);

  // A zero mask result means an index is cached.
  __ test(FieldOperand(eax, String::kHashFieldOffset),
          Immediate(String::kContainsCachedArrayIndexMask));
  PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
  Split(zero, if_true, if_false, fall_through);

  context()->Plug(if_true, if_false);
}
3011
3012
// Inlined %_GetCachedArrayIndex intrinsic: extracts the array index cached
// in the string argument's hash field.
void FullCodeGenerator::EmitGetCachedArrayIndex(CallRuntime* expr) {
  ZoneList<Expression*>* args = expr->arguments();
  DCHECK(args->length() == 1);
  VisitForAccumulatorValue(args->at(0));

  __ AssertString(eax);

  __ mov(eax, FieldOperand(eax, String::kHashFieldOffset));
  __ IndexFromHash(eax, eax);

  context()->Plug(eax);
}
3025
3026
// Inlined %_GetSuperConstructor intrinsic: returns the prototype of the
// argument function's map (its super constructor).
void FullCodeGenerator::EmitGetSuperConstructor(CallRuntime* expr) {
  ZoneList<Expression*>* args = expr->arguments();
  DCHECK_EQ(1, args->length());
  VisitForAccumulatorValue(args->at(0));
  __ AssertFunction(eax);
  __ mov(eax, FieldOperand(eax, HeapObject::kMapOffset));
  __ mov(eax, FieldOperand(eax, Map::kPrototypeOffset));
  context()->Plug(eax);
}
3036
// Inlined %_DebugIsActive intrinsic: loads the isolate's debug-is-active
// byte flag and returns it as a Smi.
void FullCodeGenerator::EmitDebugIsActive(CallRuntime* expr) {
  DCHECK(expr->arguments()->length() == 0);
  ExternalReference debug_is_active =
      ExternalReference::debug_is_active_address(isolate());
  __ movzx_b(eax, Operand::StaticVariable(debug_is_active));
  __ SmiTag(eax);
  context()->Plug(eax);
}
3045
3046
// Inlined %_CreateIterResultObject(value, done) intrinsic: allocates a
// JSIteratorResult inline, falling back to the runtime when new-space
// allocation fails.
void FullCodeGenerator::EmitCreateIterResultObject(CallRuntime* expr) {
  ZoneList<Expression*>* args = expr->arguments();
  DCHECK_EQ(2, args->length());
  VisitForStackValue(args->at(0));
  VisitForStackValue(args->at(1));

  Label runtime, done;

  __ Allocate(JSIteratorResult::kSize, eax, ecx, edx, &runtime,
              NO_ALLOCATION_FLAGS);
  // Initialize map, properties and elements from the native context.
  __ mov(ebx, NativeContextOperand());
  __ mov(ebx, ContextOperand(ebx, Context::ITERATOR_RESULT_MAP_INDEX));
  __ mov(FieldOperand(eax, HeapObject::kMapOffset), ebx);
  __ mov(FieldOperand(eax, JSObject::kPropertiesOffset),
         isolate()->factory()->empty_fixed_array());
  __ mov(FieldOperand(eax, JSObject::kElementsOffset),
         isolate()->factory()->empty_fixed_array());
  // Pop in reverse push order: done (pushed last) first, then value.
  __ pop(FieldOperand(eax, JSIteratorResult::kDoneOffset));
  __ pop(FieldOperand(eax, JSIteratorResult::kValueOffset));
  STATIC_ASSERT(JSIteratorResult::kSize == 5 * kPointerSize);
  __ jmp(&done, Label::kNear);

  __ bind(&runtime);
  CallRuntimeWithOperands(Runtime::kCreateIterResultObject);

  __ bind(&done);
  context()->Plug(eax);
}
3075
3076
// Pushes the JS builtin function (looked up by context index) and an
// undefined receiver, preparing the stack for EmitCallJSRuntimeFunction.
void FullCodeGenerator::EmitLoadJSRuntimeFunction(CallRuntime* expr) {
  // Push function.
  __ LoadGlobalFunction(expr->context_index(), eax);
  PushOperand(eax);

  // Push undefined as receiver.
  PushOperand(isolate()->factory()->undefined_value());
}
3085
3086
// Calls a JS builtin whose function and receiver were pushed by
// EmitLoadJSRuntimeFunction and whose arguments are already on the stack.
void FullCodeGenerator::EmitCallJSRuntimeFunction(CallRuntime* expr) {
  ZoneList<Expression*>* args = expr->arguments();
  int arg_count = args->length();

  SetCallPosition(expr);
  // edi: callee (below the arguments and receiver); eax: argument count.
  __ mov(edi, Operand(esp, (arg_count + 1) * kPointerSize));
  __ Set(eax, arg_count);
  __ Call(isolate()->builtins()->Call(ConvertReceiverMode::kNullOrUndefined),
          RelocInfo::CODE_TARGET);
  OperandStackDepthDecrement(arg_count + 1);
  RestoreContext();
}
3099
3100
// Emits code for the unary operators delete, void, ! and typeof, selecting
// the strategy per operator and per the surrounding expression context.
void FullCodeGenerator::VisitUnaryOperation(UnaryOperation* expr) {
  switch (expr->op()) {
    case Token::DELETE: {
      Comment cmnt(masm_, "[ UnaryOperation (DELETE)");
      Property* property = expr->expression()->AsProperty();
      VariableProxy* proxy = expr->expression()->AsVariableProxy();

      if (property != NULL) {
        // delete obj[key] / delete obj.key — goes through the runtime, with
        // the strictness of the current language mode.
        VisitForStackValue(property->obj());
        VisitForStackValue(property->key());
        CallRuntimeWithOperands(is_strict(language_mode())
                                    ? Runtime::kDeleteProperty_Strict
                                    : Runtime::kDeleteProperty_Sloppy);
        context()->Plug(eax);
      } else if (proxy != NULL) {
        Variable* var = proxy->var();
        // Delete of an unqualified identifier is disallowed in strict mode but
        // "delete this" is allowed.
        bool is_this = var->HasThisName(isolate());
        DCHECK(is_sloppy(language_mode()) || is_this);
        if (var->IsUnallocatedOrGlobalSlot()) {
          // Global variable: delete from the global object (the native
          // context's extension).
          __ mov(eax, NativeContextOperand());
          __ push(ContextOperand(eax, Context::EXTENSION_INDEX));
          __ push(Immediate(var->name()));
          __ CallRuntime(Runtime::kDeleteProperty_Sloppy);
          context()->Plug(eax);
        } else if (var->IsStackAllocated() || var->IsContextSlot()) {
          // Result of deleting non-global variables is false. 'this' is
          // not really a variable, though we implement it as one. The
          // subexpression does not have side effects.
          context()->Plug(is_this);
        } else {
          // Non-global variable. Call the runtime to try to delete from the
          // context where the variable was introduced.
          __ Push(var->name());
          __ CallRuntime(Runtime::kDeleteLookupSlot);
          context()->Plug(eax);
        }
      } else {
        // Result of deleting non-property, non-variable reference is true.
        // The subexpression may have side effects.
        VisitForEffect(expr->expression());
        context()->Plug(true);
      }
      break;
    }

    case Token::VOID: {
      Comment cmnt(masm_, "[ UnaryOperation (VOID)");
      // Evaluate for side effects only; the value is always undefined.
      VisitForEffect(expr->expression());
      context()->Plug(isolate()->factory()->undefined_value());
      break;
    }

    case Token::NOT: {
      Comment cmnt(masm_, "[ UnaryOperation (NOT)");
      if (context()->IsEffect()) {
        // Unary NOT has no side effects so it's only necessary to visit the
        // subexpression. Match the optimizing compiler by not branching.
        VisitForEffect(expr->expression());
      } else if (context()->IsTest()) {
        const TestContext* test = TestContext::cast(context());
        // The labels are swapped for the recursive call.
        VisitForControl(expr->expression(),
                        test->false_label(),
                        test->true_label(),
                        test->fall_through());
        context()->Plug(test->true_label(), test->false_label());
      } else {
        // We handle value contexts explicitly rather than simply visiting
        // for control and plugging the control flow into the context,
        // because we need to prepare a pair of extra administrative AST ids
        // for the optimizing compiler.
        DCHECK(context()->IsAccumulatorValue() || context()->IsStackValue());
        Label materialize_true, materialize_false, done;
        // Labels are swapped: the subexpression's "false" materializes the
        // NOT's true value and vice versa.
        VisitForControl(expr->expression(),
                        &materialize_false,
                        &materialize_true,
                        &materialize_true);
        if (!context()->IsAccumulatorValue()) OperandStackDepthIncrement(1);
        __ bind(&materialize_true);
        PrepareForBailoutForId(expr->MaterializeTrueId(),
                               BailoutState::NO_REGISTERS);
        if (context()->IsAccumulatorValue()) {
          __ mov(eax, isolate()->factory()->true_value());
        } else {
          __ Push(isolate()->factory()->true_value());
        }
        __ jmp(&done, Label::kNear);
        __ bind(&materialize_false);
        PrepareForBailoutForId(expr->MaterializeFalseId(),
                               BailoutState::NO_REGISTERS);
        if (context()->IsAccumulatorValue()) {
          __ mov(eax, isolate()->factory()->false_value());
        } else {
          __ Push(isolate()->factory()->false_value());
        }
        __ bind(&done);
      }
      break;
    }

    case Token::TYPEOF: {
      Comment cmnt(masm_, "[ UnaryOperation (TYPEOF)");
      {
        AccumulatorValueContext context(this);
        VisitForTypeofValue(expr->expression());
      }
      // The TypeofStub takes its input in ebx.
      __ mov(ebx, eax);
      TypeofStub typeof_stub(isolate());
      __ CallStub(&typeof_stub);
      context()->Plug(eax);
      break;
    }

    default:
      UNREACHABLE();
  }
}
3220
3221
// Emits code for a count operation (prefix/postfix ++ and --) on a variable
// or property reference. Handles all five LHS kinds (variable, named/keyed
// property, named/keyed super property), inlines the smi fast path when
// profitable, and records bailout points so the optimizing compiler can
// deoptimize back into this code at well-defined states.
void FullCodeGenerator::VisitCountOperation(CountOperation* expr) {
  DCHECK(expr->expression()->IsValidReferenceExpressionOrThis());

  Comment cmnt(masm_, "[ CountOperation");

  Property* prop = expr->expression()->AsProperty();
  LhsKind assign_type = Property::GetAssignType(prop);

  // Evaluate expression and get value.
  if (assign_type == VARIABLE) {
    DCHECK(expr->expression()->AsVariableProxy()->var() != NULL);
    AccumulatorValueContext context(this);
    EmitVariableLoad(expr->expression()->AsVariableProxy());
  } else {
    // Reserve space for result of postfix operation. The slot is filled in
    // later (under the receiver/key already on the stack) once the old value
    // has been converted to a number.
    if (expr->is_postfix() && !context()->IsEffect()) {
      PushOperand(Smi::FromInt(0));
    }
    switch (assign_type) {
      case NAMED_PROPERTY: {
        // Put the object both on the stack and in the register.
        VisitForStackValue(prop->obj());
        __ mov(LoadDescriptor::ReceiverRegister(), Operand(esp, 0));
        EmitNamedPropertyLoad(prop);
        break;
      }

      case NAMED_SUPER_PROPERTY: {
        // Stack after this sequence: this, home_object, home_object — the
        // duplicated operands feed the load; the originals remain for the
        // later store.
        VisitForStackValue(prop->obj()->AsSuperPropertyReference()->this_var());
        VisitForAccumulatorValue(
            prop->obj()->AsSuperPropertyReference()->home_object());
        PushOperand(result_register());
        PushOperand(MemOperand(esp, kPointerSize));
        PushOperand(result_register());
        EmitNamedSuperPropertyLoad(prop);
        break;
      }

      case KEYED_SUPER_PROPERTY: {
        // Duplicate this/home_object/key on the stack so the load consumes
        // one copy and the store still finds its operands.
        VisitForStackValue(prop->obj()->AsSuperPropertyReference()->this_var());
        VisitForStackValue(
            prop->obj()->AsSuperPropertyReference()->home_object());
        VisitForAccumulatorValue(prop->key());
        PushOperand(result_register());
        PushOperand(MemOperand(esp, 2 * kPointerSize));
        PushOperand(MemOperand(esp, 2 * kPointerSize));
        PushOperand(result_register());
        EmitKeyedSuperPropertyLoad(prop);
        break;
      }

      case KEYED_PROPERTY: {
        VisitForStackValue(prop->obj());
        VisitForStackValue(prop->key());
        __ mov(LoadDescriptor::ReceiverRegister(),
               Operand(esp, kPointerSize));  // Object.
        __ mov(LoadDescriptor::NameRegister(), Operand(esp, 0));  // Key.
        EmitKeyedPropertyLoad(prop);
        break;
      }

      case VARIABLE:
        UNREACHABLE();
    }
  }

  // We need a second deoptimization point after loading the value
  // in case evaluating the property load may have a side effect.
  if (assign_type == VARIABLE) {
    PrepareForBailout(expr->expression(), BailoutState::TOS_REGISTER);
  } else {
    PrepareForBailoutForId(prop->LoadId(), BailoutState::TOS_REGISTER);
  }

  // Inline smi case if we are in a loop.
  Label done, stub_call;
  JumpPatchSite patch_site(masm_);
  if (ShouldInlineSmiCase(expr->op())) {
    Label slow;
    patch_site.EmitJumpIfNotSmi(eax, &slow, Label::kNear);

    // Save result for postfix expressions.
    if (expr->is_postfix()) {
      if (!context()->IsEffect()) {
        // Save the result on the stack. If we have a named or keyed property
        // we store the result under the receiver that is currently on top
        // of the stack.
        switch (assign_type) {
          case VARIABLE:
            __ push(eax);
            break;
          case NAMED_PROPERTY:
            __ mov(Operand(esp, kPointerSize), eax);
            break;
          case NAMED_SUPER_PROPERTY:
            __ mov(Operand(esp, 2 * kPointerSize), eax);
            break;
          case KEYED_PROPERTY:
            __ mov(Operand(esp, 2 * kPointerSize), eax);
            break;
          case KEYED_SUPER_PROPERTY:
            __ mov(Operand(esp, 3 * kPointerSize), eax);
            break;
        }
      }
    }

    // Smi fast path: add/sub a smi-tagged 1 directly; overflow falls through
    // to the generic stub below after undoing the operation.
    if (expr->op() == Token::INC) {
      __ add(eax, Immediate(Smi::FromInt(1)));
    } else {
      __ sub(eax, Immediate(Smi::FromInt(1)));
    }
    __ j(no_overflow, &done, Label::kNear);
    // Call stub. Undo operation first.
    if (expr->op() == Token::INC) {
      __ sub(eax, Immediate(Smi::FromInt(1)));
    } else {
      __ add(eax, Immediate(Smi::FromInt(1)));
    }
    __ jmp(&stub_call, Label::kNear);
    __ bind(&slow);
  }

  // Convert old value into a number.
  ToNumberStub convert_stub(isolate());
  __ CallStub(&convert_stub);
  PrepareForBailoutForId(expr->ToNumberId(), BailoutState::TOS_REGISTER);

  // Save result for postfix expressions.
  if (expr->is_postfix()) {
    if (!context()->IsEffect()) {
      // Save the result on the stack. If we have a named or keyed property
      // we store the result under the receiver that is currently on top
      // of the stack.
      switch (assign_type) {
        case VARIABLE:
          PushOperand(eax);
          break;
        case NAMED_PROPERTY:
          __ mov(Operand(esp, kPointerSize), eax);
          break;
        case NAMED_SUPER_PROPERTY:
          __ mov(Operand(esp, 2 * kPointerSize), eax);
          break;
        case KEYED_PROPERTY:
          __ mov(Operand(esp, 2 * kPointerSize), eax);
          break;
        case KEYED_SUPER_PROPERTY:
          __ mov(Operand(esp, 3 * kPointerSize), eax);
          break;
      }
    }
  }

  SetExpressionPosition(expr);

  // Call stub for +1/-1.
  __ bind(&stub_call);
  __ mov(edx, eax);
  __ mov(eax, Immediate(Smi::FromInt(1)));
  Handle<Code> code =
      CodeFactory::BinaryOpIC(isolate(), expr->binary_op()).code();
  CallIC(code, expr->CountBinOpFeedbackId());
  patch_site.EmitPatchInfo();
  __ bind(&done);

  // Store the value returned in eax.
  switch (assign_type) {
    case VARIABLE:
      if (expr->is_postfix()) {
        // Perform the assignment as if via '='.
        { EffectContext context(this);
          EmitVariableAssignment(expr->expression()->AsVariableProxy()->var(),
                                 Token::ASSIGN, expr->CountSlot());
          PrepareForBailoutForId(expr->AssignmentId(),
                                 BailoutState::TOS_REGISTER);
          context.Plug(eax);
        }
        // For all contexts except EffectContext we have the result on
        // top of the stack.
        if (!context()->IsEffect()) {
          context()->PlugTOS();
        }
      } else {
        // Perform the assignment as if via '='.
        EmitVariableAssignment(expr->expression()->AsVariableProxy()->var(),
                               Token::ASSIGN, expr->CountSlot());
        PrepareForBailoutForId(expr->AssignmentId(),
                               BailoutState::TOS_REGISTER);
        context()->Plug(eax);
      }
      break;
    case NAMED_PROPERTY: {
      __ mov(StoreDescriptor::NameRegister(),
             prop->key()->AsLiteral()->value());
      PopOperand(StoreDescriptor::ReceiverRegister());
      EmitLoadStoreICSlot(expr->CountSlot());
      CallStoreIC();
      PrepareForBailoutForId(expr->AssignmentId(), BailoutState::TOS_REGISTER);
      if (expr->is_postfix()) {
        if (!context()->IsEffect()) {
          context()->PlugTOS();
        }
      } else {
        context()->Plug(eax);
      }
      break;
    }
    case NAMED_SUPER_PROPERTY: {
      EmitNamedSuperPropertyStore(prop);
      if (expr->is_postfix()) {
        if (!context()->IsEffect()) {
          context()->PlugTOS();
        }
      } else {
        context()->Plug(eax);
      }
      break;
    }
    case KEYED_SUPER_PROPERTY: {
      EmitKeyedSuperPropertyStore(prop);
      if (expr->is_postfix()) {
        if (!context()->IsEffect()) {
          context()->PlugTOS();
        }
      } else {
        context()->Plug(eax);
      }
      break;
    }
    case KEYED_PROPERTY: {
      PopOperand(StoreDescriptor::NameRegister());
      PopOperand(StoreDescriptor::ReceiverRegister());
      Handle<Code> ic =
          CodeFactory::KeyedStoreIC(isolate(), language_mode()).code();
      EmitLoadStoreICSlot(expr->CountSlot());
      CallIC(ic);
      PrepareForBailoutForId(expr->AssignmentId(), BailoutState::TOS_REGISTER);
      if (expr->is_postfix()) {
        // Result is on the stack
        if (!context()->IsEffect()) {
          context()->PlugTOS();
        }
      } else {
        context()->Plug(eax);
      }
      break;
    }
  }
}
3472
3473
// Emits an inlined `typeof sub_expr == "check"` comparison, branching to
// if_true/if_false instead of materializing the typeof string. Each branch
// below implements the type test for one literal string; unknown strings
// fall through to the trailing else and always take the false branch.
void FullCodeGenerator::EmitLiteralCompareTypeof(Expression* expr,
                                                 Expression* sub_expr,
                                                 Handle<String> check) {
  Label materialize_true, materialize_false;
  Label* if_true = NULL;
  Label* if_false = NULL;
  Label* fall_through = NULL;
  context()->PrepareTest(&materialize_true, &materialize_false,
                         &if_true, &if_false, &fall_through);

  { AccumulatorValueContext context(this);
    VisitForTypeofValue(sub_expr);
  }
  PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);

  Factory* factory = isolate()->factory();
  if (String::Equals(check, factory->number_string())) {
    // Numbers are smis or heap numbers.
    __ JumpIfSmi(eax, if_true);
    __ cmp(FieldOperand(eax, HeapObject::kMapOffset),
           isolate()->factory()->heap_number_map());
    Split(equal, if_true, if_false, fall_through);
  } else if (String::Equals(check, factory->string_string())) {
    __ JumpIfSmi(eax, if_false);
    // All string instance types are below FIRST_NONSTRING_TYPE.
    __ CmpObjectType(eax, FIRST_NONSTRING_TYPE, edx);
    Split(below, if_true, if_false, fall_through);
  } else if (String::Equals(check, factory->symbol_string())) {
    __ JumpIfSmi(eax, if_false);
    __ CmpObjectType(eax, SYMBOL_TYPE, edx);
    Split(equal, if_true, if_false, fall_through);
  } else if (String::Equals(check, factory->boolean_string())) {
    __ cmp(eax, isolate()->factory()->true_value());
    __ j(equal, if_true);
    __ cmp(eax, isolate()->factory()->false_value());
    Split(equal, if_true, if_false, fall_through);
  } else if (String::Equals(check, factory->undefined_string())) {
    // null is explicitly excluded even though it is also not undetectable.
    __ cmp(eax, isolate()->factory()->null_value());
    __ j(equal, if_false);
    __ JumpIfSmi(eax, if_false);
    // Check for undetectable objects => true.
    __ mov(edx, FieldOperand(eax, HeapObject::kMapOffset));
    __ test_b(FieldOperand(edx, Map::kBitFieldOffset),
              Immediate(1 << Map::kIsUndetectable));
    Split(not_zero, if_true, if_false, fall_through);
  } else if (String::Equals(check, factory->function_string())) {
    __ JumpIfSmi(eax, if_false);
    // Check for callable and not undetectable objects => true.
    __ mov(edx, FieldOperand(eax, HeapObject::kMapOffset));
    __ movzx_b(ecx, FieldOperand(edx, Map::kBitFieldOffset));
    __ and_(ecx, (1 << Map::kIsCallable) | (1 << Map::kIsUndetectable));
    __ cmp(ecx, 1 << Map::kIsCallable);
    Split(equal, if_true, if_false, fall_through);
  } else if (String::Equals(check, factory->object_string())) {
    __ JumpIfSmi(eax, if_false);
    // typeof null is "object".
    __ cmp(eax, isolate()->factory()->null_value());
    __ j(equal, if_true);
    STATIC_ASSERT(LAST_JS_RECEIVER_TYPE == LAST_TYPE);
    __ CmpObjectType(eax, FIRST_JS_RECEIVER_TYPE, edx);
    __ j(below, if_false);
    // Check for callable or undetectable objects => false.
    __ test_b(FieldOperand(edx, Map::kBitFieldOffset),
              Immediate((1 << Map::kIsCallable) | (1 << Map::kIsUndetectable)));
    Split(zero, if_true, if_false, fall_through);
// clang-format off
// NOTE: each macro expansion opens a new `} else if` arm of the chain above,
// one per SIMD128 type; the chain is closed by the trailing else below.
#define SIMD128_TYPE(TYPE, Type, type, lane_count, lane_type) \
  } else if (String::Equals(check, factory->type##_string())) { \
    __ JumpIfSmi(eax, if_false); \
    __ cmp(FieldOperand(eax, HeapObject::kMapOffset), \
           isolate()->factory()->type##_map()); \
    Split(equal, if_true, if_false, fall_through);
  SIMD128_TYPES(SIMD128_TYPE)
#undef SIMD128_TYPE
  // clang-format on
  } else {
    if (if_false != fall_through) __ jmp(if_false);
  }
  context()->Plug(if_true, if_false);
}
3551
3552
// Emits code for a comparison expression. `in` and `instanceof` are handled
// by dedicated stubs; all other operators go through the CompareIC with an
// optional inlined smi fast path guarded by a patchable jump.
void FullCodeGenerator::VisitCompareOperation(CompareOperation* expr) {
  Comment cmnt(masm_, "[ CompareOperation");

  // First we try a fast inlined version of the compare when one of
  // the operands is a literal.
  if (TryLiteralCompare(expr)) return;

  // Always perform the comparison for its control flow. Pack the result
  // into the expression's context after the comparison is performed.
  Label materialize_true, materialize_false;
  Label* if_true = NULL;
  Label* if_false = NULL;
  Label* fall_through = NULL;
  context()->PrepareTest(&materialize_true, &materialize_false,
                         &if_true, &if_false, &fall_through);

  Token::Value op = expr->op();
  VisitForStackValue(expr->left());
  switch (op) {
    case Token::IN:
      VisitForStackValue(expr->right());
      SetExpressionPosition(expr);
      // EmitHasProperty leaves a boolean in eax.
      EmitHasProperty();
      PrepareForBailoutBeforeSplit(expr, false, NULL, NULL);
      __ cmp(eax, isolate()->factory()->true_value());
      Split(equal, if_true, if_false, fall_through);
      break;

    case Token::INSTANCEOF: {
      VisitForAccumulatorValue(expr->right());
      SetExpressionPosition(expr);
      PopOperand(edx);  // Left operand; right is already in eax.
      InstanceOfStub stub(isolate());
      __ CallStub(&stub);
      PrepareForBailoutBeforeSplit(expr, false, NULL, NULL);
      __ cmp(eax, isolate()->factory()->true_value());
      Split(equal, if_true, if_false, fall_through);
      break;
    }

    default: {
      VisitForAccumulatorValue(expr->right());
      SetExpressionPosition(expr);
      Condition cc = CompareIC::ComputeCondition(op);
      PopOperand(edx);  // Left operand.

      bool inline_smi_code = ShouldInlineSmiCase(op);
      JumpPatchSite patch_site(masm_);
      if (inline_smi_code) {
        // If both operands are smis, compare them directly and skip the IC.
        Label slow_case;
        __ mov(ecx, edx);
        __ or_(ecx, eax);
        patch_site.EmitJumpIfNotSmi(ecx, &slow_case, Label::kNear);
        __ cmp(edx, eax);
        Split(cc, if_true, if_false, NULL);
        __ bind(&slow_case);
      }

      Handle<Code> ic = CodeFactory::CompareIC(isolate(), op).code();
      CallIC(ic, expr->CompareOperationFeedbackId());
      patch_site.EmitPatchInfo();

      // The IC returns its result relative to zero in eax.
      PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
      __ test(eax, eax);
      Split(cc, if_true, if_false, fall_through);
    }
  }

  // Convert the result of the comparison into one expected for this
  // expression's context.
  context()->Plug(if_true, if_false);
}
3625
3626
// Emits an inlined comparison of sub_expr against null or undefined.
// Strict equality compares against the exact oddball; non-strict (==)
// treats null, undefined and undetectable objects as equivalent, so it
// tests the undetectable bit in the map instead.
void FullCodeGenerator::EmitLiteralCompareNil(CompareOperation* expr,
                                              Expression* sub_expr,
                                              NilValue nil) {
  Label materialize_true, materialize_false;
  Label* if_true = NULL;
  Label* if_false = NULL;
  Label* fall_through = NULL;
  context()->PrepareTest(&materialize_true, &materialize_false,
                         &if_true, &if_false, &fall_through);

  VisitForAccumulatorValue(sub_expr);
  PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);

  Handle<Object> nil_value = nil == kNullValue
      ? isolate()->factory()->null_value()
      : isolate()->factory()->undefined_value();
  if (expr->op() == Token::EQ_STRICT) {
    __ cmp(eax, nil_value);
    Split(equal, if_true, if_false, fall_through);
  } else {
    // Non-strict: true for null, undefined, and undetectable objects.
    // Note eax (the value) is clobbered by the map load below.
    __ JumpIfSmi(eax, if_false);
    __ mov(eax, FieldOperand(eax, HeapObject::kMapOffset));
    __ test_b(FieldOperand(eax, Map::kBitFieldOffset),
              Immediate(1 << Map::kIsUndetectable));
    Split(not_zero, if_true, if_false, fall_through);
  }
  context()->Plug(if_true, if_false);
}
3655
3656
// The register holding an expression's result on this architecture: eax.
Register FullCodeGenerator::result_register() {
  return eax;
}
3660
3661
// The register holding the current JS context on this architecture: esi.
Register FullCodeGenerator::context_register() {
  return esi;
}
3665
// Loads a pointer-aligned slot of the current stack frame (ebp-relative)
// into `value`.
void FullCodeGenerator::LoadFromFrameField(int frame_offset, Register value) {
  DCHECK_EQ(POINTER_SIZE_ALIGN(frame_offset), frame_offset);
  __ mov(value, Operand(ebp, frame_offset));
}
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00003670
// Stores `value` into a pointer-aligned slot of the current stack frame
// (ebp-relative).
void FullCodeGenerator::StoreToFrameField(int frame_offset, Register value) {
  DCHECK_EQ(POINTER_SIZE_ALIGN(frame_offset), frame_offset);
  __ mov(Operand(ebp, frame_offset), value);
}
3675
3676
// Loads the given slot of the current context (held in esi) into dst.
void FullCodeGenerator::LoadContextField(Register dst, int context_index) {
  __ mov(dst, ContextOperand(esi, context_index));
}
3680
3681
// Pushes the closure to be used when allocating a new context. The choice
// depends on the kind of scope that owns the closure, since script/module
// and eval scopes must not use the local anonymous closure.
void FullCodeGenerator::PushFunctionArgumentForContextAllocation() {
  Scope* closure_scope = scope()->ClosureScope();
  if (closure_scope->is_script_scope() ||
      closure_scope->is_module_scope()) {
    // Contexts nested in the native context have a canonical empty function
    // as their closure, not the anonymous closure containing the global
    // code.
    __ mov(eax, NativeContextOperand());
    PushOperand(ContextOperand(eax, Context::CLOSURE_INDEX));
  } else if (closure_scope->is_eval_scope()) {
    // Contexts nested inside eval code have the same closure as the context
    // calling eval, not the anonymous closure containing the eval code.
    // Fetch it from the context.
    PushOperand(ContextOperand(esi, Context::CLOSURE_INDEX));
  } else {
    DCHECK(closure_scope->is_function_scope());
    // Ordinary function scope: push the function from the frame.
    PushOperand(Operand(ebp, JavaScriptFrameConstants::kFunctionOffset));
  }
}
3701
3702
3703// ----------------------------------------------------------------------------
3704// Non-local control flow support.
3705
// Saves the pending message on the operand stack and clears it, so code in
// the finally block cannot observe or clobber it; ExitFinallyBlock restores
// it.
void FullCodeGenerator::EnterFinallyBlock() {
  // Store pending message while executing finally block.
  ExternalReference pending_message_obj =
      ExternalReference::address_of_pending_message_obj(isolate());
  __ mov(edx, Operand::StaticVariable(pending_message_obj));
  PushOperand(edx);

  ClearPendingMessage();
}
3715
3716
// Restores the pending message saved by EnterFinallyBlock. edx is used as
// scratch, so it must not alias the result register.
void FullCodeGenerator::ExitFinallyBlock() {
  DCHECK(!result_register().is(edx));
  // Restore pending message from stack.
  PopOperand(edx);
  ExternalReference pending_message_obj =
      ExternalReference::address_of_pending_message_obj(isolate());
  __ mov(Operand::StaticVariable(pending_message_obj), edx);
}
3725
3726
// Resets the isolate's pending message slot to the hole value (no message).
// edx is used as scratch, so it must not alias the result register.
void FullCodeGenerator::ClearPendingMessage() {
  DCHECK(!result_register().is(edx));
  ExternalReference pending_message_obj =
      ExternalReference::address_of_pending_message_obj(isolate());
  __ mov(edx, Immediate(isolate()->factory()->the_hole_value()));
  __ mov(Operand::StaticVariable(pending_message_obj), edx);
}
3734
3735
// Emits the dispatch code run after a finally block: pops the saved
// accumulator and command token, then compares the token against each
// deferred command (return/throw/continue/break) and executes the match.
void FullCodeGenerator::DeferredCommands::EmitCommands() {
  DCHECK(!result_register().is(edx));
  __ Pop(result_register());  // Restore the accumulator.
  __ Pop(edx);                // Get the token.
  for (DeferredCommand cmd : commands_) {
    Label skip;
    __ cmp(edx, Immediate(Smi::FromInt(cmd.token)));
    __ j(not_equal, &skip);
    switch (cmd.command) {
      case kReturn:
        codegen_->EmitUnwindAndReturn();
        break;
      case kThrow:
        // The accumulator holds the exception to rethrow.
        __ Push(result_register());
        __ CallRuntime(Runtime::kReThrow);
        break;
      case kContinue:
        codegen_->EmitContinue(cmd.target);
        break;
      case kBreak:
        codegen_->EmitBreak(cmd.target);
        break;
    }
    __ bind(&skip);
  }
}
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00003762
3763#undef __
3764
3765
// Byte patterns used by BackEdgeTable to patch the back-edge check in place:
// the interrupt form is `jns <offset>`; the OSR form overwrites it with a
// two-byte nop (0x66 0x90) so control always falls into the call.
static const byte kJnsInstruction = 0x79;  // Opcode of `jns rel8`.
static const byte kJnsOffset = 0x11;       // rel8 jump distance over the call.
static const byte kNopByteOne = 0x66;      // Operand-size prefix of 2-byte nop.
static const byte kNopByteTwo = 0x90;      // nop.
#ifdef DEBUG
static const byte kCallInstruction = 0xe8;  // Opcode of `call rel32`.
#endif
3773
3774
// Patches the back-edge check at `pc` in unoptimized code to either the
// interrupt form (jns skipping the call) or the on-stack-replacement form
// (nops, so the call is always taken), and redirects the call target to
// `replacement_code`.
void BackEdgeTable::PatchAt(Code* unoptimized_code,
                            Address pc,
                            BackEdgeState target_state,
                            Code* replacement_code) {
  // `pc` points just past the call; the rel32 operand and the two bytes of
  // the jns precede it.
  Address call_target_address = pc - kIntSize;
  Address jns_instr_address = call_target_address - 3;
  Address jns_offset_address = call_target_address - 2;

  switch (target_state) {
    case INTERRUPT:
      // sub <profiling_counter>, <delta> ;; Not changed
      // jns ok
      // call <interrupt stub>
      // ok:
      *jns_instr_address = kJnsInstruction;
      *jns_offset_address = kJnsOffset;
      break;
    case ON_STACK_REPLACEMENT:
      // sub <profiling_counter>, <delta> ;; Not changed
      // nop
      // nop
      // call <on-stack replacement>
      // ok:
      *jns_instr_address = kNopByteOne;
      *jns_offset_address = kNopByteTwo;
      break;
  }

  Assembler::set_target_address_at(unoptimized_code->GetIsolate(),
                                   call_target_address, unoptimized_code,
                                   replacement_code->entry());
  // Tell the incremental marker about the embedded code-target change.
  unoptimized_code->GetHeap()->incremental_marking()->RecordCodeTargetPatch(
      unoptimized_code, call_target_address, replacement_code);
}
3809
3810
// Inspects the bytes at the back-edge site `pc` and reports which form it
// is currently patched to: INTERRUPT (jns present) or ON_STACK_REPLACEMENT
// (jns replaced by the two-byte nop). Debug builds verify the call target
// matches the expected builtin.
BackEdgeTable::BackEdgeState BackEdgeTable::GetBackEdgeState(
    Isolate* isolate,
    Code* unoptimized_code,
    Address pc) {
  Address call_target_address = pc - kIntSize;
  Address jns_instr_address = call_target_address - 3;
  DCHECK_EQ(kCallInstruction, *(call_target_address - 1));

  if (*jns_instr_address == kJnsInstruction) {
    DCHECK_EQ(kJnsOffset, *(call_target_address - 2));
    DCHECK_EQ(isolate->builtins()->InterruptCheck()->entry(),
              Assembler::target_address_at(call_target_address,
                                           unoptimized_code));
    return INTERRUPT;
  }

  DCHECK_EQ(kNopByteOne, *jns_instr_address);
  DCHECK_EQ(kNopByteTwo, *(call_target_address - 2));

  DCHECK_EQ(
      isolate->builtins()->OnStackReplacement()->entry(),
      Assembler::target_address_at(call_target_address, unoptimized_code));
  return ON_STACK_REPLACEMENT;
}
3835
3836
3837} // namespace internal
3838} // namespace v8
3839
3840#endif // V8_TARGET_ARCH_X87