1// Copyright 2015 the V8 project authors. All rights reserved.
2// Use of this source code is governed by a BSD-style license that can be
3// found in the LICENSE file.
4
5#if V8_TARGET_ARCH_S390
6
7#include "src/ast/scopes.h"
8#include "src/code-factory.h"
9#include "src/code-stubs.h"
10#include "src/codegen.h"
11#include "src/debug/debug.h"
12#include "src/full-codegen/full-codegen.h"
13#include "src/ic/ic.h"
14#include "src/parsing/parser.h"
15
16#include "src/s390/code-stubs-s390.h"
17#include "src/s390/macro-assembler-s390.h"
18
19namespace v8 {
20namespace internal {
21
22#define __ ACCESS_MASM(masm())
23
24// A patch site is a location in the code that can be patched. This class has
25// a number of methods to emit the patchable code and a method, EmitPatchInfo,
26// to record a marker back to the patchable code. This marker is a
27// chi rx, #yyy instruction, and x * 0x0000ffff + yyy (the raw 16-bit
28// immediate value is used) is the delta from the pc to the first instruction
29// of the patchable code.
30// See PatchInlinedSmiCode in ic-s390.cc for the code that patches it.
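// As a rough sketch (mnemonics are illustrative, not the exact encoding), the
// site emitted by EmitJumpIfNotSmi below initially looks like
//   CmpP  reg, reg     ; compare a register with itself: always equal
//   beq   target       ; always taken, so the inlined smi code is skipped
// and EmitPatchInfo later records it with
//   chi   r0, #delta   ; delta = bytes emitted since the patch site
// PatchInlinedSmiCode can use that delta to find the compare and rewrite it
// into a real smi test (the 4-byte TMLL mentioned below) once the inlined
// smi code should be taken.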
31class JumpPatchSite BASE_EMBEDDED {
32 public:
33 explicit JumpPatchSite(MacroAssembler* masm) : masm_(masm) {
34#ifdef DEBUG
35 info_emitted_ = false;
36#endif
37 }
38
39 ~JumpPatchSite() { DCHECK(patch_site_.is_bound() == info_emitted_); }
40
41 // When initially emitting this, ensure that a jump is always generated to
42 // skip the inlined smi code.
43 void EmitJumpIfNotSmi(Register reg, Label* target) {
44 DCHECK(!patch_site_.is_bound() && !info_emitted_);
45 __ bind(&patch_site_);
46 __ CmpP(reg, reg);
46// Emit the nop to leave more room for patching on 31-bit,
47// as the TestIfSmi sequence uses a 4-byte TMLL.
49#ifndef V8_TARGET_ARCH_S390X
50 __ nop();
51#endif
52 __ beq(target); // Always taken before patched.
53 }
54
55 // When initially emitting this, ensure that a jump is never generated to
56 // skip the inlined smi code.
57 void EmitJumpIfSmi(Register reg, Label* target) {
58 DCHECK(!patch_site_.is_bound() && !info_emitted_);
59 __ bind(&patch_site_);
60 __ CmpP(reg, reg);
61// Emit the nop to leave more room for patching on 31-bit,
62// as the TestIfSmi sequence uses a 4-byte TMLL.
63#ifndef V8_TARGET_ARCH_S390X
64 __ nop();
65#endif
66 __ bne(target); // Never taken before patched.
67 }
68
69 void EmitPatchInfo() {
70 if (patch_site_.is_bound()) {
71 int delta_to_patch_site = masm_->SizeOfCodeGeneratedSince(&patch_site_);
72 DCHECK(is_int16(delta_to_patch_site));
73 __ chi(r0, Operand(delta_to_patch_site));
74#ifdef DEBUG
75 info_emitted_ = true;
76#endif
77 } else {
78 __ nop();
79 __ nop();
80 }
81 }
82
83 private:
84 MacroAssembler* masm() { return masm_; }
85 MacroAssembler* masm_;
86 Label patch_site_;
87#ifdef DEBUG
88 bool info_emitted_;
89#endif
90};
91
92// Generate code for a JS function. On entry to the function the receiver
93// and arguments have been pushed on the stack left to right. The actual
94// argument count matches the formal parameter count expected by the
95// function.
96//
97// The live registers are:
98// o r3: the JS function object being called (i.e., ourselves)
99// o r5: the new target value
100// o cp: our context
101// o fp: our caller's frame pointer
102// o sp: stack pointer
103// o lr: return address
104// o ip: our own function entry (required by the prologue)
105//
106// The function builds a JS frame. Please see JavaScriptFrameConstants in
107// frames-s390.h for its layout.
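// As a rough orientation for the code below (frames-s390.h remains the
// authoritative layout): incoming parameters are addressed relative to fp via
// StandardFrameConstants::kCallerSPOffset, the spilled context via
// StandardFrameConstants::kContextOffset, and the closure via
// JavaScriptFrameConstants::kFunctionOffset.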
108void FullCodeGenerator::Generate() {
109 CompilationInfo* info = info_;
110 profiling_counter_ = isolate()->factory()->NewCell(
111 Handle<Smi>(Smi::FromInt(FLAG_interrupt_budget), isolate()));
112 SetFunctionPosition(literal());
113 Comment cmnt(masm_, "[ function compiled by full code generator");
114
115 ProfileEntryHookStub::MaybeCallEntryHook(masm_);
116
117 if (FLAG_debug_code && info->ExpectsJSReceiverAsReceiver()) {
118 int receiver_offset = info->scope()->num_parameters() * kPointerSize;
119 __ LoadP(r4, MemOperand(sp, receiver_offset), r0);
120 __ AssertNotSmi(r4);
121 __ CompareObjectType(r4, r4, no_reg, FIRST_JS_RECEIVER_TYPE);
122 __ Assert(ge, kSloppyFunctionExpectsJSReceiverReceiver);
123 }
124
125 // Open a frame scope to indicate that there is a frame on the stack. The
126 // MANUAL indicates that the scope shouldn't actually generate code to set up
127 // the frame (that is done below).
128 FrameScope frame_scope(masm_, StackFrame::MANUAL);
129 int prologue_offset = masm_->pc_offset();
130
131 info->set_prologue_offset(prologue_offset);
132 __ Prologue(info->GeneratePreagedPrologue(), ip, prologue_offset);
133
134 {
135 Comment cmnt(masm_, "[ Allocate locals");
136 int locals_count = info->scope()->num_stack_slots();
137 // Generators allocate locals, if any, in context slots.
138 DCHECK(!IsGeneratorFunction(info->literal()->kind()) || locals_count == 0);
139 OperandStackDepthIncrement(locals_count);
140 if (locals_count > 0) {
141 if (locals_count >= 128) {
142 Label ok;
143 __ AddP(ip, sp, Operand(-(locals_count * kPointerSize)));
144 __ LoadRoot(r5, Heap::kRealStackLimitRootIndex);
145 __ CmpLogicalP(ip, r5);
146 __ bge(&ok, Label::kNear);
147 __ CallRuntime(Runtime::kThrowStackOverflow);
148 __ bind(&ok);
149 }
150 __ LoadRoot(ip, Heap::kUndefinedValueRootIndex);
151 int kMaxPushes = FLAG_optimize_for_size ? 4 : 32;
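      // Worked example with hypothetical numbers: for locals_count == 70 and
      // kMaxPushes == 32, the loop below runs 70 / 32 == 2 iterations of 32
      // stores each, and the tail emits the remaining 70 % 32 == 6 stores.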
152 if (locals_count >= kMaxPushes) {
153 int loop_iterations = locals_count / kMaxPushes;
154 __ mov(r4, Operand(loop_iterations));
155 Label loop_header;
156 __ bind(&loop_header);
157 // Do pushes.
158 // TODO(joransiu): Use MVC for better performance
159 __ lay(sp, MemOperand(sp, -kMaxPushes * kPointerSize));
160 for (int i = 0; i < kMaxPushes; i++) {
161 __ StoreP(ip, MemOperand(sp, i * kPointerSize));
162 }
163 // Continue loop if not done.
164 __ BranchOnCount(r4, &loop_header);
165 }
166 int remaining = locals_count % kMaxPushes;
167 // Emit the remaining pushes.
168 // TODO(joransiu): Use MVC for better performance
169 if (remaining > 0) {
170 __ lay(sp, MemOperand(sp, -remaining * kPointerSize));
171 for (int i = 0; i < remaining; i++) {
172 __ StoreP(ip, MemOperand(sp, i * kPointerSize));
173 }
174 }
175 }
176 }
177
178 bool function_in_register_r3 = true;
179
180 // Possibly allocate a local context.
181 if (info->scope()->num_heap_slots() > 0) {
182 // Argument to NewContext is the function, which is still in r3.
183 Comment cmnt(masm_, "[ Allocate context");
184 bool need_write_barrier = true;
185 int slots = info->scope()->num_heap_slots() - Context::MIN_CONTEXT_SLOTS;
186 if (info->scope()->is_script_scope()) {
187 __ push(r3);
188 __ Push(info->scope()->GetScopeInfo(info->isolate()));
189 __ CallRuntime(Runtime::kNewScriptContext);
190 PrepareForBailoutForId(BailoutId::ScriptContext(),
191 BailoutState::TOS_REGISTER);
192 // The new target value is not used, clobbering is safe.
193 DCHECK_NULL(info->scope()->new_target_var());
194 } else {
195 if (info->scope()->new_target_var() != nullptr) {
196 __ push(r5); // Preserve new target.
197 }
198 if (slots <= FastNewContextStub::kMaximumSlots) {
199 FastNewContextStub stub(isolate(), slots);
200 __ CallStub(&stub);
201 // Result of FastNewContextStub is always in new space.
202 need_write_barrier = false;
203 } else {
204 __ push(r3);
205 __ CallRuntime(Runtime::kNewFunctionContext);
206 }
207 if (info->scope()->new_target_var() != nullptr) {
208 __ pop(r5); // Restore new target.
209 }
210 }
211 function_in_register_r3 = false;
212 // Context is returned in r2. It replaces the context passed to us.
213 // It's saved in the stack and kept live in cp.
214 __ LoadRR(cp, r2);
215 __ StoreP(r2, MemOperand(fp, StandardFrameConstants::kContextOffset));
216 // Copy any necessary parameters into the context.
217 int num_parameters = info->scope()->num_parameters();
218 int first_parameter = info->scope()->has_this_declaration() ? -1 : 0;
219 for (int i = first_parameter; i < num_parameters; i++) {
220 Variable* var = (i == -1) ? scope()->receiver() : scope()->parameter(i);
221 if (var->IsContextSlot()) {
222 int parameter_offset = StandardFrameConstants::kCallerSPOffset +
223 (num_parameters - 1 - i) * kPointerSize;
224 // Load parameter from stack.
225 __ LoadP(r2, MemOperand(fp, parameter_offset), r0);
226 // Store it in the context.
227 MemOperand target = ContextMemOperand(cp, var->index());
228 __ StoreP(r2, target);
229
230 // Update the write barrier.
231 if (need_write_barrier) {
232 __ RecordWriteContextSlot(cp, target.offset(), r2, r4,
233 kLRHasBeenSaved, kDontSaveFPRegs);
234 } else if (FLAG_debug_code) {
235 Label done;
236 __ JumpIfInNewSpace(cp, r2, &done);
237 __ Abort(kExpectedNewSpaceObject);
238 __ bind(&done);
239 }
240 }
241 }
242 }
243
244 // The registers holding this function and the new target are both trashed if
245 // we bail out here. But since that can happen only when the new target is not
246 // used and we allocate a context, the value of |function_in_register| is correct.
247 PrepareForBailoutForId(BailoutId::FunctionContext(),
248 BailoutState::NO_REGISTERS);
249
250 // Possibly set up a local binding to the this-function, which is used in
251 // derived constructors with super calls.
252 Variable* this_function_var = scope()->this_function_var();
253 if (this_function_var != nullptr) {
254 Comment cmnt(masm_, "[ This function");
255 if (!function_in_register_r3) {
256 __ LoadP(r3, MemOperand(fp, JavaScriptFrameConstants::kFunctionOffset));
257 // The write barrier clobbers the register again; keep it marked as such.
258 }
259 SetVar(this_function_var, r3, r2, r4);
260 }
261
262 // Possibly set up a local binding to the new target value.
263 Variable* new_target_var = scope()->new_target_var();
264 if (new_target_var != nullptr) {
265 Comment cmnt(masm_, "[ new.target");
266 SetVar(new_target_var, r5, r2, r4);
267 }
268
269 // Possibly allocate the rest parameter array.
270 int rest_index;
271 Variable* rest_param = scope()->rest_parameter(&rest_index);
272 if (rest_param) {
273 Comment cmnt(masm_, "[ Allocate rest parameter array");
274
275 if (!function_in_register_r3) {
276 __ LoadP(r3, MemOperand(fp, JavaScriptFrameConstants::kFunctionOffset));
277 }
278 FastNewRestParameterStub stub(isolate());
279 __ CallStub(&stub);
280
281 function_in_register_r3 = false;
282 SetVar(rest_param, r2, r3, r4);
283 }
284
285 Variable* arguments = scope()->arguments();
286 if (arguments != NULL) {
287 // Function uses arguments object.
288 Comment cmnt(masm_, "[ Allocate arguments object");
289 if (!function_in_register_r3) {
290 // Load this again, if it's used by the local context below.
291 __ LoadP(r3, MemOperand(fp, JavaScriptFrameConstants::kFunctionOffset));
292 }
293 if (is_strict(language_mode()) || !has_simple_parameters()) {
294 FastNewStrictArgumentsStub stub(isolate());
295 __ CallStub(&stub);
296 } else if (literal()->has_duplicate_parameters()) {
297 __ Push(r3);
298 __ CallRuntime(Runtime::kNewSloppyArguments_Generic);
299 } else {
300 FastNewSloppyArgumentsStub stub(isolate());
301 __ CallStub(&stub);
302 }
303
304 SetVar(arguments, r2, r3, r4);
305 }
306
307 if (FLAG_trace) {
308 __ CallRuntime(Runtime::kTraceEnter);
309 }
310
311 // Visit the declarations and body.
312 PrepareForBailoutForId(BailoutId::FunctionEntry(),
313 BailoutState::NO_REGISTERS);
314 {
315 Comment cmnt(masm_, "[ Declarations");
316 VisitDeclarations(scope()->declarations());
317 }
318
319 // Assert that the declarations do not use ICs. Otherwise the debugger
320 // won't be able to redirect a PC at an IC to the correct IC in newly
321 // recompiled code.
322 DCHECK_EQ(0, ic_total_count_);
323
324 {
325 Comment cmnt(masm_, "[ Stack check");
326 PrepareForBailoutForId(BailoutId::Declarations(),
327 BailoutState::NO_REGISTERS);
328 Label ok;
329 __ LoadRoot(ip, Heap::kStackLimitRootIndex);
330 __ CmpLogicalP(sp, ip);
331 __ bge(&ok, Label::kNear);
332 __ Call(isolate()->builtins()->StackCheck(), RelocInfo::CODE_TARGET);
333 __ bind(&ok);
334 }
335
336 {
337 Comment cmnt(masm_, "[ Body");
338 DCHECK(loop_depth() == 0);
339 VisitStatements(literal()->body());
340 DCHECK(loop_depth() == 0);
341 }
342
343 // Always emit a 'return undefined' in case control fell off the end of
344 // the body.
345 {
346 Comment cmnt(masm_, "[ return <undefined>;");
347 __ LoadRoot(r2, Heap::kUndefinedValueRootIndex);
348 }
349 EmitReturnSequence();
350}
351
352void FullCodeGenerator::ClearAccumulator() {
353 __ LoadSmiLiteral(r2, Smi::FromInt(0));
354}
355
356void FullCodeGenerator::EmitProfilingCounterDecrement(int delta) {
357 __ mov(r4, Operand(profiling_counter_));
358 intptr_t smi_delta = reinterpret_cast<intptr_t>(Smi::FromInt(delta));
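  // Note: smi_delta is the raw tagged representation of Smi::FromInt(delta),
  // so adding -smi_delta directly to the cell's contents in the fast path is
  // equivalent to the SubSmiLiteral in the slow path below.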
359 if (CpuFeatures::IsSupported(GENERAL_INSTR_EXT) && is_int8(-smi_delta)) {
360 __ AddP(FieldMemOperand(r4, Cell::kValueOffset), Operand(-smi_delta));
361 __ LoadP(r5, FieldMemOperand(r4, Cell::kValueOffset));
362 } else {
363 __ LoadP(r5, FieldMemOperand(r4, Cell::kValueOffset));
364 __ SubSmiLiteral(r5, r5, Smi::FromInt(delta), r0);
365 __ StoreP(r5, FieldMemOperand(r4, Cell::kValueOffset));
366 }
367}
368
369void FullCodeGenerator::EmitProfilingCounterReset() {
370 int reset_value = FLAG_interrupt_budget;
371 __ mov(r4, Operand(profiling_counter_));
372 __ LoadSmiLiteral(r5, Smi::FromInt(reset_value));
373 __ StoreP(r5, FieldMemOperand(r4, Cell::kValueOffset));
374}
375
376void FullCodeGenerator::EmitBackEdgeBookkeeping(IterationStatement* stmt,
377 Label* back_edge_target) {
378 Comment cmnt(masm_, "[ Back edge bookkeeping");
379 Label ok;
380
381 DCHECK(back_edge_target->is_bound());
382 int distance = masm_->SizeOfCodeGeneratedSince(back_edge_target) +
383 kCodeSizeMultiplier / 2;
384 int weight = Min(kMaxBackEdgeWeight, Max(1, distance / kCodeSizeMultiplier));
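  // For illustration, with made-up numbers: a distance of 600 and a
  // kCodeSizeMultiplier of 200 give weight == Min(kMaxBackEdgeWeight,
  // Max(1, 600 / 200)) == 3 (assuming kMaxBackEdgeWeight >= 3), so larger
  // loop bodies drain the interrupt budget faster.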
385 EmitProfilingCounterDecrement(weight);
386 {
387 // BackEdgeTable::PatchAt manipulates this sequence.
388 __ bge(&ok, Label::kNear);
389 __ Call(isolate()->builtins()->InterruptCheck(), RelocInfo::CODE_TARGET);
390
391 // Record a mapping of this PC offset to the OSR id. This is used to find
392 // the AST id from the unoptimized code in order to use it as a key into
393 // the deoptimization input data found in the optimized code.
394 RecordBackEdge(stmt->OsrEntryId());
395 }
396 EmitProfilingCounterReset();
397
398 __ bind(&ok);
399 PrepareForBailoutForId(stmt->EntryId(), BailoutState::NO_REGISTERS);
400 // Record a mapping of the OSR id to this PC. This is used if the OSR
401 // entry becomes the target of a bailout. We don't expect it to be, but
402 // we want it to work if it is.
403 PrepareForBailoutForId(stmt->OsrEntryId(), BailoutState::NO_REGISTERS);
404}
405
406void FullCodeGenerator::EmitProfilingCounterHandlingForReturnSequence(
407 bool is_tail_call) {
408 // Pretend that the exit is a backwards jump to the entry.
409 int weight = 1;
410 if (info_->ShouldSelfOptimize()) {
411 weight = FLAG_interrupt_budget / FLAG_self_opt_count;
412 } else {
413 int distance = masm_->pc_offset() + kCodeSizeMultiplier / 2;
414 weight = Min(kMaxBackEdgeWeight, Max(1, distance / kCodeSizeMultiplier));
415 }
416 EmitProfilingCounterDecrement(weight);
417 Label ok;
418 __ CmpP(r5, Operand::Zero());
419 __ bge(&ok);
420 // Don't need to save result register if we are going to do a tail call.
421 if (!is_tail_call) {
422 __ push(r2);
423 }
424 __ Call(isolate()->builtins()->InterruptCheck(), RelocInfo::CODE_TARGET);
425 if (!is_tail_call) {
426 __ pop(r2);
427 }
428 EmitProfilingCounterReset();
429 __ bind(&ok);
430}
431
432void FullCodeGenerator::EmitReturnSequence() {
433 Comment cmnt(masm_, "[ Return sequence");
434 if (return_label_.is_bound()) {
435 __ b(&return_label_);
436 } else {
437 __ bind(&return_label_);
438 if (FLAG_trace) {
439 // Push the return value on the stack as the parameter.
440 // Runtime::TraceExit returns its parameter in r2.
441 __ push(r2);
442 __ CallRuntime(Runtime::kTraceExit);
443 }
444 EmitProfilingCounterHandlingForReturnSequence(false);
445
446 // Make sure that the constant pool is not emitted inside the return
447 // sequence.
448 {
449 // Here we use masm_-> instead of the __ macro to prevent the code coverage
450 // tool from instrumenting, as we rely on the code size here.
451 int32_t arg_count = info_->scope()->num_parameters() + 1;
452 int32_t sp_delta = arg_count * kPointerSize;
453 SetReturnPosition(literal());
454 __ LeaveFrame(StackFrame::JAVA_SCRIPT, sp_delta);
455
456 __ Ret();
457 }
458 }
459}
460
461void FullCodeGenerator::RestoreContext() {
462 __ LoadP(cp, MemOperand(fp, StandardFrameConstants::kContextOffset));
463}
464
465void FullCodeGenerator::StackValueContext::Plug(Variable* var) const {
466 DCHECK(var->IsStackAllocated() || var->IsContextSlot());
467 codegen()->GetVar(result_register(), var);
468 codegen()->PushOperand(result_register());
469}
470
471void FullCodeGenerator::EffectContext::Plug(Heap::RootListIndex index) const {}
472
473void FullCodeGenerator::AccumulatorValueContext::Plug(
474 Heap::RootListIndex index) const {
475 __ LoadRoot(result_register(), index);
476}
477
478void FullCodeGenerator::StackValueContext::Plug(
479 Heap::RootListIndex index) const {
480 __ LoadRoot(result_register(), index);
481 codegen()->PushOperand(result_register());
482}
483
484void FullCodeGenerator::TestContext::Plug(Heap::RootListIndex index) const {
485 codegen()->PrepareForBailoutBeforeSplit(condition(), true, true_label_,
486 false_label_);
487 if (index == Heap::kUndefinedValueRootIndex ||
488 index == Heap::kNullValueRootIndex ||
489 index == Heap::kFalseValueRootIndex) {
490 if (false_label_ != fall_through_) __ b(false_label_);
491 } else if (index == Heap::kTrueValueRootIndex) {
492 if (true_label_ != fall_through_) __ b(true_label_);
493 } else {
494 __ LoadRoot(result_register(), index);
495 codegen()->DoTest(this);
496 }
497}
498
499void FullCodeGenerator::EffectContext::Plug(Handle<Object> lit) const {}
500
501void FullCodeGenerator::AccumulatorValueContext::Plug(
502 Handle<Object> lit) const {
503 __ mov(result_register(), Operand(lit));
504}
505
506void FullCodeGenerator::StackValueContext::Plug(Handle<Object> lit) const {
507 // Immediates cannot be pushed directly.
508 __ mov(result_register(), Operand(lit));
509 codegen()->PushOperand(result_register());
510}
511
512void FullCodeGenerator::TestContext::Plug(Handle<Object> lit) const {
513 codegen()->PrepareForBailoutBeforeSplit(condition(), true, true_label_,
514 false_label_);
515 DCHECK(lit->IsNull() || lit->IsUndefined() || !lit->IsUndetectable());
516 if (lit->IsUndefined() || lit->IsNull() || lit->IsFalse()) {
517 if (false_label_ != fall_through_) __ b(false_label_);
518 } else if (lit->IsTrue() || lit->IsJSObject()) {
519 if (true_label_ != fall_through_) __ b(true_label_);
520 } else if (lit->IsString()) {
521 if (String::cast(*lit)->length() == 0) {
522 if (false_label_ != fall_through_) __ b(false_label_);
523 } else {
524 if (true_label_ != fall_through_) __ b(true_label_);
525 }
526 } else if (lit->IsSmi()) {
527 if (Smi::cast(*lit)->value() == 0) {
528 if (false_label_ != fall_through_) __ b(false_label_);
529 } else {
530 if (true_label_ != fall_through_) __ b(true_label_);
531 }
532 } else {
533 // For simplicity we always test the accumulator register.
534 __ mov(result_register(), Operand(lit));
535 codegen()->DoTest(this);
536 }
537}
538
539void FullCodeGenerator::StackValueContext::DropAndPlug(int count,
540 Register reg) const {
541 DCHECK(count > 0);
542 if (count > 1) codegen()->DropOperands(count - 1);
543 __ StoreP(reg, MemOperand(sp, 0));
544}
545
546void FullCodeGenerator::EffectContext::Plug(Label* materialize_true,
547 Label* materialize_false) const {
548 DCHECK(materialize_true == materialize_false);
549 __ bind(materialize_true);
550}
551
552void FullCodeGenerator::AccumulatorValueContext::Plug(
553 Label* materialize_true, Label* materialize_false) const {
554 Label done;
555 __ bind(materialize_true);
556 __ LoadRoot(result_register(), Heap::kTrueValueRootIndex);
557 __ b(&done, Label::kNear);
558 __ bind(materialize_false);
559 __ LoadRoot(result_register(), Heap::kFalseValueRootIndex);
560 __ bind(&done);
561}
562
563void FullCodeGenerator::StackValueContext::Plug(
564 Label* materialize_true, Label* materialize_false) const {
565 Label done;
566 __ bind(materialize_true);
567 __ LoadRoot(ip, Heap::kTrueValueRootIndex);
568 __ b(&done, Label::kNear);
569 __ bind(materialize_false);
570 __ LoadRoot(ip, Heap::kFalseValueRootIndex);
571 __ bind(&done);
572 codegen()->PushOperand(ip);
573}
574
575void FullCodeGenerator::TestContext::Plug(Label* materialize_true,
576 Label* materialize_false) const {
577 DCHECK(materialize_true == true_label_);
578 DCHECK(materialize_false == false_label_);
579}
580
581void FullCodeGenerator::AccumulatorValueContext::Plug(bool flag) const {
582 Heap::RootListIndex value_root_index =
583 flag ? Heap::kTrueValueRootIndex : Heap::kFalseValueRootIndex;
584 __ LoadRoot(result_register(), value_root_index);
585}
586
587void FullCodeGenerator::StackValueContext::Plug(bool flag) const {
588 Heap::RootListIndex value_root_index =
589 flag ? Heap::kTrueValueRootIndex : Heap::kFalseValueRootIndex;
590 __ LoadRoot(ip, value_root_index);
591 codegen()->PushOperand(ip);
592}
593
594void FullCodeGenerator::TestContext::Plug(bool flag) const {
595 codegen()->PrepareForBailoutBeforeSplit(condition(), true, true_label_,
596 false_label_);
597 if (flag) {
598 if (true_label_ != fall_through_) __ b(true_label_);
599 } else {
600 if (false_label_ != fall_through_) __ b(false_label_);
601 }
602}
603
604void FullCodeGenerator::DoTest(Expression* condition, Label* if_true,
605 Label* if_false, Label* fall_through) {
606 Handle<Code> ic = ToBooleanICStub::GetUninitialized(isolate());
607 CallIC(ic, condition->test_id());
608 __ CompareRoot(result_register(), Heap::kTrueValueRootIndex);
609 Split(eq, if_true, if_false, fall_through);
610}
611
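// Example of the fall-through optimization in Split() below (hypothetical
// call): Split(eq, &if_true, &if_false, &if_false) emits only a "beq if_true"
// and lets execution fall through into the if_false code.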
612void FullCodeGenerator::Split(Condition cond, Label* if_true, Label* if_false,
613 Label* fall_through) {
614 if (if_false == fall_through) {
615 __ b(cond, if_true);
616 } else if (if_true == fall_through) {
617 __ b(NegateCondition(cond), if_false);
618 } else {
619 __ b(cond, if_true);
620 __ b(if_false);
621 }
622}
623
624MemOperand FullCodeGenerator::StackOperand(Variable* var) {
625 DCHECK(var->IsStackAllocated());
626 // Offset is negative because higher indexes are at lower addresses.
627 int offset = -var->index() * kPointerSize;
628 // Adjust by a (parameter or local) base offset.
629 if (var->IsParameter()) {
630 offset += (info_->scope()->num_parameters() + 1) * kPointerSize;
631 } else {
632 offset += JavaScriptFrameConstants::kLocal0Offset;
633 }
634 return MemOperand(fp, offset);
635}
636
637MemOperand FullCodeGenerator::VarOperand(Variable* var, Register scratch) {
638 DCHECK(var->IsContextSlot() || var->IsStackAllocated());
639 if (var->IsContextSlot()) {
640 int context_chain_length = scope()->ContextChainLength(var->scope());
641 __ LoadContext(scratch, context_chain_length);
642 return ContextMemOperand(scratch, var->index());
643 } else {
644 return StackOperand(var);
645 }
646}
647
648void FullCodeGenerator::GetVar(Register dest, Variable* var) {
649 // Use destination as scratch.
650 MemOperand location = VarOperand(var, dest);
651 __ LoadP(dest, location, r0);
652}
653
654void FullCodeGenerator::SetVar(Variable* var, Register src, Register scratch0,
655 Register scratch1) {
656 DCHECK(var->IsContextSlot() || var->IsStackAllocated());
657 DCHECK(!scratch0.is(src));
658 DCHECK(!scratch0.is(scratch1));
659 DCHECK(!scratch1.is(src));
660 MemOperand location = VarOperand(var, scratch0);
661 __ StoreP(src, location);
662
663 // Emit the write barrier code if the location is in the heap.
664 if (var->IsContextSlot()) {
665 __ RecordWriteContextSlot(scratch0, location.offset(), src, scratch1,
666 kLRHasBeenSaved, kDontSaveFPRegs);
667 }
668}
669
670void FullCodeGenerator::PrepareForBailoutBeforeSplit(Expression* expr,
671 bool should_normalize,
672 Label* if_true,
673 Label* if_false) {
674 // Only prepare for bailouts before splits if we're in a test
675 // context. Otherwise, we let the Visit function deal with the
676 // preparation to avoid preparing with the same AST id twice.
677 if (!context()->IsTest()) return;
678
679 Label skip;
680 if (should_normalize) __ b(&skip);
681 PrepareForBailout(expr, BailoutState::TOS_REGISTER);
682 if (should_normalize) {
683 __ CompareRoot(r2, Heap::kTrueValueRootIndex);
684 Split(eq, if_true, if_false, NULL);
685 __ bind(&skip);
686 }
687}
688
689void FullCodeGenerator::EmitDebugCheckDeclarationContext(Variable* variable) {
690 // The variable in the declaration always resides in the current function
691 // context.
692 DCHECK_EQ(0, scope()->ContextChainLength(variable->scope()));
693 if (FLAG_debug_code) {
694 // Check that we're not inside a with or catch context.
695 __ LoadP(r3, FieldMemOperand(cp, HeapObject::kMapOffset));
696 __ CompareRoot(r3, Heap::kWithContextMapRootIndex);
697 __ Check(ne, kDeclarationInWithContext);
698 __ CompareRoot(r3, Heap::kCatchContextMapRootIndex);
699 __ Check(ne, kDeclarationInCatchContext);
700 }
701}
702
703void FullCodeGenerator::VisitVariableDeclaration(
704 VariableDeclaration* declaration) {
705 // If it was not possible to allocate the variable at compile time, we
706 // need to "declare" it at runtime to make sure it actually exists in the
707 // local context.
708 VariableProxy* proxy = declaration->proxy();
709 VariableMode mode = declaration->mode();
710 Variable* variable = proxy->var();
711 bool hole_init = mode == LET || mode == CONST;
712 switch (variable->location()) {
713 case VariableLocation::GLOBAL:
714 case VariableLocation::UNALLOCATED:
715 DCHECK(!variable->binding_needs_init());
716 globals_->Add(variable->name(), zone());
717 globals_->Add(isolate()->factory()->undefined_value(), zone());
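      // The name/value pair added to globals_ above is later passed, as part
      // of a fixed array of pairs, to DeclareGlobals(), which calls
      // Runtime::kDeclareGlobals.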
718 break;
719
720 case VariableLocation::PARAMETER:
721 case VariableLocation::LOCAL:
722 if (hole_init) {
723 Comment cmnt(masm_, "[ VariableDeclaration");
724 __ LoadRoot(ip, Heap::kTheHoleValueRootIndex);
725 __ StoreP(ip, StackOperand(variable));
726 }
727 break;
728
729 case VariableLocation::CONTEXT:
730 if (hole_init) {
731 Comment cmnt(masm_, "[ VariableDeclaration");
732 EmitDebugCheckDeclarationContext(variable);
733 __ LoadRoot(ip, Heap::kTheHoleValueRootIndex);
734 __ StoreP(ip, ContextMemOperand(cp, variable->index()));
735 // No write barrier since the_hole_value is in old space.
736 PrepareForBailoutForId(proxy->id(), BailoutState::NO_REGISTERS);
737 }
738 break;
739
740 case VariableLocation::LOOKUP: {
741 Comment cmnt(masm_, "[ VariableDeclaration");
742 __ mov(r4, Operand(variable->name()));
743 // Declaration nodes are always introduced in one of four modes.
744 DCHECK(IsDeclaredVariableMode(mode));
745 // Push initial value, if any.
746 // Note: For variables we must not push an initial value (such as
747 // 'undefined') because we may have a (legal) redeclaration and we
748 // must not destroy the current value.
749 if (hole_init) {
750 __ LoadRoot(r2, Heap::kTheHoleValueRootIndex);
751 } else {
752 __ LoadSmiLiteral(r2, Smi::FromInt(0)); // Indicates no initial value.
753 }
754 __ Push(r4, r2);
755 __ Push(Smi::FromInt(variable->DeclarationPropertyAttributes()));
756 __ CallRuntime(Runtime::kDeclareLookupSlot);
757 PrepareForBailoutForId(proxy->id(), BailoutState::NO_REGISTERS);
758 break;
759 }
760 }
761}
762
763void FullCodeGenerator::VisitFunctionDeclaration(
764 FunctionDeclaration* declaration) {
765 VariableProxy* proxy = declaration->proxy();
766 Variable* variable = proxy->var();
767 switch (variable->location()) {
768 case VariableLocation::GLOBAL:
769 case VariableLocation::UNALLOCATED: {
770 globals_->Add(variable->name(), zone());
771 Handle<SharedFunctionInfo> function =
772 Compiler::GetSharedFunctionInfo(declaration->fun(), script(), info_);
773 // Check for stack-overflow exception.
774 if (function.is_null()) return SetStackOverflow();
775 globals_->Add(function, zone());
776 break;
777 }
778
779 case VariableLocation::PARAMETER:
780 case VariableLocation::LOCAL: {
781 Comment cmnt(masm_, "[ FunctionDeclaration");
782 VisitForAccumulatorValue(declaration->fun());
783 __ StoreP(result_register(), StackOperand(variable));
784 break;
785 }
786
787 case VariableLocation::CONTEXT: {
788 Comment cmnt(masm_, "[ FunctionDeclaration");
789 EmitDebugCheckDeclarationContext(variable);
790 VisitForAccumulatorValue(declaration->fun());
791 __ StoreP(result_register(), ContextMemOperand(cp, variable->index()));
792 int offset = Context::SlotOffset(variable->index());
793 // We know that we have written a function, which is not a smi.
794 __ RecordWriteContextSlot(cp, offset, result_register(), r4,
795 kLRHasBeenSaved, kDontSaveFPRegs,
796 EMIT_REMEMBERED_SET, OMIT_SMI_CHECK);
797 PrepareForBailoutForId(proxy->id(), BailoutState::NO_REGISTERS);
798 break;
799 }
800
801 case VariableLocation::LOOKUP: {
802 Comment cmnt(masm_, "[ FunctionDeclaration");
803 __ mov(r4, Operand(variable->name()));
804 PushOperand(r4);
805 // Push initial value for function declaration.
806 VisitForStackValue(declaration->fun());
807 PushOperand(Smi::FromInt(variable->DeclarationPropertyAttributes()));
808 CallRuntimeWithOperands(Runtime::kDeclareLookupSlot);
809 PrepareForBailoutForId(proxy->id(), BailoutState::NO_REGISTERS);
810 break;
811 }
812 }
813}
814
815void FullCodeGenerator::DeclareGlobals(Handle<FixedArray> pairs) {
816 // Call the runtime to declare the globals.
817 __ mov(r3, Operand(pairs));
818 __ LoadSmiLiteral(r2, Smi::FromInt(DeclareGlobalsFlags()));
819 __ Push(r3, r2);
820 __ CallRuntime(Runtime::kDeclareGlobals);
821 // Return value is ignored.
822}
823
824void FullCodeGenerator::DeclareModules(Handle<FixedArray> descriptions) {
825 // Call the runtime to declare the modules.
826 __ Push(descriptions);
827 __ CallRuntime(Runtime::kDeclareModules);
828 // Return value is ignored.
829}
830
831void FullCodeGenerator::VisitSwitchStatement(SwitchStatement* stmt) {
832 Comment cmnt(masm_, "[ SwitchStatement");
833 Breakable nested_statement(this, stmt);
834 SetStatementPosition(stmt);
835
836 // Keep the switch value on the stack until a case matches.
837 VisitForStackValue(stmt->tag());
838 PrepareForBailoutForId(stmt->EntryId(), BailoutState::NO_REGISTERS);
839
840 ZoneList<CaseClause*>* clauses = stmt->cases();
841 CaseClause* default_clause = NULL; // Can occur anywhere in the list.
842
843 Label next_test; // Recycled for each test.
844 // Compile all the tests with branches to their bodies.
845 for (int i = 0; i < clauses->length(); i++) {
846 CaseClause* clause = clauses->at(i);
847 clause->body_target()->Unuse();
848
849 // The default is not a test, but remember it as final fall through.
850 if (clause->is_default()) {
851 default_clause = clause;
852 continue;
853 }
854
855 Comment cmnt(masm_, "[ Case comparison");
856 __ bind(&next_test);
857 next_test.Unuse();
858
859 // Compile the label expression.
860 VisitForAccumulatorValue(clause->label());
861
862 // Perform the comparison as if via '==='.
863 __ LoadP(r3, MemOperand(sp, 0)); // Switch value.
864 bool inline_smi_code = ShouldInlineSmiCase(Token::EQ_STRICT);
865 JumpPatchSite patch_site(masm_);
866 if (inline_smi_code) {
867 Label slow_case;
868 __ LoadRR(r4, r2);
869 __ OrP(r4, r3);
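      // ORing the two tagged values leaves the low (smi tag) bit clear only if
      // both operands are smis, so the single check below covers the pair.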
870 patch_site.EmitJumpIfNotSmi(r4, &slow_case);
871
872 __ CmpP(r3, r2);
873 __ bne(&next_test);
874 __ Drop(1); // Switch value is no longer needed.
875 __ b(clause->body_target());
876 __ bind(&slow_case);
877 }
878
879 // Record position before stub call for type feedback.
880 SetExpressionPosition(clause);
881 Handle<Code> ic =
882 CodeFactory::CompareIC(isolate(), Token::EQ_STRICT).code();
883 CallIC(ic, clause->CompareId());
884 patch_site.EmitPatchInfo();
885
886 Label skip;
887 __ b(&skip);
888 PrepareForBailout(clause, BailoutState::TOS_REGISTER);
889 __ CompareRoot(r2, Heap::kTrueValueRootIndex);
890 __ bne(&next_test);
891 __ Drop(1);
892 __ b(clause->body_target());
893 __ bind(&skip);
894
895 __ CmpP(r2, Operand::Zero());
896 __ bne(&next_test);
897 __ Drop(1); // Switch value is no longer needed.
898 __ b(clause->body_target());
899 }
900
901 // Discard the test value and jump to the default if present, otherwise to
902 // the end of the statement.
903 __ bind(&next_test);
904 DropOperands(1); // Switch value is no longer needed.
905 if (default_clause == NULL) {
906 __ b(nested_statement.break_label());
907 } else {
908 __ b(default_clause->body_target());
909 }
910
911 // Compile all the case bodies.
912 for (int i = 0; i < clauses->length(); i++) {
913 Comment cmnt(masm_, "[ Case body");
914 CaseClause* clause = clauses->at(i);
915 __ bind(clause->body_target());
916 PrepareForBailoutForId(clause->EntryId(), BailoutState::NO_REGISTERS);
917 VisitStatements(clause->statements());
918 }
919
920 __ bind(nested_statement.break_label());
921 PrepareForBailoutForId(stmt->ExitId(), BailoutState::NO_REGISTERS);
922}
923
924void FullCodeGenerator::VisitForInStatement(ForInStatement* stmt) {
925 Comment cmnt(masm_, "[ ForInStatement");
926 SetStatementPosition(stmt, SKIP_BREAK);
927
928 FeedbackVectorSlot slot = stmt->ForInFeedbackSlot();
929
930 // Get the object to enumerate over.
931 SetExpressionAsStatementPosition(stmt->enumerable());
932 VisitForAccumulatorValue(stmt->enumerable());
933 OperandStackDepthIncrement(5);
934
935 Label loop, exit;
936 Iteration loop_statement(this, stmt);
937 increment_loop_depth();
938
939 // If the object is null or undefined, skip over the loop, otherwise convert
940 // it to a JS receiver. See ECMA-262 version 5, section 12.6.4.
941 Label convert, done_convert;
942 __ JumpIfSmi(r2, &convert);
943 __ CompareObjectType(r2, r3, r3, FIRST_JS_RECEIVER_TYPE);
944 __ bge(&done_convert);
945 __ CompareRoot(r2, Heap::kNullValueRootIndex);
946 __ beq(&exit);
947 __ CompareRoot(r2, Heap::kUndefinedValueRootIndex);
948 __ beq(&exit);
949 __ bind(&convert);
950 ToObjectStub stub(isolate());
951 __ CallStub(&stub);
952 __ bind(&done_convert);
953 PrepareForBailoutForId(stmt->ToObjectId(), BailoutState::TOS_REGISTER);
954 __ push(r2);
955
956 // Check cache validity in generated code. If we cannot guarantee cache
957 // validity, call the runtime system to check cache validity or get the
958 // property names in a fixed array. Note: Proxies never have an enum cache,
959 // so will always take the slow path.
960 Label call_runtime;
961 __ CheckEnumCache(&call_runtime);
962
963 // The enum cache is valid. Load the map of the object being
964 // iterated over and use the cache for the iteration.
965 Label use_cache;
966 __ LoadP(r2, FieldMemOperand(r2, HeapObject::kMapOffset));
967 __ b(&use_cache);
968
969 // Get the set of properties to enumerate.
970 __ bind(&call_runtime);
971 __ push(r2); // Duplicate the enumerable object on the stack.
972 __ CallRuntime(Runtime::kForInEnumerate);
973 PrepareForBailoutForId(stmt->EnumId(), BailoutState::TOS_REGISTER);
974
975 // If we got a map from the runtime call, we can do a fast
976 // modification check. Otherwise, we got a fixed array, and we have
977 // to do a slow check.
978 Label fixed_array;
979 __ LoadP(r4, FieldMemOperand(r2, HeapObject::kMapOffset));
980 __ CompareRoot(r4, Heap::kMetaMapRootIndex);
981 __ bne(&fixed_array);
982
983 // We got a map in register r2. Get the enumeration cache from it.
984 Label no_descriptors;
985 __ bind(&use_cache);
986
987 __ EnumLength(r3, r2);
988 __ CmpSmiLiteral(r3, Smi::FromInt(0), r0);
989 __ beq(&no_descriptors, Label::kNear);
990
991 __ LoadInstanceDescriptors(r2, r4);
992 __ LoadP(r4, FieldMemOperand(r4, DescriptorArray::kEnumCacheOffset));
993 __ LoadP(r4,
994 FieldMemOperand(r4, DescriptorArray::kEnumCacheBridgeCacheOffset));
995
996 // Set up the four remaining stack slots.
997 __ push(r2); // Map.
998 __ LoadSmiLiteral(r2, Smi::FromInt(0));
999 // Push enumeration cache, enumeration cache length (as smi) and zero.
1000 __ Push(r4, r3, r2);
1001 __ b(&loop);
1002
1003 __ bind(&no_descriptors);
1004 __ Drop(1);
1005 __ b(&exit);
1006
1007 // We got a fixed array in register r2. Iterate through that.
1008 __ bind(&fixed_array);
1009
1010 __ LoadSmiLiteral(r3, Smi::FromInt(1)); // Smi(1) indicates slow check
1011 __ Push(r3, r2); // Smi and array
1012 __ LoadP(r3, FieldMemOperand(r2, FixedArray::kLengthOffset));
1013 __ Push(r3); // Fixed array length (as smi).
1014 PrepareForBailoutForId(stmt->PrepareId(), BailoutState::NO_REGISTERS);
1015 __ LoadSmiLiteral(r2, Smi::FromInt(0));
1016 __ Push(r2); // Initial index.
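  // At this point the five for-in stack slots are, from the top: the current
  // index, the cache/array length, the enum cache or fixed array, the expected
  // map (or Smi(1) on the slow path), and the enumerable object itself; the
  // loop below reads them at offsets 0..4 * kPointerSize from sp.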
1017
1018 // Generate code for doing the condition check.
1019 __ bind(&loop);
1020 SetExpressionAsStatementPosition(stmt->each());
1021
1022 // Load the current count to r2, load the length to r3.
1023 __ LoadP(r2, MemOperand(sp, 0 * kPointerSize));
1024 __ LoadP(r3, MemOperand(sp, 1 * kPointerSize));
1025 __ CmpLogicalP(r2, r3); // Compare to the array length.
1026 __ bge(loop_statement.break_label());
1027
1028 // Get the current entry of the array into register r5.
1029 __ LoadP(r4, MemOperand(sp, 2 * kPointerSize));
1030 __ AddP(r4, Operand(FixedArray::kHeaderSize - kHeapObjectTag));
1031 __ SmiToPtrArrayOffset(r5, r2);
1032 __ LoadP(r5, MemOperand(r5, r4));
1033
1034 // Get the expected map from the stack (or a smi in the
1035 // permanent slow case) into register r4.
1036 __ LoadP(r4, MemOperand(sp, 3 * kPointerSize));
1037
1038 // Check if the expected map still matches that of the enumerable.
1039 // If not, we may have to filter the key.
1040 Label update_each;
1041 __ LoadP(r3, MemOperand(sp, 4 * kPointerSize));
1042 __ LoadP(r6, FieldMemOperand(r3, HeapObject::kMapOffset));
1043 __ CmpP(r6, r4);
1044 __ beq(&update_each);
1045
1046 // We need to filter the key, record slow-path here.
1047 int const vector_index = SmiFromSlot(slot)->value();
1048 __ EmitLoadTypeFeedbackVector(r2);
1049 __ mov(r4, Operand(TypeFeedbackVector::MegamorphicSentinel(isolate())));
1050 __ StoreP(
1051 r4, FieldMemOperand(r2, FixedArray::OffsetOfElementAt(vector_index)), r0);
1052
1053 // Convert the entry to a string or (smi) 0 if it isn't a property
1054 // any more. If the property has been removed while iterating, we
1055 // just skip it.
1056 __ Push(r3, r5); // Enumerable and current entry.
1057 __ CallRuntime(Runtime::kForInFilter);
1058 PrepareForBailoutForId(stmt->FilterId(), BailoutState::TOS_REGISTER);
1059 __ LoadRR(r5, r2);
1060 __ LoadRoot(r0, Heap::kUndefinedValueRootIndex);
1061 __ CmpP(r2, r0);
1062 __ beq(loop_statement.continue_label());
1063
1064 // Update the 'each' property or variable from the possibly filtered
1065 // entry in register r5.
1066 __ bind(&update_each);
1067 __ LoadRR(result_register(), r5);
1068 // Perform the assignment as if via '='.
1069 {
1070 EffectContext context(this);
1071 EmitAssignment(stmt->each(), stmt->EachFeedbackSlot());
1072 PrepareForBailoutForId(stmt->AssignmentId(), BailoutState::NO_REGISTERS);
1073 }
1074
1075 // Both Crankshaft and Turbofan expect BodyId to be right before stmt->body().
1076 PrepareForBailoutForId(stmt->BodyId(), BailoutState::NO_REGISTERS);
1077 // Generate code for the body of the loop.
1078 Visit(stmt->body());
1079
1080 // Generate code for going to the next element by incrementing
1081 // the index (smi) stored on top of the stack.
1082 __ bind(loop_statement.continue_label());
1083 __ pop(r2);
1084 __ AddSmiLiteral(r2, r2, Smi::FromInt(1), r0);
1085 __ push(r2);
1086
1087 EmitBackEdgeBookkeeping(stmt, &loop);
1088 __ b(&loop);
1089
1090 // Remove the pointers stored on the stack.
1091 __ bind(loop_statement.break_label());
1092 DropOperands(5);
1093
1094 // Exit and decrement the loop depth.
1095 PrepareForBailoutForId(stmt->ExitId(), BailoutState::NO_REGISTERS);
1096 __ bind(&exit);
1097 decrement_loop_depth();
1098}
1099
1100void FullCodeGenerator::EmitSetHomeObject(Expression* initializer, int offset,
1101 FeedbackVectorSlot slot) {
1102 DCHECK(NeedsHomeObject(initializer));
1103 __ LoadP(StoreDescriptor::ReceiverRegister(), MemOperand(sp));
1104 __ mov(StoreDescriptor::NameRegister(),
1105 Operand(isolate()->factory()->home_object_symbol()));
1106 __ LoadP(StoreDescriptor::ValueRegister(),
1107 MemOperand(sp, offset * kPointerSize));
1108 EmitLoadStoreICSlot(slot);
1109 CallStoreIC();
1110}
1111
1112void FullCodeGenerator::EmitSetHomeObjectAccumulator(Expression* initializer,
1113 int offset,
1114 FeedbackVectorSlot slot) {
1115 DCHECK(NeedsHomeObject(initializer));
1116 __ Move(StoreDescriptor::ReceiverRegister(), r2);
1117 __ mov(StoreDescriptor::NameRegister(),
1118 Operand(isolate()->factory()->home_object_symbol()));
1119 __ LoadP(StoreDescriptor::ValueRegister(),
1120 MemOperand(sp, offset * kPointerSize));
1121 EmitLoadStoreICSlot(slot);
1122 CallStoreIC();
1123}
1124
1125void FullCodeGenerator::EmitLoadGlobalCheckExtensions(VariableProxy* proxy,
1126 TypeofMode typeof_mode,
1127 Label* slow) {
1128 Register current = cp;
1129 Register next = r3;
1130 Register temp = r4;
1131
1132 Scope* s = scope();
1133 while (s != NULL) {
1134 if (s->num_heap_slots() > 0) {
1135 if (s->calls_sloppy_eval()) {
1136 // Check that extension is "the hole".
1137 __ LoadP(temp, ContextMemOperand(current, Context::EXTENSION_INDEX));
1138 __ JumpIfNotRoot(temp, Heap::kTheHoleValueRootIndex, slow);
1139 }
1140 // Load next context in chain.
1141 __ LoadP(next, ContextMemOperand(current, Context::PREVIOUS_INDEX));
1142 // Walk the rest of the chain without clobbering cp.
1143 current = next;
1144 }
1145 // If no outer scope calls eval, we do not need to check more
1146 // context extensions.
1147 if (!s->outer_scope_calls_sloppy_eval() || s->is_eval_scope()) break;
1148 s = s->outer_scope();
1149 }
1150
1151 if (s->is_eval_scope()) {
1152 Label loop, fast;
1153 if (!current.is(next)) {
1154 __ Move(next, current);
1155 }
1156 __ bind(&loop);
1157 // Terminate at native context.
1158 __ LoadP(temp, FieldMemOperand(next, HeapObject::kMapOffset));
1159 __ CompareRoot(temp, Heap::kNativeContextMapRootIndex);
1160 __ beq(&fast, Label::kNear);
1161 // Check that extension is "the hole".
1162 __ LoadP(temp, ContextMemOperand(next, Context::EXTENSION_INDEX));
1163 __ JumpIfNotRoot(temp, Heap::kTheHoleValueRootIndex, slow);
1164 // Load next context in chain.
1165 __ LoadP(next, ContextMemOperand(next, Context::PREVIOUS_INDEX));
1166 __ b(&loop);
1167 __ bind(&fast);
1168 }
1169
1170 // All extension objects were empty and it is safe to use a normal global
1171 // load machinery.
1172 EmitGlobalVariableLoad(proxy, typeof_mode);
1173}
1174
1175MemOperand FullCodeGenerator::ContextSlotOperandCheckExtensions(Variable* var,
1176 Label* slow) {
1177 DCHECK(var->IsContextSlot());
1178 Register context = cp;
1179 Register next = r5;
1180 Register temp = r6;
1181
1182 for (Scope* s = scope(); s != var->scope(); s = s->outer_scope()) {
1183 if (s->num_heap_slots() > 0) {
1184 if (s->calls_sloppy_eval()) {
1185 // Check that extension is "the hole".
1186 __ LoadP(temp, ContextMemOperand(context, Context::EXTENSION_INDEX));
1187 __ JumpIfNotRoot(temp, Heap::kTheHoleValueRootIndex, slow);
1188 }
1189 __ LoadP(next, ContextMemOperand(context, Context::PREVIOUS_INDEX));
1190 // Walk the rest of the chain without clobbering cp.
1191 context = next;
1192 }
1193 }
1194 // Check that last extension is "the hole".
1195 __ LoadP(temp, ContextMemOperand(context, Context::EXTENSION_INDEX));
1196 __ JumpIfNotRoot(temp, Heap::kTheHoleValueRootIndex, slow);
1197
1198 // This function is used only for loads, not stores, so it's safe to
1199 // return a cp-based operand (the write barrier cannot be allowed to
1200 // destroy the cp register).
1201 return ContextMemOperand(context, var->index());
1202}
1203
1204void FullCodeGenerator::EmitDynamicLookupFastCase(VariableProxy* proxy,
1205 TypeofMode typeof_mode,
1206 Label* slow, Label* done) {
1207 // Generate fast-case code for variables that might be shadowed by
1208 // eval-introduced variables. Eval is used a lot without
1209 // introducing variables. In those cases, we do not want to
1210 // perform a runtime call for all variables in the scope
1211 // containing the eval.
1212 Variable* var = proxy->var();
1213 if (var->mode() == DYNAMIC_GLOBAL) {
1214 EmitLoadGlobalCheckExtensions(proxy, typeof_mode, slow);
1215 __ b(done);
1216 } else if (var->mode() == DYNAMIC_LOCAL) {
1217 Variable* local = var->local_if_not_shadowed();
1218 __ LoadP(r2, ContextSlotOperandCheckExtensions(local, slow));
1219 if (local->mode() == LET || local->mode() == CONST) {
1220 __ CompareRoot(r2, Heap::kTheHoleValueRootIndex);
1221 __ bne(done);
1222 __ mov(r2, Operand(var->name()));
1223 __ push(r2);
1224 __ CallRuntime(Runtime::kThrowReferenceError);
1225 }
1226 __ b(done);
1227 }
1228}
1229
1230void FullCodeGenerator::EmitGlobalVariableLoad(VariableProxy* proxy,
1231 TypeofMode typeof_mode) {
1232 Variable* var = proxy->var();
1233 DCHECK(var->IsUnallocatedOrGlobalSlot() ||
1234 (var->IsLookupSlot() && var->mode() == DYNAMIC_GLOBAL));
1235 __ LoadGlobalObject(LoadDescriptor::ReceiverRegister());
1236 __ mov(LoadDescriptor::NameRegister(), Operand(var->name()));
1237 __ mov(LoadDescriptor::SlotRegister(),
1238 Operand(SmiFromSlot(proxy->VariableFeedbackSlot())));
1239 CallLoadIC(typeof_mode);
1240}
1241
1242void FullCodeGenerator::EmitVariableLoad(VariableProxy* proxy,
1243 TypeofMode typeof_mode) {
1244 // Record position before possible IC call.
1245 SetExpressionPosition(proxy);
1246 PrepareForBailoutForId(proxy->BeforeId(), BailoutState::NO_REGISTERS);
1247 Variable* var = proxy->var();
1248
1249 // Three cases: global variables, lookup variables, and all other types of
1250 // variables.
1251 switch (var->location()) {
1252 case VariableLocation::GLOBAL:
1253 case VariableLocation::UNALLOCATED: {
1254 Comment cmnt(masm_, "[ Global variable");
1255 EmitGlobalVariableLoad(proxy, typeof_mode);
1256 context()->Plug(r2);
1257 break;
1258 }
1259
1260 case VariableLocation::PARAMETER:
1261 case VariableLocation::LOCAL:
1262 case VariableLocation::CONTEXT: {
1263 DCHECK_EQ(NOT_INSIDE_TYPEOF, typeof_mode);
1264 Comment cmnt(masm_, var->IsContextSlot() ? "[ Context variable"
1265 : "[ Stack variable");
1266 if (NeedsHoleCheckForLoad(proxy)) {
1267 Label done;
1268 // Let and const need a read barrier.
1269 GetVar(r2, var);
1270 __ CompareRoot(r2, Heap::kTheHoleValueRootIndex);
1271 __ bne(&done);
1272 if (var->mode() == LET || var->mode() == CONST) {
1273 // Throw a reference error when using an uninitialized let/const
1274 // binding in harmony mode.
1275 __ mov(r2, Operand(var->name()));
1276 __ push(r2);
1277 __ CallRuntime(Runtime::kThrowReferenceError);
1278 }
1279 __ bind(&done);
1280 context()->Plug(r2);
1281 break;
1282 }
1283 context()->Plug(var);
1284 break;
1285 }
1286
1287 case VariableLocation::LOOKUP: {
1288 Comment cmnt(masm_, "[ Lookup variable");
1289 Label done, slow;
1290 // Generate code for loading from variables potentially shadowed
1291 // by eval-introduced variables.
1292 EmitDynamicLookupFastCase(proxy, typeof_mode, &slow, &done);
1293 __ bind(&slow);
1294 __ Push(var->name());
1295 Runtime::FunctionId function_id =
1296 typeof_mode == NOT_INSIDE_TYPEOF
1297 ? Runtime::kLoadLookupSlot
1298 : Runtime::kLoadLookupSlotInsideTypeof;
1299 __ CallRuntime(function_id);
1300 __ bind(&done);
1301 context()->Plug(r2);
1302 }
1303 }
1304}
1305
1306void FullCodeGenerator::VisitRegExpLiteral(RegExpLiteral* expr) {
1307 Comment cmnt(masm_, "[ RegExpLiteral");
1308 __ LoadP(r5, MemOperand(fp, JavaScriptFrameConstants::kFunctionOffset));
1309 __ LoadSmiLiteral(r4, Smi::FromInt(expr->literal_index()));
1310 __ mov(r3, Operand(expr->pattern()));
1311 __ LoadSmiLiteral(r2, Smi::FromInt(expr->flags()));
1312 FastCloneRegExpStub stub(isolate());
1313 __ CallStub(&stub);
1314 context()->Plug(r2);
1315}
1316
1317void FullCodeGenerator::EmitAccessor(ObjectLiteralProperty* property) {
1318 Expression* expression = (property == NULL) ? NULL : property->value();
1319 if (expression == NULL) {
1320 __ LoadRoot(r3, Heap::kNullValueRootIndex);
1321 PushOperand(r3);
1322 } else {
1323 VisitForStackValue(expression);
1324 if (NeedsHomeObject(expression)) {
1325 DCHECK(property->kind() == ObjectLiteral::Property::GETTER ||
1326 property->kind() == ObjectLiteral::Property::SETTER);
1327 int offset = property->kind() == ObjectLiteral::Property::GETTER ? 2 : 3;
1328 EmitSetHomeObject(expression, offset, property->GetSlot());
1329 }
1330 }
1331}
1332
1333void FullCodeGenerator::VisitObjectLiteral(ObjectLiteral* expr) {
1334 Comment cmnt(masm_, "[ ObjectLiteral");
1335
1336 Handle<FixedArray> constant_properties = expr->constant_properties();
1337 __ LoadP(r5, MemOperand(fp, JavaScriptFrameConstants::kFunctionOffset));
1338 __ LoadSmiLiteral(r4, Smi::FromInt(expr->literal_index()));
1339 __ mov(r3, Operand(constant_properties));
1340 int flags = expr->ComputeFlags();
1341 __ LoadSmiLiteral(r2, Smi::FromInt(flags));
1342 if (MustCreateObjectLiteralWithRuntime(expr)) {
1343 __ Push(r5, r4, r3, r2);
1344 __ CallRuntime(Runtime::kCreateObjectLiteral);
1345 } else {
1346 FastCloneShallowObjectStub stub(isolate(), expr->properties_count());
1347 __ CallStub(&stub);
1348 RestoreContext();
1349 }
1350 PrepareForBailoutForId(expr->CreateLiteralId(), BailoutState::TOS_REGISTER);
1351
1352 // If result_saved is true the result is on top of the stack. If
1353 // result_saved is false the result is in r2.
1354 bool result_saved = false;
1355
1356 AccessorTable accessor_table(zone());
1357 int property_index = 0;
1358 for (; property_index < expr->properties()->length(); property_index++) {
1359 ObjectLiteral::Property* property = expr->properties()->at(property_index);
1360 if (property->is_computed_name()) break;
1361 if (property->IsCompileTimeValue()) continue;
1362
1363 Literal* key = property->key()->AsLiteral();
1364 Expression* value = property->value();
1365 if (!result_saved) {
1366 PushOperand(r2); // Save result on stack
1367 result_saved = true;
1368 }
1369 switch (property->kind()) {
1370 case ObjectLiteral::Property::CONSTANT:
1371 UNREACHABLE();
1372 case ObjectLiteral::Property::MATERIALIZED_LITERAL:
1373 DCHECK(!CompileTimeValue::IsCompileTimeValue(property->value()));
1374 // Fall through.
1375 case ObjectLiteral::Property::COMPUTED:
1376 // It is safe to use [[Put]] here because the boilerplate already
1377 // contains computed properties with an uninitialized value.
1378 if (key->value()->IsInternalizedString()) {
1379 if (property->emit_store()) {
1380 VisitForAccumulatorValue(value);
1381 DCHECK(StoreDescriptor::ValueRegister().is(r2));
1382 __ mov(StoreDescriptor::NameRegister(), Operand(key->value()));
1383 __ LoadP(StoreDescriptor::ReceiverRegister(), MemOperand(sp));
1384 EmitLoadStoreICSlot(property->GetSlot(0));
1385 CallStoreIC();
1386 PrepareForBailoutForId(key->id(), BailoutState::NO_REGISTERS);
1387
1388 if (NeedsHomeObject(value)) {
1389 EmitSetHomeObjectAccumulator(value, 0, property->GetSlot(1));
1390 }
1391 } else {
1392 VisitForEffect(value);
1393 }
1394 break;
1395 }
1396 // Duplicate receiver on stack.
1397 __ LoadP(r2, MemOperand(sp));
1398 PushOperand(r2);
1399 VisitForStackValue(key);
1400 VisitForStackValue(value);
1401 if (property->emit_store()) {
1402 if (NeedsHomeObject(value)) {
1403 EmitSetHomeObject(value, 2, property->GetSlot());
1404 }
1405 __ LoadSmiLiteral(r2, Smi::FromInt(SLOPPY)); // PropertyAttributes
1406 PushOperand(r2);
1407 CallRuntimeWithOperands(Runtime::kSetProperty);
1408 } else {
1409 DropOperands(3);
1410 }
1411 break;
1412 case ObjectLiteral::Property::PROTOTYPE:
1413 // Duplicate receiver on stack.
1414 __ LoadP(r2, MemOperand(sp));
1415 PushOperand(r2);
1416 VisitForStackValue(value);
1417 DCHECK(property->emit_store());
1418 CallRuntimeWithOperands(Runtime::kInternalSetPrototype);
1419 PrepareForBailoutForId(expr->GetIdForPropertySet(property_index),
1420 BailoutState::NO_REGISTERS);
1421 break;
1422 case ObjectLiteral::Property::GETTER:
1423 if (property->emit_store()) {
1424 accessor_table.lookup(key)->second->getter = property;
1425 }
1426 break;
1427 case ObjectLiteral::Property::SETTER:
1428 if (property->emit_store()) {
1429 accessor_table.lookup(key)->second->setter = property;
1430 }
1431 break;
1432 }
1433 }
1434
1435 // Emit code to define accessors, using only a single call to the runtime for
1436 // each pair of corresponding getters and setters.
1437 for (AccessorTable::Iterator it = accessor_table.begin();
1438 it != accessor_table.end(); ++it) {
1439 __ LoadP(r2, MemOperand(sp)); // Duplicate receiver.
1440 PushOperand(r2);
1441 VisitForStackValue(it->first);
1442 EmitAccessor(it->second->getter);
1443 EmitAccessor(it->second->setter);
1444 __ LoadSmiLiteral(r2, Smi::FromInt(NONE));
1445 PushOperand(r2);
1446 CallRuntimeWithOperands(Runtime::kDefineAccessorPropertyUnchecked);
1447 }
1448
1449 // Object literals have two parts. The "static" part on the left contains no
1450 // computed property names, and so we can compute its map ahead of time; see
1451 // runtime.cc::CreateObjectLiteralBoilerplate. The second "dynamic" part
1452 // starts with the first computed property name, and continues with all
1453 // properties to its right. All the code from above initializes the static
1454 // component of the object literal, and arranges for the map of the result to
1455 // reflect the static order in which the keys appear. For the dynamic
1456 // properties, we compile them into a series of "SetOwnProperty" runtime
1457 // calls. This will preserve insertion order.
1458 for (; property_index < expr->properties()->length(); property_index++) {
1459 ObjectLiteral::Property* property = expr->properties()->at(property_index);
1460
1461 Expression* value = property->value();
1462 if (!result_saved) {
1463 PushOperand(r2); // Save result on the stack
1464 result_saved = true;
1465 }
1466
1467 __ LoadP(r2, MemOperand(sp)); // Duplicate receiver.
1468 PushOperand(r2);
1469
1470 if (property->kind() == ObjectLiteral::Property::PROTOTYPE) {
1471 DCHECK(!property->is_computed_name());
1472 VisitForStackValue(value);
1473 DCHECK(property->emit_store());
1474 CallRuntimeWithOperands(Runtime::kInternalSetPrototype);
1475 PrepareForBailoutForId(expr->GetIdForPropertySet(property_index),
Ben Murdochc5610432016-08-08 18:44:38 +01001476 BailoutState::NO_REGISTERS);
Ben Murdochda12d292016-06-02 14:46:10 +01001477 } else {
1478 EmitPropertyKey(property, expr->GetIdForPropertyName(property_index));
1479 VisitForStackValue(value);
1480 if (NeedsHomeObject(value)) {
1481 EmitSetHomeObject(value, 2, property->GetSlot());
1482 }
1483
1484 switch (property->kind()) {
1485 case ObjectLiteral::Property::CONSTANT:
1486 case ObjectLiteral::Property::MATERIALIZED_LITERAL:
1487 case ObjectLiteral::Property::COMPUTED:
1488 if (property->emit_store()) {
1489 PushOperand(Smi::FromInt(NONE));
1490 PushOperand(Smi::FromInt(property->NeedsSetFunctionName()));
1491 CallRuntimeWithOperands(Runtime::kDefineDataPropertyInLiteral);
1492 } else {
1493 DropOperands(3);
1494 }
1495 break;
1496
1497 case ObjectLiteral::Property::PROTOTYPE:
1498 UNREACHABLE();
1499 break;
1500
1501 case ObjectLiteral::Property::GETTER:
1502 PushOperand(Smi::FromInt(NONE));
1503 CallRuntimeWithOperands(Runtime::kDefineGetterPropertyUnchecked);
1504 break;
1505
1506 case ObjectLiteral::Property::SETTER:
1507 PushOperand(Smi::FromInt(NONE));
1508 CallRuntimeWithOperands(Runtime::kDefineSetterPropertyUnchecked);
1509 break;
1510 }
1511 }
1512 }
1513
1514 if (result_saved) {
1515 context()->PlugTOS();
1516 } else {
1517 context()->Plug(r2);
1518 }
1519}
1520
1521void FullCodeGenerator::VisitArrayLiteral(ArrayLiteral* expr) {
1522 Comment cmnt(masm_, "[ ArrayLiteral");
1523
1524 Handle<FixedArray> constant_elements = expr->constant_elements();
1525 bool has_fast_elements =
1526 IsFastObjectElementsKind(expr->constant_elements_kind());
1527 Handle<FixedArrayBase> constant_elements_values(
1528 FixedArrayBase::cast(constant_elements->get(1)));
1529
1530 AllocationSiteMode allocation_site_mode = TRACK_ALLOCATION_SITE;
1531 if (has_fast_elements && !FLAG_allocation_site_pretenuring) {
1532 // With pretenuring disabled, element-kind transitions are the only consumer of
1533 // allocation sites; fast object elements have nothing to transition to, so tracking can be turned off.
1534 allocation_site_mode = DONT_TRACK_ALLOCATION_SITE;
1535 }
1536
1537 __ LoadP(r5, MemOperand(fp, JavaScriptFrameConstants::kFunctionOffset));
1538 __ LoadSmiLiteral(r4, Smi::FromInt(expr->literal_index()));
1539 __ mov(r3, Operand(constant_elements));
1540 if (MustCreateArrayLiteralWithRuntime(expr)) {
1541 __ LoadSmiLiteral(r2, Smi::FromInt(expr->ComputeFlags()));
1542 __ Push(r5, r4, r3, r2);
1543 __ CallRuntime(Runtime::kCreateArrayLiteral);
1544 } else {
1545 FastCloneShallowArrayStub stub(isolate(), allocation_site_mode);
1546 __ CallStub(&stub);
1547 }
Ben Murdochc5610432016-08-08 18:44:38 +01001548 PrepareForBailoutForId(expr->CreateLiteralId(), BailoutState::TOS_REGISTER);
Ben Murdochda12d292016-06-02 14:46:10 +01001549
1550 bool result_saved = false; // Is the result saved to the stack?
1551 ZoneList<Expression*>* subexprs = expr->values();
1552 int length = subexprs->length();
1553
1554 // Emit code to evaluate all the non-constant subexpressions and to store
1555 // them into the newly cloned array.
1556 int array_index = 0;
1557 for (; array_index < length; array_index++) {
1558 Expression* subexpr = subexprs->at(array_index);
1559 DCHECK(!subexpr->IsSpread());
1560 // If the subexpression is a literal or a simple materialized literal it
1561 // is already set in the cloned array.
1562 if (CompileTimeValue::IsCompileTimeValue(subexpr)) continue;
1563
1564 if (!result_saved) {
1565 PushOperand(r2);
1566 result_saved = true;
1567 }
1568 VisitForAccumulatorValue(subexpr);
1569
1570 __ LoadSmiLiteral(StoreDescriptor::NameRegister(),
1571 Smi::FromInt(array_index));
1572 __ LoadP(StoreDescriptor::ReceiverRegister(), MemOperand(sp, 0));
1573 EmitLoadStoreICSlot(expr->LiteralFeedbackSlot());
1574 Handle<Code> ic =
1575 CodeFactory::KeyedStoreIC(isolate(), language_mode()).code();
1576 CallIC(ic);
1577
Ben Murdochc5610432016-08-08 18:44:38 +01001578 PrepareForBailoutForId(expr->GetIdForElement(array_index),
1579 BailoutState::NO_REGISTERS);
Ben Murdochda12d292016-06-02 14:46:10 +01001580 }
1581
1582 // If the array literal contains spread expressions, it has two parts. The
1583 // first part is the "static" array, which has a literal index and was handled
1584 // above. The second part starts at the first spread expression (inclusive);
1585 // its elements are appended to the array one by one. Note that the number of
1586 // elements an iterable produces is unknown ahead of time.
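  // Each remaining element is appended by pushing the array and the element
  // and calling Runtime::kAppendElement; a bailout id is recorded per element
  // so deoptimization can resume at the correct position.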
1587 if (array_index < length && result_saved) {
1588 PopOperand(r2);
1589 result_saved = false;
1590 }
1591 for (; array_index < length; array_index++) {
1592 Expression* subexpr = subexprs->at(array_index);
1593
1594 PushOperand(r2);
1595 DCHECK(!subexpr->IsSpread());
1596 VisitForStackValue(subexpr);
1597 CallRuntimeWithOperands(Runtime::kAppendElement);
1598
Ben Murdochc5610432016-08-08 18:44:38 +01001599 PrepareForBailoutForId(expr->GetIdForElement(array_index),
1600 BailoutState::NO_REGISTERS);
Ben Murdochda12d292016-06-02 14:46:10 +01001601 }
1602
1603 if (result_saved) {
1604 context()->PlugTOS();
1605 } else {
1606 context()->Plug(r2);
1607 }
1608}
1609
1610void FullCodeGenerator::VisitAssignment(Assignment* expr) {
1611 DCHECK(expr->target()->IsValidReferenceExpressionOrThis());
1612
1613 Comment cmnt(masm_, "[ Assignment");
Ben Murdochda12d292016-06-02 14:46:10 +01001614
1615 Property* property = expr->target()->AsProperty();
1616 LhsKind assign_type = Property::GetAssignType(property);
1617
1618 // Evaluate LHS expression.
1619 switch (assign_type) {
1620 case VARIABLE:
1621 // Nothing to do here.
1622 break;
1623 case NAMED_PROPERTY:
1624 if (expr->is_compound()) {
1625 // We need the receiver both on the stack and in the register.
1626 VisitForStackValue(property->obj());
1627 __ LoadP(LoadDescriptor::ReceiverRegister(), MemOperand(sp, 0));
1628 } else {
1629 VisitForStackValue(property->obj());
1630 }
1631 break;
1632 case NAMED_SUPER_PROPERTY:
1633 VisitForStackValue(
1634 property->obj()->AsSuperPropertyReference()->this_var());
1635 VisitForAccumulatorValue(
1636 property->obj()->AsSuperPropertyReference()->home_object());
1637 PushOperand(result_register());
1638 if (expr->is_compound()) {
1639 const Register scratch = r3;
1640 __ LoadP(scratch, MemOperand(sp, kPointerSize));
1641 PushOperands(scratch, result_register());
1642 }
1643 break;
1644 case KEYED_SUPER_PROPERTY: {
1645 const Register scratch = r3;
1646 VisitForStackValue(
1647 property->obj()->AsSuperPropertyReference()->this_var());
1648 VisitForAccumulatorValue(
1649 property->obj()->AsSuperPropertyReference()->home_object());
1650 __ LoadRR(scratch, result_register());
1651 VisitForAccumulatorValue(property->key());
1652 PushOperands(scratch, result_register());
1653 if (expr->is_compound()) {
1654 const Register scratch1 = r4;
1655 __ LoadP(scratch1, MemOperand(sp, 2 * kPointerSize));
1656 PushOperands(scratch1, scratch, result_register());
1657 }
1658 break;
1659 }
1660 case KEYED_PROPERTY:
1661 if (expr->is_compound()) {
1662 VisitForStackValue(property->obj());
1663 VisitForStackValue(property->key());
1664 __ LoadP(LoadDescriptor::ReceiverRegister(),
1665 MemOperand(sp, 1 * kPointerSize));
1666 __ LoadP(LoadDescriptor::NameRegister(), MemOperand(sp, 0));
1667 } else {
1668 VisitForStackValue(property->obj());
1669 VisitForStackValue(property->key());
1670 }
1671 break;
1672 }
1673
1674 // For compound assignments we need another deoptimization point after the
1675 // variable/property load.
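  // (For a hypothetical 'o.p += v': o.p is loaded, v is evaluated, the
  // addition is performed, and only then is the result stored back, so a
  // bailout right after the load can resume with the loaded value.)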
1676 if (expr->is_compound()) {
1677 {
1678 AccumulatorValueContext context(this);
1679 switch (assign_type) {
1680 case VARIABLE:
1681 EmitVariableLoad(expr->target()->AsVariableProxy());
Ben Murdochc5610432016-08-08 18:44:38 +01001682 PrepareForBailout(expr->target(), BailoutState::TOS_REGISTER);
Ben Murdochda12d292016-06-02 14:46:10 +01001683 break;
1684 case NAMED_PROPERTY:
1685 EmitNamedPropertyLoad(property);
Ben Murdochc5610432016-08-08 18:44:38 +01001686 PrepareForBailoutForId(property->LoadId(),
1687 BailoutState::TOS_REGISTER);
Ben Murdochda12d292016-06-02 14:46:10 +01001688 break;
1689 case NAMED_SUPER_PROPERTY:
1690 EmitNamedSuperPropertyLoad(property);
Ben Murdochc5610432016-08-08 18:44:38 +01001691 PrepareForBailoutForId(property->LoadId(),
1692 BailoutState::TOS_REGISTER);
Ben Murdochda12d292016-06-02 14:46:10 +01001693 break;
1694 case KEYED_SUPER_PROPERTY:
1695 EmitKeyedSuperPropertyLoad(property);
Ben Murdochc5610432016-08-08 18:44:38 +01001696 PrepareForBailoutForId(property->LoadId(),
1697 BailoutState::TOS_REGISTER);
Ben Murdochda12d292016-06-02 14:46:10 +01001698 break;
1699 case KEYED_PROPERTY:
1700 EmitKeyedPropertyLoad(property);
Ben Murdochc5610432016-08-08 18:44:38 +01001701 PrepareForBailoutForId(property->LoadId(),
1702 BailoutState::TOS_REGISTER);
Ben Murdochda12d292016-06-02 14:46:10 +01001703 break;
1704 }
1705 }
1706
1707 Token::Value op = expr->binary_op();
1708 PushOperand(r2); // Left operand goes on the stack.
1709 VisitForAccumulatorValue(expr->value());
1710
1711 AccumulatorValueContext context(this);
1712 if (ShouldInlineSmiCase(op)) {
1713 EmitInlineSmiBinaryOp(expr->binary_operation(), op, expr->target(),
1714 expr->value());
1715 } else {
1716 EmitBinaryOp(expr->binary_operation(), op);
1717 }
1718
1719 // Deoptimization point in case the binary operation may have side effects.
Ben Murdochc5610432016-08-08 18:44:38 +01001720 PrepareForBailout(expr->binary_operation(), BailoutState::TOS_REGISTER);
Ben Murdochda12d292016-06-02 14:46:10 +01001721 } else {
1722 VisitForAccumulatorValue(expr->value());
1723 }
1724
1725 SetExpressionPosition(expr);
1726
1727 // Store the value.
1728 switch (assign_type) {
1729 case VARIABLE:
1730 EmitVariableAssignment(expr->target()->AsVariableProxy()->var(),
1731 expr->op(), expr->AssignmentSlot());
Ben Murdochc5610432016-08-08 18:44:38 +01001732 PrepareForBailoutForId(expr->AssignmentId(), BailoutState::TOS_REGISTER);
Ben Murdochda12d292016-06-02 14:46:10 +01001733 context()->Plug(r2);
1734 break;
1735 case NAMED_PROPERTY:
1736 EmitNamedPropertyAssignment(expr);
1737 break;
1738 case NAMED_SUPER_PROPERTY:
1739 EmitNamedSuperPropertyStore(property);
1740 context()->Plug(r2);
1741 break;
1742 case KEYED_SUPER_PROPERTY:
1743 EmitKeyedSuperPropertyStore(property);
1744 context()->Plug(r2);
1745 break;
1746 case KEYED_PROPERTY:
1747 EmitKeyedPropertyAssignment(expr);
1748 break;
1749 }
1750}
1751
1752void FullCodeGenerator::VisitYield(Yield* expr) {
1753 Comment cmnt(masm_, "[ Yield");
1754 SetExpressionPosition(expr);
1755
1756 // Evaluate yielded value first; the initial iterator definition depends on
1757 // this. It stays on the stack while we update the iterator.
1758 VisitForStackValue(expr->expression());
1759
Ben Murdochc5610432016-08-08 18:44:38 +01001760 Label suspend, continuation, post_runtime, resume, exception;
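  // Control flow: branch to 'suspend' to save the generator state and leave
  // the frame; 'continuation' is where a resumed generator re-enters, with the
  // resume mode in r3 selecting between a normal resume, a return (which wraps
  // the input in an iterator result), and a throw.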
Ben Murdochda12d292016-06-02 14:46:10 +01001761
1762 __ b(&suspend);
1763 __ bind(&continuation);
Ben Murdochc5610432016-08-08 18:44:38 +01001764 // When we arrive here, r2 holds the generator object.
Ben Murdochda12d292016-06-02 14:46:10 +01001765 __ RecordGeneratorContinuation();
Ben Murdochc5610432016-08-08 18:44:38 +01001766 __ LoadP(r3, FieldMemOperand(r2, JSGeneratorObject::kResumeModeOffset));
1767 __ LoadP(r2, FieldMemOperand(r2, JSGeneratorObject::kInputOffset));
1768 STATIC_ASSERT(JSGeneratorObject::kNext < JSGeneratorObject::kReturn);
1769 STATIC_ASSERT(JSGeneratorObject::kThrow > JSGeneratorObject::kReturn);
1770 __ CmpSmiLiteral(r3, Smi::FromInt(JSGeneratorObject::kReturn), r0);
1771 __ blt(&resume);
1772 __ Push(result_register());
1773 __ bgt(&exception);
Ben Murdochda12d292016-06-02 14:46:10 +01001774 EmitCreateIteratorResult(true);
1775 EmitUnwindAndReturn();
1776
Ben Murdochc5610432016-08-08 18:44:38 +01001777 __ bind(&exception);
1778 __ CallRuntime(Runtime::kThrow);
1779
Ben Murdochda12d292016-06-02 14:46:10 +01001780 __ bind(&suspend);
1781 OperandStackDepthIncrement(1); // Not popped on this path.
1782 VisitForAccumulatorValue(expr->generator_object());
1783 DCHECK(continuation.pos() > 0 && Smi::IsValid(continuation.pos()));
1784 __ LoadSmiLiteral(r3, Smi::FromInt(continuation.pos()));
1785 __ StoreP(r3, FieldMemOperand(r2, JSGeneratorObject::kContinuationOffset),
1786 r0);
1787 __ StoreP(cp, FieldMemOperand(r2, JSGeneratorObject::kContextOffset), r0);
1788 __ LoadRR(r3, cp);
1789 __ RecordWriteField(r2, JSGeneratorObject::kContextOffset, r3, r4,
1790 kLRHasBeenSaved, kDontSaveFPRegs);
1791 __ AddP(r3, fp, Operand(StandardFrameConstants::kExpressionsOffset));
1792 __ CmpP(sp, r3);
1793 __ beq(&post_runtime);
1794 __ push(r2); // generator object
1795 __ CallRuntime(Runtime::kSuspendJSGeneratorObject, 1);
Ben Murdochc5610432016-08-08 18:44:38 +01001796 RestoreContext();
Ben Murdochda12d292016-06-02 14:46:10 +01001797 __ bind(&post_runtime);
1798 PopOperand(result_register());
1799 EmitReturnSequence();
1800
1801 __ bind(&resume);
1802 context()->Plug(result_register());
1803}
1804
Ben Murdochda12d292016-06-02 14:46:10 +01001805void FullCodeGenerator::PushOperands(Register reg1, Register reg2) {
1806 OperandStackDepthIncrement(2);
1807 __ Push(reg1, reg2);
1808}
1809
1810void FullCodeGenerator::PushOperands(Register reg1, Register reg2,
1811 Register reg3) {
1812 OperandStackDepthIncrement(3);
1813 __ Push(reg1, reg2, reg3);
1814}
1815
1816void FullCodeGenerator::PushOperands(Register reg1, Register reg2,
1817 Register reg3, Register reg4) {
1818 OperandStackDepthIncrement(4);
1819 __ Push(reg1, reg2, reg3, reg4);
1820}
1821
1822void FullCodeGenerator::PopOperands(Register reg1, Register reg2) {
1823 OperandStackDepthDecrement(2);
1824 __ Pop(reg1, reg2);
1825}
1826
1827void FullCodeGenerator::EmitOperandStackDepthCheck() {
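  // Debug-only check: the distance between fp and sp must equal the fixed
  // frame size plus the operand stack depth tracked at compile time.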
1828 if (FLAG_debug_code) {
1829 int expected_diff = StandardFrameConstants::kFixedFrameSizeFromFp +
1830 operand_stack_depth_ * kPointerSize;
1831 __ SubP(r2, fp, sp);
1832 __ CmpP(r2, Operand(expected_diff));
1833 __ Assert(eq, kUnexpectedStackDepth);
1834 }
1835}
1836
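// Allocates and initializes a JSIteratorResult: the value is popped from the
// operand stack and 'done' is the compile-time constant argument. An inline
// new-space allocation is attempted first, falling back to
// Runtime::kAllocateInNewSpace.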
1837void FullCodeGenerator::EmitCreateIteratorResult(bool done) {
1838 Label allocate, done_allocate;
1839
Ben Murdochc5610432016-08-08 18:44:38 +01001840 __ Allocate(JSIteratorResult::kSize, r2, r4, r5, &allocate,
1841 NO_ALLOCATION_FLAGS);
Ben Murdochda12d292016-06-02 14:46:10 +01001842 __ b(&done_allocate);
1843
1844 __ bind(&allocate);
1845 __ Push(Smi::FromInt(JSIteratorResult::kSize));
1846 __ CallRuntime(Runtime::kAllocateInNewSpace);
1847
1848 __ bind(&done_allocate);
1849 __ LoadNativeContextSlot(Context::ITERATOR_RESULT_MAP_INDEX, r3);
1850 PopOperand(r4);
1851 __ LoadRoot(r5,
1852 done ? Heap::kTrueValueRootIndex : Heap::kFalseValueRootIndex);
1853 __ LoadRoot(r6, Heap::kEmptyFixedArrayRootIndex);
1854 __ StoreP(r3, FieldMemOperand(r2, HeapObject::kMapOffset), r0);
1855 __ StoreP(r6, FieldMemOperand(r2, JSObject::kPropertiesOffset), r0);
1856 __ StoreP(r6, FieldMemOperand(r2, JSObject::kElementsOffset), r0);
1857 __ StoreP(r4, FieldMemOperand(r2, JSIteratorResult::kValueOffset), r0);
1858 __ StoreP(r5, FieldMemOperand(r2, JSIteratorResult::kDoneOffset), r0);
1859}
1860
1861void FullCodeGenerator::EmitInlineSmiBinaryOp(BinaryOperation* expr,
1862 Token::Value op,
1863 Expression* left_expr,
1864 Expression* right_expr) {
1865 Label done, smi_case, stub_call;
1866
1867 Register scratch1 = r4;
1868 Register scratch2 = r5;
1869
1870 // Get the arguments.
1871 Register left = r3;
1872 Register right = r2;
1873 PopOperand(left);
1874
1875 // Perform combined smi check on both operands.
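  // Since kSmiTag is 0, the OR of the two tagged values has its low tag bit
  // set exactly when at least one operand is a heap object, so a single smi
  // test covers both operands.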
1876 __ LoadRR(scratch1, right);
1877 __ OrP(scratch1, left);
1878 STATIC_ASSERT(kSmiTag == 0);
1879 JumpPatchSite patch_site(masm_);
1880 patch_site.EmitJumpIfSmi(scratch1, &smi_case);
1881
1882 __ bind(&stub_call);
1883 Handle<Code> code = CodeFactory::BinaryOpIC(isolate(), op).code();
1884 CallIC(code, expr->BinaryOperationFeedbackId());
1885 patch_site.EmitPatchInfo();
1886 __ b(&done);
1887
1888 __ bind(&smi_case);
1889 // Smi case. This code works the same way as the smi-smi case in the type
1890 // recording binary operation stub.
1891 switch (op) {
1892 case Token::SAR:
1893 __ GetLeastBitsFromSmi(scratch1, right, 5);
1894 __ ShiftRightArithP(right, left, scratch1);
1895 __ ClearRightImm(right, right, Operand(kSmiTagSize + kSmiShiftSize));
1896 break;
1897 case Token::SHL: {
1898 __ GetLeastBitsFromSmi(scratch2, right, 5);
1899#if V8_TARGET_ARCH_S390X
1900 __ ShiftLeftP(right, left, scratch2);
1901#else
1902 __ SmiUntag(scratch1, left);
1903 __ ShiftLeftP(scratch1, scratch1, scratch2);
1904 // Check that the *signed* result fits in a smi
1905 __ JumpIfNotSmiCandidate(scratch1, scratch2, &stub_call);
1906 __ SmiTag(right, scratch1);
1907#endif
1908 break;
1909 }
1910 case Token::SHR: {
1911 __ SmiUntag(scratch1, left);
1912 __ GetLeastBitsFromSmi(scratch2, right, 5);
1913 __ srl(scratch1, scratch2);
1914 // Unsigned shift is not allowed to produce a negative number.
1915 __ JumpIfNotUnsignedSmiCandidate(scratch1, r0, &stub_call);
1916 __ SmiTag(right, scratch1);
1917 break;
1918 }
1919 case Token::ADD: {
1920 __ AddAndCheckForOverflow(scratch1, left, right, scratch2, r0);
1921 __ BranchOnOverflow(&stub_call);
1922 __ LoadRR(right, scratch1);
1923 break;
1924 }
1925 case Token::SUB: {
1926 __ SubAndCheckForOverflow(scratch1, left, right, scratch2, r0);
1927 __ BranchOnOverflow(&stub_call);
1928 __ LoadRR(right, scratch1);
1929 break;
1930 }
1931 case Token::MUL: {
1932 Label mul_zero;
1933#if V8_TARGET_ARCH_S390X
1934 // Remove tag from both operands.
1935 __ SmiUntag(ip, right);
1936 __ SmiUntag(scratch2, left);
1937 __ mr_z(scratch1, ip);
1938 // Check for overflowing the smi range - no overflow if higher 33 bits of
1939 // the result are identical.
1940 __ lr(ip, scratch2); // 32 bit load
1941 __ sra(ip, Operand(31));
1942 __ cr_z(ip, scratch1); // 32 bit compare
1943 __ bne(&stub_call);
1944#else
1945 __ SmiUntag(ip, right);
1946 __ LoadRR(scratch2, left); // load into low order of reg pair
1947 __ mr_z(scratch1, ip); // R4:R5 = R5 * ip
1948 // Check for overflowing the smi range - no overflow if higher 33 bits of
1949 // the result are identical.
1950 __ TestIfInt32(scratch1, scratch2, ip);
1951 __ bne(&stub_call);
1952#endif
1953 // Go slow on zero result to handle -0.
1954 __ chi(scratch2, Operand::Zero());
1955 __ beq(&mul_zero, Label::kNear);
1956#if V8_TARGET_ARCH_S390X
1957 __ SmiTag(right, scratch2);
1958#else
1959 __ LoadRR(right, scratch2);
1960#endif
1961 __ b(&done);
1962 // The result is zero, but it must be -0 if a negative number was multiplied
1963 // by zero. One operand is known to be zero, so check the other's sign.
1964 __ bind(&mul_zero);
1965 __ AddP(scratch2, right, left);
1966 __ CmpP(scratch2, Operand::Zero());
1967 __ blt(&stub_call);
1968 __ LoadSmiLiteral(right, Smi::FromInt(0));
1969 break;
1970 }
1971 case Token::BIT_OR:
1972 __ OrP(right, left);
1973 break;
1974 case Token::BIT_AND:
1975 __ AndP(right, left);
1976 break;
1977 case Token::BIT_XOR:
1978 __ XorP(right, left);
1979 break;
1980 default:
1981 UNREACHABLE();
1982 }
1983
1984 __ bind(&done);
1985 context()->Plug(r2);
1986}
1987
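// Defines the properties of a class literal, installing static properties on
// the constructor and the others on the prototype; both objects are expected
// at fixed operand-stack slots set up by the caller.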
1988void FullCodeGenerator::EmitClassDefineProperties(ClassLiteral* lit) {
1989 for (int i = 0; i < lit->properties()->length(); i++) {
1990 ObjectLiteral::Property* property = lit->properties()->at(i);
1991 Expression* value = property->value();
1992
1993 Register scratch = r3;
1994 if (property->is_static()) {
1995 __ LoadP(scratch, MemOperand(sp, kPointerSize)); // constructor
1996 } else {
1997 __ LoadP(scratch, MemOperand(sp, 0)); // prototype
1998 }
1999 PushOperand(scratch);
2000 EmitPropertyKey(property, lit->GetIdForProperty(i));
2001
2002 // The static 'prototype' property is read-only. The non-computed property
2003 // name case is handled in the parser. Since this is the only case where we
2004 // need to check for an own read-only property, we special-case it here so
2005 // that the check is not emitted for every property.
2006 if (property->is_static() && property->is_computed_name()) {
2007 __ CallRuntime(Runtime::kThrowIfStaticPrototype);
2008 __ push(r2);
2009 }
2010
2011 VisitForStackValue(value);
2012 if (NeedsHomeObject(value)) {
2013 EmitSetHomeObject(value, 2, property->GetSlot());
2014 }
2015
2016 switch (property->kind()) {
2017 case ObjectLiteral::Property::CONSTANT:
2018 case ObjectLiteral::Property::MATERIALIZED_LITERAL:
2019 case ObjectLiteral::Property::PROTOTYPE:
2020 UNREACHABLE();
2021 case ObjectLiteral::Property::COMPUTED:
2022 PushOperand(Smi::FromInt(DONT_ENUM));
2023 PushOperand(Smi::FromInt(property->NeedsSetFunctionName()));
2024 CallRuntimeWithOperands(Runtime::kDefineDataPropertyInLiteral);
2025 break;
2026
2027 case ObjectLiteral::Property::GETTER:
2028 PushOperand(Smi::FromInt(DONT_ENUM));
2029 CallRuntimeWithOperands(Runtime::kDefineGetterPropertyUnchecked);
2030 break;
2031
2032 case ObjectLiteral::Property::SETTER:
2033 PushOperand(Smi::FromInt(DONT_ENUM));
2034 CallRuntimeWithOperands(Runtime::kDefineSetterPropertyUnchecked);
2035 break;
2036
2037 default:
2038 UNREACHABLE();
2039 }
2040 }
2041}
2042
2043void FullCodeGenerator::EmitBinaryOp(BinaryOperation* expr, Token::Value op) {
2044 PopOperand(r3);
2045 Handle<Code> code = CodeFactory::BinaryOpIC(isolate(), op).code();
2046 JumpPatchSite patch_site(masm_); // unbound, signals no inlined smi code.
2047 CallIC(code, expr->BinaryOperationFeedbackId());
2048 patch_site.EmitPatchInfo();
2049 context()->Plug(r2);
2050}
2051
2052void FullCodeGenerator::EmitAssignment(Expression* expr,
2053 FeedbackVectorSlot slot) {
2054 DCHECK(expr->IsValidReferenceExpressionOrThis());
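  // The value to store is expected in the accumulator (r2); the switch below
  // routes it to the appropriate store IC or runtime call for the kind of
  // reference being assigned.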
2055
2056 Property* prop = expr->AsProperty();
2057 LhsKind assign_type = Property::GetAssignType(prop);
2058
2059 switch (assign_type) {
2060 case VARIABLE: {
2061 Variable* var = expr->AsVariableProxy()->var();
2062 EffectContext context(this);
2063 EmitVariableAssignment(var, Token::ASSIGN, slot);
2064 break;
2065 }
2066 case NAMED_PROPERTY: {
2067 PushOperand(r2); // Preserve value.
2068 VisitForAccumulatorValue(prop->obj());
2069 __ Move(StoreDescriptor::ReceiverRegister(), r2);
2070 PopOperand(StoreDescriptor::ValueRegister()); // Restore value.
2071 __ mov(StoreDescriptor::NameRegister(),
2072 Operand(prop->key()->AsLiteral()->value()));
2073 EmitLoadStoreICSlot(slot);
2074 CallStoreIC();
2075 break;
2076 }
2077 case NAMED_SUPER_PROPERTY: {
2078 PushOperand(r2);
2079 VisitForStackValue(prop->obj()->AsSuperPropertyReference()->this_var());
2080 VisitForAccumulatorValue(
2081 prop->obj()->AsSuperPropertyReference()->home_object());
2082 // stack: value, this; r2: home_object
2083 Register scratch = r4;
2084 Register scratch2 = r5;
2085 __ LoadRR(scratch, result_register()); // home_object
2086 __ LoadP(r2, MemOperand(sp, kPointerSize)); // value
2087 __ LoadP(scratch2, MemOperand(sp, 0)); // this
2088 __ StoreP(scratch2, MemOperand(sp, kPointerSize)); // this
2089 __ StoreP(scratch, MemOperand(sp, 0)); // home_object
2090 // stack: this, home_object; r2: value
2091 EmitNamedSuperPropertyStore(prop);
2092 break;
2093 }
2094 case KEYED_SUPER_PROPERTY: {
2095 PushOperand(r2);
2096 VisitForStackValue(prop->obj()->AsSuperPropertyReference()->this_var());
2097 VisitForStackValue(
2098 prop->obj()->AsSuperPropertyReference()->home_object());
2099 VisitForAccumulatorValue(prop->key());
2100 Register scratch = r4;
2101 Register scratch2 = r5;
2102 __ LoadP(scratch2, MemOperand(sp, 2 * kPointerSize)); // value
2103 // stack: value, this, home_object; r2: key, r5: value
2104 __ LoadP(scratch, MemOperand(sp, kPointerSize)); // this
2105 __ StoreP(scratch, MemOperand(sp, 2 * kPointerSize));
2106 __ LoadP(scratch, MemOperand(sp, 0)); // home_object
2107 __ StoreP(scratch, MemOperand(sp, kPointerSize));
2108 __ StoreP(r2, MemOperand(sp, 0));
2109 __ Move(r2, scratch2);
2110 // stack: this, home_object, key; r2: value.
2111 EmitKeyedSuperPropertyStore(prop);
2112 break;
2113 }
2114 case KEYED_PROPERTY: {
2115 PushOperand(r2); // Preserve value.
2116 VisitForStackValue(prop->obj());
2117 VisitForAccumulatorValue(prop->key());
2118 __ Move(StoreDescriptor::NameRegister(), r2);
2119 PopOperands(StoreDescriptor::ValueRegister(),
2120 StoreDescriptor::ReceiverRegister());
2121 EmitLoadStoreICSlot(slot);
2122 Handle<Code> ic =
2123 CodeFactory::KeyedStoreIC(isolate(), language_mode()).code();
2124 CallIC(ic);
2125 break;
2126 }
2127 }
2128 context()->Plug(r2);
2129}
2130
2131void FullCodeGenerator::EmitStoreToStackLocalOrContextSlot(
2132 Variable* var, MemOperand location) {
2133 __ StoreP(result_register(), location);
2134 if (var->IsContextSlot()) {
2135 // RecordWrite may destroy all its register arguments.
2136 __ LoadRR(r5, result_register());
2137 int offset = Context::SlotOffset(var->index());
2138 __ RecordWriteContextSlot(r3, offset, r5, r4, kLRHasBeenSaved,
2139 kDontSaveFPRegs);
2140 }
2141}
2142
2143void FullCodeGenerator::EmitVariableAssignment(Variable* var, Token::Value op,
2144 FeedbackVectorSlot slot) {
2145 if (var->IsUnallocated()) {
2146 // Global var, const, or let.
2147 __ mov(StoreDescriptor::NameRegister(), Operand(var->name()));
2148 __ LoadGlobalObject(StoreDescriptor::ReceiverRegister());
2149 EmitLoadStoreICSlot(slot);
2150 CallStoreIC();
2151
2152 } else if (var->mode() == LET && op != Token::INIT) {
2153 // Non-initializing assignment to a let variable needs a hole (TDZ) check.
2154 DCHECK(!var->IsLookupSlot());
2155 DCHECK(var->IsStackAllocated() || var->IsContextSlot());
2156 Label assign;
2157 MemOperand location = VarOperand(var, r3);
2158 __ LoadP(r5, location);
2159 __ CompareRoot(r5, Heap::kTheHoleValueRootIndex);
2160 __ bne(&assign);
2161 __ mov(r5, Operand(var->name()));
2162 __ push(r5);
2163 __ CallRuntime(Runtime::kThrowReferenceError);
2164 // Perform the assignment.
2165 __ bind(&assign);
2166 EmitStoreToStackLocalOrContextSlot(var, location);
2167
2168 } else if (var->mode() == CONST && op != Token::INIT) {
2169 // Non-initializing assignment to a const variable always throws.
2170 DCHECK(!var->IsLookupSlot());
2171 DCHECK(var->IsStackAllocated() || var->IsContextSlot());
2172 Label const_error;
2173 MemOperand location = VarOperand(var, r3);
2174 __ LoadP(r5, location);
2175 __ CompareRoot(r5, Heap::kTheHoleValueRootIndex);
2176 __ bne(&const_error, Label::kNear);
2177 __ mov(r5, Operand(var->name()));
2178 __ push(r5);
2179 __ CallRuntime(Runtime::kThrowReferenceError);
2180 __ bind(&const_error);
2181 __ CallRuntime(Runtime::kThrowConstAssignError);
2182
2183 } else if (var->is_this() && var->mode() == CONST && op == Token::INIT) {
2184 // Initializing assignment to const {this} needs a write barrier.
2185 DCHECK(var->IsStackAllocated() || var->IsContextSlot());
2186 Label uninitialized_this;
2187 MemOperand location = VarOperand(var, r3);
2188 __ LoadP(r5, location);
2189 __ CompareRoot(r5, Heap::kTheHoleValueRootIndex);
2190 __ beq(&uninitialized_this);
2191 __ mov(r3, Operand(var->name()));
2192 __ push(r3);
2193 __ CallRuntime(Runtime::kThrowReferenceError);
2194 __ bind(&uninitialized_this);
2195 EmitStoreToStackLocalOrContextSlot(var, location);
2196
Ben Murdochc5610432016-08-08 18:44:38 +01002197 } else if (!var->is_const_mode() || op == Token::INIT) {
Ben Murdochda12d292016-06-02 14:46:10 +01002198 if (var->IsLookupSlot()) {
2199 // Assignment to var.
2200 __ Push(var->name());
2201 __ Push(r2);
2202 __ CallRuntime(is_strict(language_mode())
2203 ? Runtime::kStoreLookupSlot_Strict
2204 : Runtime::kStoreLookupSlot_Sloppy);
2205 } else {
2206 // Assignment to var or initializing assignment to let/const in harmony
2207 // mode.
2208 DCHECK((var->IsStackAllocated() || var->IsContextSlot()));
2209 MemOperand location = VarOperand(var, r3);
2210 if (FLAG_debug_code && var->mode() == LET && op == Token::INIT) {
2211 // Check for an uninitialized let binding.
2212 __ LoadP(r4, location);
2213 __ CompareRoot(r4, Heap::kTheHoleValueRootIndex);
2214 __ Check(eq, kLetBindingReInitialization);
2215 }
2216 EmitStoreToStackLocalOrContextSlot(var, location);
2217 }
Ben Murdochda12d292016-06-02 14:46:10 +01002218 } else {
2219 DCHECK(var->mode() == CONST_LEGACY && op != Token::INIT);
2220 if (is_strict(language_mode())) {
2221 __ CallRuntime(Runtime::kThrowConstAssignError);
2222 }
2223 // Silently ignore store in sloppy mode.
2224 }
2225}
2226
2227void FullCodeGenerator::EmitNamedPropertyAssignment(Assignment* expr) {
2228 // Assignment to a property, using a named store IC.
2229 Property* prop = expr->target()->AsProperty();
2230 DCHECK(prop != NULL);
2231 DCHECK(prop->key()->IsLiteral());
2232
2233 __ mov(StoreDescriptor::NameRegister(),
2234 Operand(prop->key()->AsLiteral()->value()));
2235 PopOperand(StoreDescriptor::ReceiverRegister());
2236 EmitLoadStoreICSlot(expr->AssignmentSlot());
2237 CallStoreIC();
2238
Ben Murdochc5610432016-08-08 18:44:38 +01002239 PrepareForBailoutForId(expr->AssignmentId(), BailoutState::TOS_REGISTER);
Ben Murdochda12d292016-06-02 14:46:10 +01002240 context()->Plug(r2);
2241}
2242
2243void FullCodeGenerator::EmitNamedSuperPropertyStore(Property* prop) {
2244 // Assignment to named property of super.
2245 // r2 : value
2246 // stack : receiver ('this'), home_object
2247 DCHECK(prop != NULL);
2248 Literal* key = prop->key()->AsLiteral();
2249 DCHECK(key != NULL);
2250
2251 PushOperand(key->value());
2252 PushOperand(r2);
2253 CallRuntimeWithOperands((is_strict(language_mode())
2254 ? Runtime::kStoreToSuper_Strict
2255 : Runtime::kStoreToSuper_Sloppy));
2256}
2257
2258void FullCodeGenerator::EmitKeyedSuperPropertyStore(Property* prop) {
2259 // Assignment to named property of super.
2260 // r2 : value
2261 // stack : receiver ('this'), home_object, key
2262 DCHECK(prop != NULL);
2263
2264 PushOperand(r2);
2265 CallRuntimeWithOperands((is_strict(language_mode())
2266 ? Runtime::kStoreKeyedToSuper_Strict
2267 : Runtime::kStoreKeyedToSuper_Sloppy));
2268}
2269
2270void FullCodeGenerator::EmitKeyedPropertyAssignment(Assignment* expr) {
2271 // Assignment to a property, using a keyed store IC.
2272 PopOperands(StoreDescriptor::ReceiverRegister(),
2273 StoreDescriptor::NameRegister());
2274 DCHECK(StoreDescriptor::ValueRegister().is(r2));
2275
2276 Handle<Code> ic =
2277 CodeFactory::KeyedStoreIC(isolate(), language_mode()).code();
2278 EmitLoadStoreICSlot(expr->AssignmentSlot());
2279 CallIC(ic);
2280
Ben Murdochc5610432016-08-08 18:44:38 +01002281 PrepareForBailoutForId(expr->AssignmentId(), BailoutState::TOS_REGISTER);
Ben Murdochda12d292016-06-02 14:46:10 +01002282 context()->Plug(r2);
2283}
2284
2285void FullCodeGenerator::CallIC(Handle<Code> code, TypeFeedbackId ast_id) {
2286 ic_total_count_++;
2287 __ Call(code, RelocInfo::CODE_TARGET, ast_id);
2288}
2289
2290// Code common for calls using the IC.
2291void FullCodeGenerator::EmitCallWithLoadIC(Call* expr) {
2292 Expression* callee = expr->expression();
2293
2294 // Get the target function.
2295 ConvertReceiverMode convert_mode;
2296 if (callee->IsVariableProxy()) {
2297 {
2298 StackValueContext context(this);
2299 EmitVariableLoad(callee->AsVariableProxy());
Ben Murdochc5610432016-08-08 18:44:38 +01002300 PrepareForBailout(callee, BailoutState::NO_REGISTERS);
Ben Murdochda12d292016-06-02 14:46:10 +01002301 }
2302 // Push undefined as receiver. This is patched in the method prologue if it
2303 // is a sloppy mode method.
2304 __ LoadRoot(r1, Heap::kUndefinedValueRootIndex);
2305 PushOperand(r1);
2306 convert_mode = ConvertReceiverMode::kNullOrUndefined;
2307 } else {
2308 // Load the function from the receiver.
2309 DCHECK(callee->IsProperty());
2310 DCHECK(!callee->AsProperty()->IsSuperAccess());
2311 __ LoadP(LoadDescriptor::ReceiverRegister(), MemOperand(sp, 0));
2312 EmitNamedPropertyLoad(callee->AsProperty());
Ben Murdochc5610432016-08-08 18:44:38 +01002313 PrepareForBailoutForId(callee->AsProperty()->LoadId(),
2314 BailoutState::TOS_REGISTER);
Ben Murdochda12d292016-06-02 14:46:10 +01002315 // Push the target function under the receiver.
2316 __ LoadP(r1, MemOperand(sp, 0));
2317 PushOperand(r1);
2318 __ StoreP(r2, MemOperand(sp, kPointerSize));
2319 convert_mode = ConvertReceiverMode::kNotNullOrUndefined;
2320 }
2321
2322 EmitCall(expr, convert_mode);
2323}
2324
2325void FullCodeGenerator::EmitSuperCallWithLoadIC(Call* expr) {
2326 Expression* callee = expr->expression();
2327 DCHECK(callee->IsProperty());
2328 Property* prop = callee->AsProperty();
2329 DCHECK(prop->IsSuperAccess());
2330 SetExpressionPosition(prop);
2331
2332 Literal* key = prop->key()->AsLiteral();
2333 DCHECK(!key->value()->IsSmi());
2334 // Load the function from the receiver.
2335 const Register scratch = r3;
2336 SuperPropertyReference* super_ref = prop->obj()->AsSuperPropertyReference();
2337 VisitForAccumulatorValue(super_ref->home_object());
2338 __ LoadRR(scratch, r2);
2339 VisitForAccumulatorValue(super_ref->this_var());
2340 PushOperands(scratch, r2, r2, scratch);
2341 PushOperand(key->value());
2342
2343 // Stack here:
2344 // - home_object
2345 // - this (receiver)
2346 // - this (receiver) <-- LoadFromSuper will pop here and below.
2347 // - home_object
2348 // - key
2349 CallRuntimeWithOperands(Runtime::kLoadFromSuper);
Ben Murdochc5610432016-08-08 18:44:38 +01002350 PrepareForBailoutForId(prop->LoadId(), BailoutState::TOS_REGISTER);
Ben Murdochda12d292016-06-02 14:46:10 +01002351
2352 // Replace home_object with target function.
2353 __ StoreP(r2, MemOperand(sp, kPointerSize));
2354
2355 // Stack here:
2356 // - target function
2357 // - this (receiver)
2358 EmitCall(expr);
2359}
2360
2361// Code common for calls using the IC.
2362void FullCodeGenerator::EmitKeyedCallWithLoadIC(Call* expr, Expression* key) {
2363 // Load the key.
2364 VisitForAccumulatorValue(key);
2365
2366 Expression* callee = expr->expression();
2367
2368 // Load the function from the receiver.
2369 DCHECK(callee->IsProperty());
2370 __ LoadP(LoadDescriptor::ReceiverRegister(), MemOperand(sp, 0));
2371 __ Move(LoadDescriptor::NameRegister(), r2);
2372 EmitKeyedPropertyLoad(callee->AsProperty());
Ben Murdochc5610432016-08-08 18:44:38 +01002373 PrepareForBailoutForId(callee->AsProperty()->LoadId(),
2374 BailoutState::TOS_REGISTER);
Ben Murdochda12d292016-06-02 14:46:10 +01002375
2376 // Push the target function under the receiver.
2377 __ LoadP(ip, MemOperand(sp, 0));
2378 PushOperand(ip);
2379 __ StoreP(r2, MemOperand(sp, kPointerSize));
2380
2381 EmitCall(expr, ConvertReceiverMode::kNotNullOrUndefined);
2382}
2383
2384void FullCodeGenerator::EmitKeyedSuperCallWithLoadIC(Call* expr) {
2385 Expression* callee = expr->expression();
2386 DCHECK(callee->IsProperty());
2387 Property* prop = callee->AsProperty();
2388 DCHECK(prop->IsSuperAccess());
2389
2390 SetExpressionPosition(prop);
2391 // Load the function from the receiver.
2392 const Register scratch = r3;
2393 SuperPropertyReference* super_ref = prop->obj()->AsSuperPropertyReference();
2394 VisitForAccumulatorValue(super_ref->home_object());
2395 __ LoadRR(scratch, r2);
2396 VisitForAccumulatorValue(super_ref->this_var());
2397 PushOperands(scratch, r2, r2, scratch);
2398 VisitForStackValue(prop->key());
2399
2400 // Stack here:
2401 // - home_object
2402 // - this (receiver)
2403 // - this (receiver) <-- LoadKeyedFromSuper will pop here and below.
2404 // - home_object
2405 // - key
2406 CallRuntimeWithOperands(Runtime::kLoadKeyedFromSuper);
Ben Murdochc5610432016-08-08 18:44:38 +01002407 PrepareForBailoutForId(prop->LoadId(), BailoutState::TOS_REGISTER);
Ben Murdochda12d292016-06-02 14:46:10 +01002408
2409 // Replace home_object with target function.
2410 __ StoreP(r2, MemOperand(sp, kPointerSize));
2411
2412 // Stack here:
2413 // - target function
2414 // - this (receiver)
2415 EmitCall(expr);
2416}
2417
2418void FullCodeGenerator::EmitCall(Call* expr, ConvertReceiverMode mode) {
2419 // Load the arguments.
2420 ZoneList<Expression*>* args = expr->arguments();
2421 int arg_count = args->length();
2422 for (int i = 0; i < arg_count; i++) {
2423 VisitForStackValue(args->at(i));
2424 }
2425
Ben Murdochc5610432016-08-08 18:44:38 +01002426 PrepareForBailoutForId(expr->CallId(), BailoutState::NO_REGISTERS);
Ben Murdochda12d292016-06-02 14:46:10 +01002427 SetCallPosition(expr, expr->tail_call_mode());
2428 if (expr->tail_call_mode() == TailCallMode::kAllow) {
2429 if (FLAG_trace) {
2430 __ CallRuntime(Runtime::kTraceTailCall);
2431 }
2432 // Update profiling counters before the tail call since we will
2433 // not return to this function.
2434 EmitProfilingCounterHandlingForReturnSequence(true);
2435 }
2436 Handle<Code> ic =
2437 CodeFactory::CallIC(isolate(), arg_count, mode, expr->tail_call_mode())
2438 .code();
2439 __ LoadSmiLiteral(r5, SmiFromSlot(expr->CallFeedbackICSlot()));
2440 __ LoadP(r3, MemOperand(sp, (arg_count + 1) * kPointerSize), r0);
2441 // Don't assign a type feedback id to the IC, since type feedback is provided
2442 // by the vector above.
2443 CallIC(ic);
2444 OperandStackDepthDecrement(arg_count + 1);
2445
2446 RecordJSReturnSite(expr);
Ben Murdochc5610432016-08-08 18:44:38 +01002447 RestoreContext();
Ben Murdochda12d292016-06-02 14:46:10 +01002448 context()->DropAndPlug(1, r2);
2449}
2450
Ben Murdochc5610432016-08-08 18:44:38 +01002451void FullCodeGenerator::EmitResolvePossiblyDirectEval(Call* expr) {
2452 int arg_count = expr->arguments()->length();
Ben Murdochda12d292016-06-02 14:46:10 +01002453 // r6: copy of the first argument or undefined if it doesn't exist.
2454 if (arg_count > 0) {
2455 __ LoadP(r6, MemOperand(sp, arg_count * kPointerSize), r0);
2456 } else {
2457 __ LoadRoot(r6, Heap::kUndefinedValueRootIndex);
2458 }
2459
2460 // r5: the enclosing function.
2461 __ LoadP(r5, MemOperand(fp, JavaScriptFrameConstants::kFunctionOffset));
2462
2463 // r4: language mode.
2464 __ LoadSmiLiteral(r4, Smi::FromInt(language_mode()));
2465
2466 // r3: the start position of the scope the call resides in.
2467 __ LoadSmiLiteral(r3, Smi::FromInt(scope()->start_position()));
2468
Ben Murdochc5610432016-08-08 18:44:38 +01002469 // r2: the source position of the eval call.
2470 __ LoadSmiLiteral(r2, Smi::FromInt(expr->position()));
2471
Ben Murdochda12d292016-06-02 14:46:10 +01002472 // Do the runtime call.
Ben Murdochc5610432016-08-08 18:44:38 +01002473 __ Push(r6, r5, r4, r3, r2);
Ben Murdochda12d292016-06-02 14:46:10 +01002474 __ CallRuntime(Runtime::kResolvePossiblyDirectEval);
2475}
2476
2477// See http://www.ecma-international.org/ecma-262/6.0/#sec-function-calls.
2478void FullCodeGenerator::PushCalleeAndWithBaseObject(Call* expr) {
2479 VariableProxy* callee = expr->expression()->AsVariableProxy();
2480 if (callee->var()->IsLookupSlot()) {
2481 Label slow, done;
2482 SetExpressionPosition(callee);
2483 // Generate code for loading from variables potentially shadowed by
2484 // eval-introduced variables.
2485 EmitDynamicLookupFastCase(callee, NOT_INSIDE_TYPEOF, &slow, &done);
2486
2487 __ bind(&slow);
2488 // Call the runtime to find the function to call (returned in r2) and
2489 // the object holding it (returned in r3).
2490 __ Push(callee->name());
2491 __ CallRuntime(Runtime::kLoadLookupSlotForCall);
2492 PushOperands(r2, r3); // Function, receiver.
Ben Murdochc5610432016-08-08 18:44:38 +01002493 PrepareForBailoutForId(expr->LookupId(), BailoutState::NO_REGISTERS);
Ben Murdochda12d292016-06-02 14:46:10 +01002494
2495 // If fast case code has been generated, emit code to push the function
2496 // and receiver and have the slow path jump around this code.
2497 if (done.is_linked()) {
2498 Label call;
2499 __ b(&call);
2500 __ bind(&done);
2501 // Push function.
2502 __ push(r2);
2503 // Pass undefined as the receiver, which is the WithBaseObject of a
2504 // non-object environment record. If the callee is sloppy, it will patch
2505 // it up to be the global receiver.
2506 __ LoadRoot(r3, Heap::kUndefinedValueRootIndex);
2507 __ push(r3);
2508 __ bind(&call);
2509 }
2510 } else {
2511 VisitForStackValue(callee);
2512 // refEnv.WithBaseObject()
2513 __ LoadRoot(r4, Heap::kUndefinedValueRootIndex);
2514 PushOperand(r4); // Reserved receiver slot.
2515 }
2516}
2517
2518void FullCodeGenerator::EmitPossiblyEvalCall(Call* expr) {
Ben Murdochc5610432016-08-08 18:44:38 +01002519 // In a call to eval, we first call
2520 // Runtime_ResolvePossiblyDirectEval to resolve the function we need
2521 // to call. Then we call the resolved function using the given arguments.
Ben Murdochda12d292016-06-02 14:46:10 +01002522 ZoneList<Expression*>* args = expr->arguments();
2523 int arg_count = args->length();
2524
2525 PushCalleeAndWithBaseObject(expr);
2526
2527 // Push the arguments.
2528 for (int i = 0; i < arg_count; i++) {
2529 VisitForStackValue(args->at(i));
2530 }
2531
2532 // Push a copy of the function (found below the arguments) and
2533 // resolve eval.
2534 __ LoadP(r3, MemOperand(sp, (arg_count + 1) * kPointerSize), r0);
2535 __ push(r3);
Ben Murdochc5610432016-08-08 18:44:38 +01002536 EmitResolvePossiblyDirectEval(expr);
Ben Murdochda12d292016-06-02 14:46:10 +01002537
2538 // Touch up the stack with the resolved function.
2539 __ StoreP(r2, MemOperand(sp, (arg_count + 1) * kPointerSize), r0);
2540
Ben Murdochc5610432016-08-08 18:44:38 +01002541 PrepareForBailoutForId(expr->EvalId(), BailoutState::NO_REGISTERS);
Ben Murdochda12d292016-06-02 14:46:10 +01002542
2543 // Record source position for debugger.
2544 SetCallPosition(expr);
2545 __ LoadP(r3, MemOperand(sp, (arg_count + 1) * kPointerSize), r0);
2546 __ mov(r2, Operand(arg_count));
2547 __ Call(isolate()->builtins()->Call(ConvertReceiverMode::kAny,
2548 expr->tail_call_mode()),
2549 RelocInfo::CODE_TARGET);
2550 OperandStackDepthDecrement(arg_count + 1);
2551 RecordJSReturnSite(expr);
Ben Murdochc5610432016-08-08 18:44:38 +01002552 RestoreContext();
Ben Murdochda12d292016-06-02 14:46:10 +01002553 context()->DropAndPlug(1, r2);
2554}
2555
2556void FullCodeGenerator::VisitCallNew(CallNew* expr) {
2557 Comment cmnt(masm_, "[ CallNew");
2558 // According to ECMA-262, section 11.2.2, page 44, the function
2559 // expression in new calls must be evaluated before the
2560 // arguments.
2561
2562 // Push constructor on the stack. If it's not a function it's used as
2563 // receiver for CALL_NON_FUNCTION, otherwise the value on the stack is
2564 // ignored.
2565 DCHECK(!expr->expression()->IsSuperPropertyReference());
2566 VisitForStackValue(expr->expression());
2567
2568 // Push the arguments ("left-to-right") on the stack.
2569 ZoneList<Expression*>* args = expr->arguments();
2570 int arg_count = args->length();
2571 for (int i = 0; i < arg_count; i++) {
2572 VisitForStackValue(args->at(i));
2573 }
2574
2575 // Call the construct call builtin that handles allocation and
2576 // constructor invocation.
2577 SetConstructCallPosition(expr);
2578
2579 // Load function and argument count into r3 and r2.
2580 __ mov(r2, Operand(arg_count));
2581 __ LoadP(r3, MemOperand(sp, arg_count * kPointerSize), r0);
2582
2583 // Record call targets in unoptimized code.
2584 __ EmitLoadTypeFeedbackVector(r4);
2585 __ LoadSmiLiteral(r5, SmiFromSlot(expr->CallNewFeedbackSlot()));
2586
2587 CallConstructStub stub(isolate());
2588 __ Call(stub.GetCode(), RelocInfo::CODE_TARGET);
2589 OperandStackDepthDecrement(arg_count + 1);
Ben Murdochc5610432016-08-08 18:44:38 +01002590 PrepareForBailoutForId(expr->ReturnId(), BailoutState::TOS_REGISTER);
2591 RestoreContext();
Ben Murdochda12d292016-06-02 14:46:10 +01002592 context()->Plug(r2);
2593}
2594
2595void FullCodeGenerator::EmitSuperConstructorCall(Call* expr) {
2596 SuperCallReference* super_call_ref =
2597 expr->expression()->AsSuperCallReference();
2598 DCHECK_NOT_NULL(super_call_ref);
2599
2600 // Push the super constructor target on the stack (may be null,
2601 // but the Construct builtin can deal with that properly).
2602 VisitForAccumulatorValue(super_call_ref->this_function_var());
2603 __ AssertFunction(result_register());
2604 __ LoadP(result_register(),
2605 FieldMemOperand(result_register(), HeapObject::kMapOffset));
2606 __ LoadP(result_register(),
2607 FieldMemOperand(result_register(), Map::kPrototypeOffset));
2608 PushOperand(result_register());
2609
2610 // Push the arguments ("left-to-right") on the stack.
2611 ZoneList<Expression*>* args = expr->arguments();
2612 int arg_count = args->length();
2613 for (int i = 0; i < arg_count; i++) {
2614 VisitForStackValue(args->at(i));
2615 }
2616
2617 // Call the construct call builtin that handles allocation and
2618 // constructor invocation.
2619 SetConstructCallPosition(expr);
2620
2621 // Load new target into r5.
2622 VisitForAccumulatorValue(super_call_ref->new_target_var());
2623 __ LoadRR(r5, result_register());
2624
2625 // Load function and argument count into r3 and r2.
2626 __ mov(r2, Operand(arg_count));
2627 __ LoadP(r3, MemOperand(sp, arg_count * kPointerSize));
2628
2629 __ Call(isolate()->builtins()->Construct(), RelocInfo::CODE_TARGET);
2630 OperandStackDepthDecrement(arg_count + 1);
2631
2632 RecordJSReturnSite(expr);
Ben Murdochc5610432016-08-08 18:44:38 +01002633 RestoreContext();
Ben Murdochda12d292016-06-02 14:46:10 +01002634 context()->Plug(r2);
2635}
2636
2637void FullCodeGenerator::EmitIsSmi(CallRuntime* expr) {
2638 ZoneList<Expression*>* args = expr->arguments();
2639 DCHECK(args->length() == 1);
2640
2641 VisitForAccumulatorValue(args->at(0));
2642
2643 Label materialize_true, materialize_false, skip_lookup;
2644 Label* if_true = NULL;
2645 Label* if_false = NULL;
2646 Label* fall_through = NULL;
2647 context()->PrepareTest(&materialize_true, &materialize_false, &if_true,
2648 &if_false, &fall_through);
2649
2650 PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
2651 __ TestIfSmi(r2);
2652 Split(eq, if_true, if_false, fall_through);
2653
2654 context()->Plug(if_true, if_false);
2655}
2656
2657void FullCodeGenerator::EmitIsJSReceiver(CallRuntime* expr) {
2658 ZoneList<Expression*>* args = expr->arguments();
2659 DCHECK(args->length() == 1);
2660
2661 VisitForAccumulatorValue(args->at(0));
2662
2663 Label materialize_true, materialize_false;
2664 Label* if_true = NULL;
2665 Label* if_false = NULL;
2666 Label* fall_through = NULL;
2667 context()->PrepareTest(&materialize_true, &materialize_false, &if_true,
2668 &if_false, &fall_through);
2669
2670 __ JumpIfSmi(r2, if_false);
2671 __ CompareObjectType(r2, r3, r3, FIRST_JS_RECEIVER_TYPE);
2672 PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
2673 Split(ge, if_true, if_false, fall_through);
2674
2675 context()->Plug(if_true, if_false);
2676}
2677
2678void FullCodeGenerator::EmitIsArray(CallRuntime* expr) {
2679 ZoneList<Expression*>* args = expr->arguments();
2680 DCHECK(args->length() == 1);
2681
2682 VisitForAccumulatorValue(args->at(0));
2683
2684 Label materialize_true, materialize_false;
2685 Label* if_true = NULL;
2686 Label* if_false = NULL;
2687 Label* fall_through = NULL;
2688 context()->PrepareTest(&materialize_true, &materialize_false, &if_true,
2689 &if_false, &fall_through);
2690
2691 __ JumpIfSmi(r2, if_false);
2692 __ CompareObjectType(r2, r3, r3, JS_ARRAY_TYPE);
2693 PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
2694 Split(eq, if_true, if_false, fall_through);
2695
2696 context()->Plug(if_true, if_false);
2697}
2698
2699void FullCodeGenerator::EmitIsTypedArray(CallRuntime* expr) {
2700 ZoneList<Expression*>* args = expr->arguments();
2701 DCHECK(args->length() == 1);
2702
2703 VisitForAccumulatorValue(args->at(0));
2704
2705 Label materialize_true, materialize_false;
2706 Label* if_true = NULL;
2707 Label* if_false = NULL;
2708 Label* fall_through = NULL;
2709 context()->PrepareTest(&materialize_true, &materialize_false, &if_true,
2710 &if_false, &fall_through);
2711
2712 __ JumpIfSmi(r2, if_false);
2713 __ CompareObjectType(r2, r3, r3, JS_TYPED_ARRAY_TYPE);
2714 PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
2715 Split(eq, if_true, if_false, fall_through);
2716
2717 context()->Plug(if_true, if_false);
2718}
2719
2720void FullCodeGenerator::EmitIsRegExp(CallRuntime* expr) {
2721 ZoneList<Expression*>* args = expr->arguments();
2722 DCHECK(args->length() == 1);
2723
2724 VisitForAccumulatorValue(args->at(0));
2725
2726 Label materialize_true, materialize_false;
2727 Label* if_true = NULL;
2728 Label* if_false = NULL;
2729 Label* fall_through = NULL;
2730 context()->PrepareTest(&materialize_true, &materialize_false, &if_true,
2731 &if_false, &fall_through);
2732
2733 __ JumpIfSmi(r2, if_false);
2734 __ CompareObjectType(r2, r3, r3, JS_REGEXP_TYPE);
2735 PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
2736 Split(eq, if_true, if_false, fall_through);
2737
2738 context()->Plug(if_true, if_false);
2739}
2740
2741void FullCodeGenerator::EmitIsJSProxy(CallRuntime* expr) {
2742 ZoneList<Expression*>* args = expr->arguments();
2743 DCHECK(args->length() == 1);
2744
2745 VisitForAccumulatorValue(args->at(0));
2746
2747 Label materialize_true, materialize_false;
2748 Label* if_true = NULL;
2749 Label* if_false = NULL;
2750 Label* fall_through = NULL;
2751 context()->PrepareTest(&materialize_true, &materialize_false, &if_true,
2752 &if_false, &fall_through);
2753
2754 __ JumpIfSmi(r2, if_false);
2755 __ CompareObjectType(r2, r3, r3, JS_PROXY_TYPE);
2756 PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
2757 Split(eq, if_true, if_false, fall_through);
2758
2759 context()->Plug(if_true, if_false);
2760}
2761
2762void FullCodeGenerator::EmitClassOf(CallRuntime* expr) {
2763 ZoneList<Expression*>* args = expr->arguments();
2764 DCHECK(args->length() == 1);
2765 Label done, null, function, non_function_constructor;
2766
2767 VisitForAccumulatorValue(args->at(0));
2768
2769 // If the object is not a JSReceiver, we return null.
2770 __ JumpIfSmi(r2, &null);
2771 STATIC_ASSERT(LAST_JS_RECEIVER_TYPE == LAST_TYPE);
2772 __ CompareObjectType(r2, r2, r3, FIRST_JS_RECEIVER_TYPE);
2773 // Map is now in r2.
2774 __ blt(&null);
2775
2776 // Return 'Function' for JSFunction and JSBoundFunction objects.
2777 __ CmpLogicalP(r3, Operand(FIRST_FUNCTION_TYPE));
2778 STATIC_ASSERT(LAST_FUNCTION_TYPE == LAST_TYPE);
2779 __ bge(&function);
2780
2781 // Check if the constructor in the map is a JS function.
2782 Register instance_type = r4;
2783 __ GetMapConstructor(r2, r2, r3, instance_type);
2784 __ CmpP(instance_type, Operand(JS_FUNCTION_TYPE));
2785 __ bne(&non_function_constructor, Label::kNear);
2786
2787 // r2 now contains the constructor function. Grab the
2788 // instance class name from there.
2789 __ LoadP(r2, FieldMemOperand(r2, JSFunction::kSharedFunctionInfoOffset));
2790 __ LoadP(r2,
2791 FieldMemOperand(r2, SharedFunctionInfo::kInstanceClassNameOffset));
2792 __ b(&done, Label::kNear);
2793
2794 // Functions have class 'Function'.
2795 __ bind(&function);
2796 __ LoadRoot(r2, Heap::kFunction_stringRootIndex);
2797 __ b(&done, Label::kNear);
2798
2799 // Objects with a non-function constructor have class 'Object'.
2800 __ bind(&non_function_constructor);
2801 __ LoadRoot(r2, Heap::kObject_stringRootIndex);
2802 __ b(&done, Label::kNear);
2803
2804 // Non-JS objects have class null.
2805 __ bind(&null);
2806 __ LoadRoot(r2, Heap::kNullValueRootIndex);
2807
2808 // All done.
2809 __ bind(&done);
2810
2811 context()->Plug(r2);
2812}
2813
2814void FullCodeGenerator::EmitValueOf(CallRuntime* expr) {
2815 ZoneList<Expression*>* args = expr->arguments();
2816 DCHECK(args->length() == 1);
2817 VisitForAccumulatorValue(args->at(0)); // Load the object.
2818
2819 Label done;
2820 // If the object is a smi return the object.
2821 __ JumpIfSmi(r2, &done);
2822 // If the object is not a value type, return the object.
2823 __ CompareObjectType(r2, r3, r3, JS_VALUE_TYPE);
2824 __ bne(&done, Label::kNear);
2825 __ LoadP(r2, FieldMemOperand(r2, JSValue::kValueOffset));
2826
2827 __ bind(&done);
2828 context()->Plug(r2);
2829}
2830
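// Inline code for the OneByteSeqStringSetChar intrinsic: stores the untagged
// value as a single byte at the given index of a sequential one-byte string
// and returns the string; debug builds verify the argument types first.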
2831void FullCodeGenerator::EmitOneByteSeqStringSetChar(CallRuntime* expr) {
2832 ZoneList<Expression*>* args = expr->arguments();
2833 DCHECK_EQ(3, args->length());
2834
2835 Register string = r2;
2836 Register index = r3;
2837 Register value = r4;
2838
2839 VisitForStackValue(args->at(0)); // index
2840 VisitForStackValue(args->at(1)); // value
2841 VisitForAccumulatorValue(args->at(2)); // string
2842 PopOperands(index, value);
2843
2844 if (FLAG_debug_code) {
2845 __ TestIfSmi(value);
2846 __ Check(eq, kNonSmiValue, cr0);
2847 __ TestIfSmi(index);
2848 __ Check(eq, kNonSmiIndex, cr0);
2849 __ SmiUntag(index);
2850 static const uint32_t one_byte_seq_type = kSeqStringTag | kOneByteStringTag;
2851 __ EmitSeqStringSetCharCheck(string, index, value, one_byte_seq_type);
2852 __ SmiTag(index);
2853 }
2854
2855 __ SmiUntag(value);
2856 __ AddP(ip, string, Operand(SeqOneByteString::kHeaderSize - kHeapObjectTag));
2857 __ SmiToByteArrayOffset(r1, index);
2858 __ StoreByte(value, MemOperand(ip, r1));
2859 context()->Plug(string);
2860}
2861
2862void FullCodeGenerator::EmitTwoByteSeqStringSetChar(CallRuntime* expr) {
2863 ZoneList<Expression*>* args = expr->arguments();
2864 DCHECK_EQ(3, args->length());
2865
2866 Register string = r2;
2867 Register index = r3;
2868 Register value = r4;
2869
2870 VisitForStackValue(args->at(0)); // index
2871 VisitForStackValue(args->at(1)); // value
2872 VisitForAccumulatorValue(args->at(2)); // string
2873 PopOperands(index, value);
2874
2875 if (FLAG_debug_code) {
2876 __ TestIfSmi(value);
2877 __ Check(eq, kNonSmiValue, cr0);
2878 __ TestIfSmi(index);
2879 __ Check(eq, kNonSmiIndex, cr0);
2880 __ SmiUntag(index, index);
2881 static const uint32_t two_byte_seq_type = kSeqStringTag | kTwoByteStringTag;
2882 __ EmitSeqStringSetCharCheck(string, index, value, two_byte_seq_type);
2883 __ SmiTag(index, index);
2884 }
2885
2886 __ SmiUntag(value);
2887 __ SmiToShortArrayOffset(r1, index);
2888 __ StoreHalfWord(value, MemOperand(r1, string, SeqTwoByteString::kHeaderSize -
2889 kHeapObjectTag));
2890 context()->Plug(string);
2891}
2892
2893void FullCodeGenerator::EmitStringCharFromCode(CallRuntime* expr) {
2894 ZoneList<Expression*>* args = expr->arguments();
2895 DCHECK(args->length() == 1);
2896 VisitForAccumulatorValue(args->at(0));
2897
2898 Label done;
2899 StringCharFromCodeGenerator generator(r2, r3);
2900 generator.GenerateFast(masm_);
2901 __ b(&done);
2902
2903 NopRuntimeCallHelper call_helper;
2904 generator.GenerateSlow(masm_, call_helper);
2905
2906 __ bind(&done);
2907 context()->Plug(r3);
2908}
2909
2910void FullCodeGenerator::EmitStringCharCodeAt(CallRuntime* expr) {
2911 ZoneList<Expression*>* args = expr->arguments();
2912 DCHECK(args->length() == 2);
2913 VisitForStackValue(args->at(0));
2914 VisitForAccumulatorValue(args->at(1));
2915
2916 Register object = r3;
2917 Register index = r2;
2918 Register result = r5;
2919
2920 PopOperand(object);
2921
2922 Label need_conversion;
2923 Label index_out_of_range;
2924 Label done;
2925 StringCharCodeAtGenerator generator(object, index, result, &need_conversion,
2926 &need_conversion, &index_out_of_range,
2927 STRING_INDEX_IS_NUMBER);
2928 generator.GenerateFast(masm_);
2929 __ b(&done);
2930
2931 __ bind(&index_out_of_range);
2932 // When the index is out of range, the spec requires us to return
2933 // NaN.
2934 __ LoadRoot(result, Heap::kNanValueRootIndex);
2935 __ b(&done);
2936
2937 __ bind(&need_conversion);
2938 // Load the undefined value into the result register, which will
2939 // trigger conversion.
2940 __ LoadRoot(result, Heap::kUndefinedValueRootIndex);
2941 __ b(&done);
2942
2943 NopRuntimeCallHelper call_helper;
2944 generator.GenerateSlow(masm_, NOT_PART_OF_IC_HANDLER, call_helper);
2945
2946 __ bind(&done);
2947 context()->Plug(result);
2948}
2949
2950void FullCodeGenerator::EmitStringCharAt(CallRuntime* expr) {
2951 ZoneList<Expression*>* args = expr->arguments();
2952 DCHECK(args->length() == 2);
2953 VisitForStackValue(args->at(0));
2954 VisitForAccumulatorValue(args->at(1));
2955
2956 Register object = r3;
2957 Register index = r2;
2958 Register scratch = r5;
2959 Register result = r2;
2960
2961 PopOperand(object);
2962
2963 Label need_conversion;
2964 Label index_out_of_range;
2965 Label done;
2966 StringCharAtGenerator generator(object, index, scratch, result,
2967 &need_conversion, &need_conversion,
2968 &index_out_of_range, STRING_INDEX_IS_NUMBER);
2969 generator.GenerateFast(masm_);
2970 __ b(&done);
2971
2972 __ bind(&index_out_of_range);
2973 // When the index is out of range, the spec requires us to return
2974 // the empty string.
2975 __ LoadRoot(result, Heap::kempty_stringRootIndex);
2976 __ b(&done);
2977
2978 __ bind(&need_conversion);
2979 // Move smi zero into the result register, which will trigger
2980 // conversion.
2981 __ LoadSmiLiteral(result, Smi::FromInt(0));
2982 __ b(&done);
2983
2984 NopRuntimeCallHelper call_helper;
2985 generator.GenerateSlow(masm_, NOT_PART_OF_IC_HANDLER, call_helper);
2986
2987 __ bind(&done);
2988 context()->Plug(result);
2989}
2990
2991void FullCodeGenerator::EmitCall(CallRuntime* expr) {
2992 ZoneList<Expression*>* args = expr->arguments();
2993 DCHECK_LE(2, args->length());
2994 // Push target, receiver and arguments onto the stack.
2995 for (Expression* const arg : *args) {
2996 VisitForStackValue(arg);
2997 }
2998  PrepareForBailoutForId(expr->CallId(), BailoutState::NO_REGISTERS);
2999  // Move target to r3.
3000 int const argc = args->length() - 2;
3001 __ LoadP(r3, MemOperand(sp, (argc + 1) * kPointerSize));
3002 // Call the target.
3003 __ mov(r2, Operand(argc));
3004 __ Call(isolate()->builtins()->Call(), RelocInfo::CODE_TARGET);
3005 OperandStackDepthDecrement(argc + 1);
3006  RestoreContext();
3007  // Discard the function left on TOS.
3008 context()->DropAndPlug(1, r2);
3009}
3010
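// A string's hash field can cache the numeric value of an array-index string;
// this intrinsic appears to test the relevant bits of the hash field and
// produce true when such a cached index is present.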
3011void FullCodeGenerator::EmitHasCachedArrayIndex(CallRuntime* expr) {
3012 ZoneList<Expression*>* args = expr->arguments();
3013 VisitForAccumulatorValue(args->at(0));
3014
3015 Label materialize_true, materialize_false;
3016 Label* if_true = NULL;
3017 Label* if_false = NULL;
3018 Label* fall_through = NULL;
3019 context()->PrepareTest(&materialize_true, &materialize_false, &if_true,
3020 &if_false, &fall_through);
3021
3022 __ LoadlW(r2, FieldMemOperand(r2, String::kHashFieldOffset));
3023 __ AndP(r0, r2, Operand(String::kContainsCachedArrayIndexMask));
3024 PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
3025 Split(eq, if_true, if_false, fall_through);
3026
3027 context()->Plug(if_true, if_false);
3028}
3029
3030void FullCodeGenerator::EmitGetCachedArrayIndex(CallRuntime* expr) {
3031 ZoneList<Expression*>* args = expr->arguments();
3032 DCHECK(args->length() == 1);
3033 VisitForAccumulatorValue(args->at(0));
3034
3035 __ AssertString(r2);
3036
3037 __ LoadlW(r2, FieldMemOperand(r2, String::kHashFieldOffset));
3038 __ IndexFromHash(r2, r2);
3039
3040 context()->Plug(r2);
3041}
3042
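// Loads the [[Prototype]] of the given constructor function (via its map),
// i.e. presumably what a `super(...)` call should invoke as the parent
// constructor.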
3043void FullCodeGenerator::EmitGetSuperConstructor(CallRuntime* expr) {
3044 ZoneList<Expression*>* args = expr->arguments();
3045 DCHECK_EQ(1, args->length());
3046 VisitForAccumulatorValue(args->at(0));
3047 __ AssertFunction(r2);
3048 __ LoadP(r2, FieldMemOperand(r2, HeapObject::kMapOffset));
3049 __ LoadP(r2, FieldMemOperand(r2, Map::kPrototypeOffset));
3050 context()->Plug(r2);
3051}
3052
3053void FullCodeGenerator::EmitDebugIsActive(CallRuntime* expr) {
3054 DCHECK(expr->arguments()->length() == 0);
3055 ExternalReference debug_is_active =
3056 ExternalReference::debug_is_active_address(isolate());
3057 __ mov(ip, Operand(debug_is_active));
3058 __ LoadlB(r2, MemOperand(ip));
3059 __ SmiTag(r2);
3060 context()->Plug(r2);
3061}
3062
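// Inline-allocates an iterator result object ({value, done}): on success the
// map, empty properties/elements and both fields are stored directly; if new
// space allocation fails we fall back to Runtime::kCreateIterResultObject.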
3063void FullCodeGenerator::EmitCreateIterResultObject(CallRuntime* expr) {
3064 ZoneList<Expression*>* args = expr->arguments();
3065 DCHECK_EQ(2, args->length());
3066 VisitForStackValue(args->at(0));
3067 VisitForStackValue(args->at(1));
3068
3069 Label runtime, done;
3070
3071  __ Allocate(JSIteratorResult::kSize, r2, r4, r5, &runtime,
3072 NO_ALLOCATION_FLAGS);
3073  __ LoadNativeContextSlot(Context::ITERATOR_RESULT_MAP_INDEX, r3);
3074 __ Pop(r4, r5);
3075 __ LoadRoot(r6, Heap::kEmptyFixedArrayRootIndex);
3076 __ StoreP(r3, FieldMemOperand(r2, HeapObject::kMapOffset), r0);
3077 __ StoreP(r6, FieldMemOperand(r2, JSObject::kPropertiesOffset), r0);
3078 __ StoreP(r6, FieldMemOperand(r2, JSObject::kElementsOffset), r0);
3079 __ StoreP(r4, FieldMemOperand(r2, JSIteratorResult::kValueOffset), r0);
3080 __ StoreP(r5, FieldMemOperand(r2, JSIteratorResult::kDoneOffset), r0);
3081 STATIC_ASSERT(JSIteratorResult::kSize == 5 * kPointerSize);
3082 __ b(&done);
3083
3084 __ bind(&runtime);
3085 CallRuntimeWithOperands(Runtime::kCreateIterResultObject);
3086
3087 __ bind(&done);
3088 context()->Plug(r2);
3089}
3090
3091void FullCodeGenerator::EmitLoadJSRuntimeFunction(CallRuntime* expr) {
3092 // Push function.
3093 __ LoadNativeContextSlot(expr->context_index(), r2);
3094 PushOperand(r2);
3095
3096 // Push undefined as the receiver.
3097 __ LoadRoot(r2, Heap::kUndefinedValueRootIndex);
3098 PushOperand(r2);
3099}
3100
3101void FullCodeGenerator::EmitCallJSRuntimeFunction(CallRuntime* expr) {
3102 ZoneList<Expression*>* args = expr->arguments();
3103 int arg_count = args->length();
3104
3105 SetCallPosition(expr);
3106 __ LoadP(r3, MemOperand(sp, (arg_count + 1) * kPointerSize), r0);
3107 __ mov(r2, Operand(arg_count));
3108 __ Call(isolate()->builtins()->Call(ConvertReceiverMode::kNullOrUndefined),
3109 RelocInfo::CODE_TARGET);
3110 OperandStackDepthDecrement(arg_count + 1);
3111  RestoreContext();
3112}
3113
3114void FullCodeGenerator::VisitUnaryOperation(UnaryOperation* expr) {
3115 switch (expr->op()) {
3116 case Token::DELETE: {
3117 Comment cmnt(masm_, "[ UnaryOperation (DELETE)");
3118 Property* property = expr->expression()->AsProperty();
3119 VariableProxy* proxy = expr->expression()->AsVariableProxy();
3120
3121 if (property != NULL) {
3122 VisitForStackValue(property->obj());
3123 VisitForStackValue(property->key());
3124 CallRuntimeWithOperands(is_strict(language_mode())
3125 ? Runtime::kDeleteProperty_Strict
3126 : Runtime::kDeleteProperty_Sloppy);
3127 context()->Plug(r2);
3128 } else if (proxy != NULL) {
3129 Variable* var = proxy->var();
3130 // Delete of an unqualified identifier is disallowed in strict mode but
3131 // "delete this" is allowed.
3132 bool is_this = var->HasThisName(isolate());
3133 DCHECK(is_sloppy(language_mode()) || is_this);
3134 if (var->IsUnallocatedOrGlobalSlot()) {
3135 __ LoadGlobalObject(r4);
3136 __ mov(r3, Operand(var->name()));
3137 __ Push(r4, r3);
3138 __ CallRuntime(Runtime::kDeleteProperty_Sloppy);
3139 context()->Plug(r2);
3140 } else if (var->IsStackAllocated() || var->IsContextSlot()) {
3141 // Result of deleting non-global, non-dynamic variables is false.
3142 // The subexpression does not have side effects.
3143 context()->Plug(is_this);
3144 } else {
3145 // Non-global variable. Call the runtime to try to delete from the
3146 // context where the variable was introduced.
3147 __ Push(var->name());
3148 __ CallRuntime(Runtime::kDeleteLookupSlot);
3149 context()->Plug(r2);
3150 }
3151 } else {
3152 // Result of deleting non-property, non-variable reference is true.
3153 // The subexpression may have side effects.
3154 VisitForEffect(expr->expression());
3155 context()->Plug(true);
3156 }
3157 break;
3158 }
3159
3160 case Token::VOID: {
3161 Comment cmnt(masm_, "[ UnaryOperation (VOID)");
3162 VisitForEffect(expr->expression());
3163 context()->Plug(Heap::kUndefinedValueRootIndex);
3164 break;
3165 }
3166
3167 case Token::NOT: {
3168 Comment cmnt(masm_, "[ UnaryOperation (NOT)");
3169 if (context()->IsEffect()) {
3170 // Unary NOT has no side effects so it's only necessary to visit the
3171 // subexpression. Match the optimizing compiler by not branching.
3172 VisitForEffect(expr->expression());
3173 } else if (context()->IsTest()) {
3174 const TestContext* test = TestContext::cast(context());
3175 // The labels are swapped for the recursive call.
3176 VisitForControl(expr->expression(), test->false_label(),
3177 test->true_label(), test->fall_through());
3178 context()->Plug(test->true_label(), test->false_label());
3179 } else {
3180 // We handle value contexts explicitly rather than simply visiting
3181 // for control and plugging the control flow into the context,
3182 // because we need to prepare a pair of extra administrative AST ids
3183 // for the optimizing compiler.
3184 DCHECK(context()->IsAccumulatorValue() || context()->IsStackValue());
3185 Label materialize_true, materialize_false, done;
3186 VisitForControl(expr->expression(), &materialize_false,
3187 &materialize_true, &materialize_true);
3188 if (!context()->IsAccumulatorValue()) OperandStackDepthIncrement(1);
3189 __ bind(&materialize_true);
3190        PrepareForBailoutForId(expr->MaterializeTrueId(),
3191 BailoutState::NO_REGISTERS);
3192        __ LoadRoot(r2, Heap::kTrueValueRootIndex);
3193 if (context()->IsStackValue()) __ push(r2);
3194 __ b(&done);
3195 __ bind(&materialize_false);
3196        PrepareForBailoutForId(expr->MaterializeFalseId(),
3197 BailoutState::NO_REGISTERS);
3198        __ LoadRoot(r2, Heap::kFalseValueRootIndex);
3199 if (context()->IsStackValue()) __ push(r2);
3200 __ bind(&done);
3201 }
3202 break;
3203 }
3204
3205 case Token::TYPEOF: {
3206 Comment cmnt(masm_, "[ UnaryOperation (TYPEOF)");
3207 {
3208 AccumulatorValueContext context(this);
3209 VisitForTypeofValue(expr->expression());
3210 }
3211 __ LoadRR(r5, r2);
3212 TypeofStub typeof_stub(isolate());
3213 __ CallStub(&typeof_stub);
3214 context()->Plug(r2);
3215 break;
3216 }
3217
3218 default:
3219 UNREACHABLE();
3220 }
3221}
3222
3223void FullCodeGenerator::VisitCountOperation(CountOperation* expr) {
3224 DCHECK(expr->expression()->IsValidReferenceExpressionOrThis());
3225
3226 Comment cmnt(masm_, "[ CountOperation");
3227
3228 Property* prop = expr->expression()->AsProperty();
3229 LhsKind assign_type = Property::GetAssignType(prop);
3230
3231 // Evaluate expression and get value.
3232 if (assign_type == VARIABLE) {
3233 DCHECK(expr->expression()->AsVariableProxy()->var() != NULL);
3234 AccumulatorValueContext context(this);
3235 EmitVariableLoad(expr->expression()->AsVariableProxy());
3236 } else {
3237 // Reserve space for result of postfix operation.
3238 if (expr->is_postfix() && !context()->IsEffect()) {
3239 __ LoadSmiLiteral(ip, Smi::FromInt(0));
3240 PushOperand(ip);
3241 }
3242 switch (assign_type) {
3243 case NAMED_PROPERTY: {
3244 // Put the object both on the stack and in the register.
3245 VisitForStackValue(prop->obj());
3246 __ LoadP(LoadDescriptor::ReceiverRegister(), MemOperand(sp, 0));
3247 EmitNamedPropertyLoad(prop);
3248 break;
3249 }
3250
3251 case NAMED_SUPER_PROPERTY: {
3252 VisitForStackValue(prop->obj()->AsSuperPropertyReference()->this_var());
3253 VisitForAccumulatorValue(
3254 prop->obj()->AsSuperPropertyReference()->home_object());
3255 PushOperand(result_register());
3256 const Register scratch = r3;
3257 __ LoadP(scratch, MemOperand(sp, kPointerSize));
3258 PushOperands(scratch, result_register());
3259 EmitNamedSuperPropertyLoad(prop);
3260 break;
3261 }
3262
3263 case KEYED_SUPER_PROPERTY: {
3264 VisitForStackValue(prop->obj()->AsSuperPropertyReference()->this_var());
3265 VisitForAccumulatorValue(
3266 prop->obj()->AsSuperPropertyReference()->home_object());
3267 const Register scratch = r3;
3268 const Register scratch1 = r4;
3269 __ LoadRR(scratch, result_register());
3270 VisitForAccumulatorValue(prop->key());
3271 PushOperands(scratch, result_register());
3272 __ LoadP(scratch1, MemOperand(sp, 2 * kPointerSize));
3273 PushOperands(scratch1, scratch, result_register());
3274 EmitKeyedSuperPropertyLoad(prop);
3275 break;
3276 }
3277
3278 case KEYED_PROPERTY: {
3279 VisitForStackValue(prop->obj());
3280 VisitForStackValue(prop->key());
3281 __ LoadP(LoadDescriptor::ReceiverRegister(),
3282 MemOperand(sp, 1 * kPointerSize));
3283 __ LoadP(LoadDescriptor::NameRegister(), MemOperand(sp, 0));
3284 EmitKeyedPropertyLoad(prop);
3285 break;
3286 }
3287
3288 case VARIABLE:
3289 UNREACHABLE();
3290 }
3291 }
3292
3293 // We need a second deoptimization point after loading the value
3294  // in case evaluating the property load may have a side effect.
3295 if (assign_type == VARIABLE) {
3296    PrepareForBailout(expr->expression(), BailoutState::TOS_REGISTER);
3297  } else {
3298    PrepareForBailoutForId(prop->LoadId(), BailoutState::TOS_REGISTER);
3299  }
3300
3301 // Inline smi case if we are in a loop.
3302 Label stub_call, done;
3303 JumpPatchSite patch_site(masm_);
3304
3305 int count_value = expr->op() == Token::INC ? 1 : -1;
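  // Fast-path sketch (assuming smi-tagged operands): for e.g. `x++` the tagged
  // count value is added in place with an overflow check; on overflow the
  // addition is undone and we branch to the generic BinaryOpIC call at
  // stub_call below.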
3306 if (ShouldInlineSmiCase(expr->op())) {
3307 Label slow;
3308 patch_site.EmitJumpIfNotSmi(r2, &slow);
3309
3310 // Save result for postfix expressions.
3311 if (expr->is_postfix()) {
3312 if (!context()->IsEffect()) {
3313 // Save the result on the stack. If we have a named or keyed property
3314 // we store the result under the receiver that is currently on top
3315 // of the stack.
3316 switch (assign_type) {
3317 case VARIABLE:
3318 __ push(r2);
3319 break;
3320 case NAMED_PROPERTY:
3321 __ StoreP(r2, MemOperand(sp, kPointerSize));
3322 break;
3323 case NAMED_SUPER_PROPERTY:
3324 __ StoreP(r2, MemOperand(sp, 2 * kPointerSize));
3325 break;
3326 case KEYED_PROPERTY:
3327 __ StoreP(r2, MemOperand(sp, 2 * kPointerSize));
3328 break;
3329 case KEYED_SUPER_PROPERTY:
3330 __ StoreP(r2, MemOperand(sp, 3 * kPointerSize));
3331 break;
3332 }
3333 }
3334 }
3335
3336 Register scratch1 = r3;
3337 Register scratch2 = r4;
3338 __ LoadSmiLiteral(scratch1, Smi::FromInt(count_value));
3339 __ AddAndCheckForOverflow(r2, r2, scratch1, scratch2, r0);
3340 __ BranchOnNoOverflow(&done);
3341 // Call stub. Undo operation first.
3342 __ SubP(r2, r2, scratch1);
3343 __ b(&stub_call);
3344 __ bind(&slow);
3345 }
3346
3347 // Convert old value into a number.
3348 ToNumberStub convert_stub(isolate());
3349 __ CallStub(&convert_stub);
3350  PrepareForBailoutForId(expr->ToNumberId(), BailoutState::TOS_REGISTER);
3351
3352 // Save result for postfix expressions.
3353 if (expr->is_postfix()) {
3354 if (!context()->IsEffect()) {
3355 // Save the result on the stack. If we have a named or keyed property
3356 // we store the result under the receiver that is currently on top
3357 // of the stack.
3358 switch (assign_type) {
3359 case VARIABLE:
3360 PushOperand(r2);
3361 break;
3362 case NAMED_PROPERTY:
3363 __ StoreP(r2, MemOperand(sp, kPointerSize));
3364 break;
3365 case NAMED_SUPER_PROPERTY:
3366 __ StoreP(r2, MemOperand(sp, 2 * kPointerSize));
3367 break;
3368 case KEYED_PROPERTY:
3369 __ StoreP(r2, MemOperand(sp, 2 * kPointerSize));
3370 break;
3371 case KEYED_SUPER_PROPERTY:
3372 __ StoreP(r2, MemOperand(sp, 3 * kPointerSize));
3373 break;
3374 }
3375 }
3376 }
3377
3378 __ bind(&stub_call);
3379 __ LoadRR(r3, r2);
3380 __ LoadSmiLiteral(r2, Smi::FromInt(count_value));
3381
3382 SetExpressionPosition(expr);
3383
3384 Handle<Code> code = CodeFactory::BinaryOpIC(isolate(), Token::ADD).code();
3385 CallIC(code, expr->CountBinOpFeedbackId());
3386 patch_site.EmitPatchInfo();
3387 __ bind(&done);
3388
3389 // Store the value returned in r2.
3390 switch (assign_type) {
3391 case VARIABLE:
3392 if (expr->is_postfix()) {
3393 {
3394 EffectContext context(this);
3395 EmitVariableAssignment(expr->expression()->AsVariableProxy()->var(),
3396 Token::ASSIGN, expr->CountSlot());
3397          PrepareForBailoutForId(expr->AssignmentId(),
3398 BailoutState::TOS_REGISTER);
3399          context.Plug(r2);
3400 }
3401        // For all contexts except EffectContext we have the result on
3402        // top of the stack.
3403 if (!context()->IsEffect()) {
3404 context()->PlugTOS();
3405 }
3406 } else {
3407 EmitVariableAssignment(expr->expression()->AsVariableProxy()->var(),
3408 Token::ASSIGN, expr->CountSlot());
3409        PrepareForBailoutForId(expr->AssignmentId(),
3410 BailoutState::TOS_REGISTER);
3411        context()->Plug(r2);
3412 }
3413 break;
3414 case NAMED_PROPERTY: {
3415 __ mov(StoreDescriptor::NameRegister(),
3416 Operand(prop->key()->AsLiteral()->value()));
3417 PopOperand(StoreDescriptor::ReceiverRegister());
3418 EmitLoadStoreICSlot(expr->CountSlot());
3419 CallStoreIC();
3420      PrepareForBailoutForId(expr->AssignmentId(), BailoutState::TOS_REGISTER);
3421      if (expr->is_postfix()) {
3422 if (!context()->IsEffect()) {
3423 context()->PlugTOS();
3424 }
3425 } else {
3426 context()->Plug(r2);
3427 }
3428 break;
3429 }
3430 case NAMED_SUPER_PROPERTY: {
3431 EmitNamedSuperPropertyStore(prop);
3432 if (expr->is_postfix()) {
3433 if (!context()->IsEffect()) {
3434 context()->PlugTOS();
3435 }
3436 } else {
3437 context()->Plug(r2);
3438 }
3439 break;
3440 }
3441 case KEYED_SUPER_PROPERTY: {
3442 EmitKeyedSuperPropertyStore(prop);
3443 if (expr->is_postfix()) {
3444 if (!context()->IsEffect()) {
3445 context()->PlugTOS();
3446 }
3447 } else {
3448 context()->Plug(r2);
3449 }
3450 break;
3451 }
3452 case KEYED_PROPERTY: {
3453 PopOperands(StoreDescriptor::ReceiverRegister(),
3454 StoreDescriptor::NameRegister());
3455 Handle<Code> ic =
3456 CodeFactory::KeyedStoreIC(isolate(), language_mode()).code();
3457 EmitLoadStoreICSlot(expr->CountSlot());
3458 CallIC(ic);
3459      PrepareForBailoutForId(expr->AssignmentId(), BailoutState::TOS_REGISTER);
3460      if (expr->is_postfix()) {
3461 if (!context()->IsEffect()) {
3462 context()->PlugTOS();
3463 }
3464 } else {
3465 context()->Plug(r2);
3466 }
3467 break;
3468 }
3469 }
3470}
3471
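// Specialized comparison for `typeof <expr> == "<literal>"` patterns, e.g.
// `typeof x === "number"`: the value's type is inspected directly instead of
// materializing the typeof string and comparing it.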
3472void FullCodeGenerator::EmitLiteralCompareTypeof(Expression* expr,
3473 Expression* sub_expr,
3474 Handle<String> check) {
3475 Label materialize_true, materialize_false;
3476 Label* if_true = NULL;
3477 Label* if_false = NULL;
3478 Label* fall_through = NULL;
3479 context()->PrepareTest(&materialize_true, &materialize_false, &if_true,
3480 &if_false, &fall_through);
3481
3482 {
3483 AccumulatorValueContext context(this);
3484 VisitForTypeofValue(sub_expr);
3485 }
3486 PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
3487
3488 Factory* factory = isolate()->factory();
3489 if (String::Equals(check, factory->number_string())) {
3490 __ JumpIfSmi(r2, if_true);
3491 __ LoadP(r2, FieldMemOperand(r2, HeapObject::kMapOffset));
3492 __ CompareRoot(r2, Heap::kHeapNumberMapRootIndex);
3493 Split(eq, if_true, if_false, fall_through);
3494 } else if (String::Equals(check, factory->string_string())) {
3495 __ JumpIfSmi(r2, if_false);
3496 __ CompareObjectType(r2, r2, r3, FIRST_NONSTRING_TYPE);
3497 Split(lt, if_true, if_false, fall_through);
3498 } else if (String::Equals(check, factory->symbol_string())) {
3499 __ JumpIfSmi(r2, if_false);
3500 __ CompareObjectType(r2, r2, r3, SYMBOL_TYPE);
3501 Split(eq, if_true, if_false, fall_through);
3502 } else if (String::Equals(check, factory->boolean_string())) {
3503 __ CompareRoot(r2, Heap::kTrueValueRootIndex);
3504 __ beq(if_true);
3505 __ CompareRoot(r2, Heap::kFalseValueRootIndex);
3506 Split(eq, if_true, if_false, fall_through);
3507 } else if (String::Equals(check, factory->undefined_string())) {
3508 __ CompareRoot(r2, Heap::kNullValueRootIndex);
3509 __ beq(if_false);
3510 __ JumpIfSmi(r2, if_false);
3511 // Check for undetectable objects => true.
3512 __ LoadP(r2, FieldMemOperand(r2, HeapObject::kMapOffset));
3513 __ tm(FieldMemOperand(r2, Map::kBitFieldOffset),
3514 Operand(1 << Map::kIsUndetectable));
3515 Split(ne, if_true, if_false, fall_through);
3516
3517 } else if (String::Equals(check, factory->function_string())) {
3518 __ JumpIfSmi(r2, if_false);
3519 __ LoadP(r2, FieldMemOperand(r2, HeapObject::kMapOffset));
3520 __ LoadlB(r3, FieldMemOperand(r2, Map::kBitFieldOffset));
3521 __ AndP(r3, r3,
3522 Operand((1 << Map::kIsCallable) | (1 << Map::kIsUndetectable)));
3523 __ CmpP(r3, Operand(1 << Map::kIsCallable));
3524 Split(eq, if_true, if_false, fall_through);
3525 } else if (String::Equals(check, factory->object_string())) {
3526 __ JumpIfSmi(r2, if_false);
3527 __ CompareRoot(r2, Heap::kNullValueRootIndex);
3528 __ beq(if_true);
3529 STATIC_ASSERT(LAST_JS_RECEIVER_TYPE == LAST_TYPE);
3530 __ CompareObjectType(r2, r2, r3, FIRST_JS_RECEIVER_TYPE);
3531 __ blt(if_false);
3532 __ tm(FieldMemOperand(r2, Map::kBitFieldOffset),
3533 Operand((1 << Map::kIsCallable) | (1 << Map::kIsUndetectable)));
3534 Split(eq, if_true, if_false, fall_through);
3535// clang-format off
3536#define SIMD128_TYPE(TYPE, Type, type, lane_count, lane_type) \
3537 } else if (String::Equals(check, factory->type##_string())) { \
3538 __ JumpIfSmi(r2, if_false); \
3539 __ LoadP(r2, FieldMemOperand(r2, HeapObject::kMapOffset)); \
3540 __ CompareRoot(r2, Heap::k##Type##MapRootIndex); \
3541 Split(eq, if_true, if_false, fall_through);
3542 SIMD128_TYPES(SIMD128_TYPE)
3543#undef SIMD128_TYPE
3544 // clang-format on
3545 } else {
3546 if (if_false != fall_through) __ b(if_false);
3547 }
3548 context()->Plug(if_true, if_false);
3549}
3550
3551void FullCodeGenerator::VisitCompareOperation(CompareOperation* expr) {
3552 Comment cmnt(masm_, "[ CompareOperation");
3553
3554 // First we try a fast inlined version of the compare when one of
3555 // the operands is a literal.
3556 if (TryLiteralCompare(expr)) return;
3557
3558 // Always perform the comparison for its control flow. Pack the result
3559 // into the expression's context after the comparison is performed.
3560 Label materialize_true, materialize_false;
3561 Label* if_true = NULL;
3562 Label* if_false = NULL;
3563 Label* fall_through = NULL;
3564 context()->PrepareTest(&materialize_true, &materialize_false, &if_true,
3565 &if_false, &fall_through);
3566
3567 Token::Value op = expr->op();
3568 VisitForStackValue(expr->left());
3569 switch (op) {
3570 case Token::IN:
3571 VisitForStackValue(expr->right());
3572      SetExpressionPosition(expr);
3573 EmitHasProperty();
3574      PrepareForBailoutBeforeSplit(expr, false, NULL, NULL);
3575 __ CompareRoot(r2, Heap::kTrueValueRootIndex);
3576 Split(eq, if_true, if_false, fall_through);
3577 break;
3578
3579 case Token::INSTANCEOF: {
3580 VisitForAccumulatorValue(expr->right());
3581      SetExpressionPosition(expr);
3582      PopOperand(r3);
3583 InstanceOfStub stub(isolate());
3584 __ CallStub(&stub);
3585 PrepareForBailoutBeforeSplit(expr, false, NULL, NULL);
3586 __ CompareRoot(r2, Heap::kTrueValueRootIndex);
3587 Split(eq, if_true, if_false, fall_through);
3588 break;
3589 }
3590
3591 default: {
3592 VisitForAccumulatorValue(expr->right());
3593      SetExpressionPosition(expr);
3594      Condition cond = CompareIC::ComputeCondition(op);
3595 PopOperand(r3);
3596
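      // When ShouldInlineSmiCase allows it, the operands are or'ed together and
      // compared inline if both are smis; the JumpPatchSite records the spot so
      // the CompareIC can patch the smi check later (see PatchInlinedSmiCode).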
3597 bool inline_smi_code = ShouldInlineSmiCase(op);
3598 JumpPatchSite patch_site(masm_);
3599 if (inline_smi_code) {
3600 Label slow_case;
3601 __ LoadRR(r4, r3);
3602 __ OrP(r4, r2);
3603 patch_site.EmitJumpIfNotSmi(r4, &slow_case);
3604 __ CmpP(r3, r2);
3605 Split(cond, if_true, if_false, NULL);
3606 __ bind(&slow_case);
3607 }
3608
3609 Handle<Code> ic = CodeFactory::CompareIC(isolate(), op).code();
3610 CallIC(ic, expr->CompareOperationFeedbackId());
3611 patch_site.EmitPatchInfo();
3612 PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
3613 __ CmpP(r2, Operand::Zero());
3614 Split(cond, if_true, if_false, fall_through);
3615 }
3616 }
3617
3618 // Convert the result of the comparison into one expected for this
3619 // expression's context.
3620 context()->Plug(if_true, if_false);
3621}
3622
3623void FullCodeGenerator::EmitLiteralCompareNil(CompareOperation* expr,
3624 Expression* sub_expr,
3625 NilValue nil) {
3626 Label materialize_true, materialize_false;
3627 Label* if_true = NULL;
3628 Label* if_false = NULL;
3629 Label* fall_through = NULL;
3630 context()->PrepareTest(&materialize_true, &materialize_false, &if_true,
3631 &if_false, &fall_through);
3632
3633 VisitForAccumulatorValue(sub_expr);
3634 PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
3635 if (expr->op() == Token::EQ_STRICT) {
3636 Heap::RootListIndex nil_value = nil == kNullValue
3637 ? Heap::kNullValueRootIndex
3638 : Heap::kUndefinedValueRootIndex;
3639 __ CompareRoot(r2, nil_value);
3640 Split(eq, if_true, if_false, fall_through);
3641 } else {
3642 __ JumpIfSmi(r2, if_false);
3643 __ LoadP(r2, FieldMemOperand(r2, HeapObject::kMapOffset));
3644 __ LoadlB(r3, FieldMemOperand(r2, Map::kBitFieldOffset));
3645 __ AndP(r0, r3, Operand(1 << Map::kIsUndetectable));
3646 Split(ne, if_true, if_false, fall_through);
3647 }
3648 context()->Plug(if_true, if_false);
3649}
3650Register FullCodeGenerator::result_register() { return r2; }
3651
3652Register FullCodeGenerator::context_register() { return cp; }
3653
3654void FullCodeGenerator::LoadFromFrameField(int frame_offset, Register value) {
3655 DCHECK_EQ(static_cast<int>(POINTER_SIZE_ALIGN(frame_offset)), frame_offset);
3656 __ LoadP(value, MemOperand(fp, frame_offset));
3657}
3658
3659void FullCodeGenerator::StoreToFrameField(int frame_offset, Register value) {
3660 DCHECK_EQ(static_cast<int>(POINTER_SIZE_ALIGN(frame_offset)), frame_offset);
3661 __ StoreP(value, MemOperand(fp, frame_offset));
3662}
3663
3664void FullCodeGenerator::LoadContextField(Register dst, int context_index) {
3665 __ LoadP(dst, ContextMemOperand(cp, context_index), r0);
3666}
3667
3668void FullCodeGenerator::PushFunctionArgumentForContextAllocation() {
3669 Scope* closure_scope = scope()->ClosureScope();
3670 if (closure_scope->is_script_scope() || closure_scope->is_module_scope()) {
3671 // Contexts nested in the native context have a canonical empty function
3672 // as their closure, not the anonymous closure containing the global
3673 // code.
3674 __ LoadNativeContextSlot(Context::CLOSURE_INDEX, ip);
3675 } else if (closure_scope->is_eval_scope()) {
3676 // Contexts created by a call to eval have the same closure as the
3677 // context calling eval, not the anonymous closure containing the eval
3678 // code. Fetch it from the context.
3679 __ LoadP(ip, ContextMemOperand(cp, Context::CLOSURE_INDEX));
3680 } else {
3681 DCHECK(closure_scope->is_function_scope());
3682 __ LoadP(ip, MemOperand(fp, JavaScriptFrameConstants::kFunctionOffset));
3683 }
3684 PushOperand(ip);
3685}
3686
3687// ----------------------------------------------------------------------------
3688// Non-local control flow support.
3689
3690void FullCodeGenerator::EnterFinallyBlock() {
3691 DCHECK(!result_register().is(r3));
3692 // Store pending message while executing finally block.
3693 ExternalReference pending_message_obj =
3694 ExternalReference::address_of_pending_message_obj(isolate());
3695 __ mov(ip, Operand(pending_message_obj));
3696 __ LoadP(r3, MemOperand(ip));
3697 PushOperand(r3);
3698
3699 ClearPendingMessage();
3700}
3701
3702void FullCodeGenerator::ExitFinallyBlock() {
3703 DCHECK(!result_register().is(r3));
3704 // Restore pending message from stack.
3705 PopOperand(r3);
3706 ExternalReference pending_message_obj =
3707 ExternalReference::address_of_pending_message_obj(isolate());
3708 __ mov(ip, Operand(pending_message_obj));
3709 __ StoreP(r3, MemOperand(ip));
3710}
3711
3712void FullCodeGenerator::ClearPendingMessage() {
3713 DCHECK(!result_register().is(r3));
3714 ExternalReference pending_message_obj =
3715 ExternalReference::address_of_pending_message_obj(isolate());
3716 __ LoadRoot(r3, Heap::kTheHoleValueRootIndex);
3717 __ mov(ip, Operand(pending_message_obj));
3718 __ StoreP(r3, MemOperand(ip));
3719}
3720
3721void FullCodeGenerator::DeferredCommands::EmitCommands() {
3722 DCHECK(!result_register().is(r3));
3723 // Restore the accumulator (r2) and token (r3).
3724 __ Pop(r3, result_register());
3725 for (DeferredCommand cmd : commands_) {
3726 Label skip;
3727 __ CmpSmiLiteral(r3, Smi::FromInt(cmd.token), r0);
3728 __ bne(&skip);
3729 switch (cmd.command) {
3730 case kReturn:
3731 codegen_->EmitUnwindAndReturn();
3732 break;
3733 case kThrow:
3734 __ Push(result_register());
3735 __ CallRuntime(Runtime::kReThrow);
3736 break;
3737 case kContinue:
3738 codegen_->EmitContinue(cmd.target);
3739 break;
3740 case kBreak:
3741 codegen_->EmitBreak(cmd.target);
3742 break;
3743 }
3744 __ bind(&skip);
3745 }
3746}
3747
3748#undef __
3749
3750#if V8_TARGET_ARCH_S390X
3751static const FourByteInstr kInterruptBranchInstruction = 0xA7A40011;
3752static const FourByteInstr kOSRBranchInstruction = 0xA7040011;
3753static const int16_t kBackEdgeBranchOffset = 0x11 * 2;
3754#else
3755static const FourByteInstr kInterruptBranchInstruction = 0xA7A4000D;
3756static const FourByteInstr kOSRBranchInstruction = 0xA704000D;
3757static const int16_t kBackEdgeBranchOffset = 0xD * 2;
3758#endif
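// If the BRC encoding is read as 0xA7 <mask> 4 <imm16>, mask 0xA is the
// branch-on-ge form used for the interrupt check, mask 0x0 never branches
// (effectively a nop) for the OSR case, and the 16-bit immediate is the
// halfword count to the ok-label (hence the * 2 when expressed in bytes).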
3759
3760void BackEdgeTable::PatchAt(Code* unoptimized_code, Address pc,
3761 BackEdgeState target_state,
3762 Code* replacement_code) {
3763 Address call_address = Assembler::target_address_from_return_address(pc);
3764 Address branch_address = call_address - 4;
3765 Isolate* isolate = unoptimized_code->GetIsolate();
3766 CodePatcher patcher(isolate, branch_address, 4);
3767
3768 switch (target_state) {
3769 case INTERRUPT: {
3770 // <decrement profiling counter>
3771 // bge <ok> ;; patched to GE BRC
3772      // brasl r14, <interrupt stub address>
3773 // <reset profiling counter>
3774 // ok-label
3775 patcher.masm()->brc(ge, Operand(kBackEdgeBranchOffset));
3776 break;
3777 }
3778 case ON_STACK_REPLACEMENT:
3779 // <decrement profiling counter>
3780 // brc 0x0, <ok> ;; patched to NOP BRC
3781      // brasl r14, <interrupt stub address>
3782 // <reset profiling counter>
3783 // ok-label ----- pc_after points here
3784 patcher.masm()->brc(CC_NOP, Operand(kBackEdgeBranchOffset));
3785 break;
3786 }
3787
3788 // Replace the stack check address in the mov sequence with the
3789 // entry address of the replacement code.
3790 Assembler::set_target_address_at(isolate, call_address, unoptimized_code,
3791 replacement_code->entry());
3792
3793 unoptimized_code->GetHeap()->incremental_marking()->RecordCodeTargetPatch(
3794 unoptimized_code, call_address, replacement_code);
3795}
3796
3797BackEdgeTable::BackEdgeState BackEdgeTable::GetBackEdgeState(
3798 Isolate* isolate, Code* unoptimized_code, Address pc) {
3799 Address call_address = Assembler::target_address_from_return_address(pc);
3800 Address branch_address = call_address - 4;
3801#ifdef DEBUG
3802 Address interrupt_address =
3803 Assembler::target_address_at(call_address, unoptimized_code);
3804#endif
3805
3806 DCHECK(BRC == Instruction::S390OpcodeValue(branch_address));
3807 // For interrupt, we expect a branch greater than or equal
3808 // i.e. BRC 0xa, +XXXX (0xA7A4XXXX)
3809 FourByteInstr br_instr = Instruction::InstructionBits(
3810 reinterpret_cast<const byte*>(branch_address));
3811 if (kInterruptBranchInstruction == br_instr) {
3812 DCHECK(interrupt_address == isolate->builtins()->InterruptCheck()->entry());
3813 return INTERRUPT;
3814 }
3815
3816 // Expect BRC to be patched to NOP branch.
3817 // i.e. BRC 0x0, +XXXX (0xA704XXXX)
3818 USE(kOSRBranchInstruction);
3819 DCHECK(kOSRBranchInstruction == br_instr);
3820
3821 DCHECK(interrupt_address ==
3822 isolate->builtins()->OnStackReplacement()->entry());
3823 return ON_STACK_REPLACEMENT;
3824}
3825
3826} // namespace internal
3827} // namespace v8
3828#endif // V8_TARGET_ARCH_S390