1// Copyright 2015 the V8 project authors. All rights reserved.
2// Use of this source code is governed by a BSD-style license that can be
3// found in the LICENSE file.
4
5#if V8_TARGET_ARCH_S390
6
7#include "src/ast/scopes.h"
8#include "src/code-factory.h"
9#include "src/code-stubs.h"
10#include "src/codegen.h"
11#include "src/debug/debug.h"
12#include "src/full-codegen/full-codegen.h"
13#include "src/ic/ic.h"
14#include "src/parsing/parser.h"
15
16#include "src/s390/code-stubs-s390.h"
17#include "src/s390/macro-assembler-s390.h"
18
19namespace v8 {
20namespace internal {
21
22#define __ ACCESS_MASM(masm())
23
24// A patch site is a location in the code that can be patched. This
25// class has a number of methods to emit the code which is patchable and the
26// method EmitPatchInfo to record a marker back to the patchable code. This
27// marker is a cmpi rx, #yyy instruction, and x * 0x0000ffff + yyy (raw 16 bit
28// immediate value is used) is the delta from the pc to the first instruction of
29// the patchable code.
30// See PatchInlinedSmiCode in ic-s390.cc for the code that patches it
31class JumpPatchSite BASE_EMBEDDED {
32 public:
33 explicit JumpPatchSite(MacroAssembler* masm) : masm_(masm) {
34#ifdef DEBUG
35 info_emitted_ = false;
36#endif
37 }
38
39 ~JumpPatchSite() { DCHECK(patch_site_.is_bound() == info_emitted_); }
40
41 // When initially emitting this, ensure that a jump is always generated to skip
42 // the inlined smi code.
43 void EmitJumpIfNotSmi(Register reg, Label* target) {
44 DCHECK(!patch_site_.is_bound() && !info_emitted_);
45 __ bind(&patch_site_);
46 __ CmpP(reg, reg);
47// Emit a nop to make more room for patching on 31-bit,
48// as the TestIfSmi sequence uses a 4-byte TMLL.
49#ifndef V8_TARGET_ARCH_S390X
50 __ nop();
51#endif
52 __ beq(target); // Always taken before patched.
53 }
54
55 // When initially emitting this, ensure that a jump is never generated to skip
56 // the inlined smi code.
57 void EmitJumpIfSmi(Register reg, Label* target) {
58 DCHECK(!patch_site_.is_bound() && !info_emitted_);
59 __ bind(&patch_site_);
60 __ CmpP(reg, reg);
61// Emit a nop to make more room for patching on 31-bit,
62// as the TestIfSmi sequence uses a 4-byte TMLL.
63#ifndef V8_TARGET_ARCH_S390X
64 __ nop();
65#endif
66 __ bne(target); // Never taken before patched.
67 }
68
69 void EmitPatchInfo() {
70 if (patch_site_.is_bound()) {
71 int delta_to_patch_site = masm_->SizeOfCodeGeneratedSince(&patch_site_);
72 DCHECK(is_int16(delta_to_patch_site));
73 __ chi(r0, Operand(delta_to_patch_site));
74#ifdef DEBUG
75 info_emitted_ = true;
76#endif
77 } else {
78 __ nop();
79 __ nop();
80 }
81 }
82
83 private:
84 MacroAssembler* masm() { return masm_; }
85 MacroAssembler* masm_;
86 Label patch_site_;
87#ifdef DEBUG
88 bool info_emitted_;
89#endif
90};
91
92// Generate code for a JS function. On entry to the function the receiver
93// and arguments have been pushed on the stack left to right. The actual
94// argument count matches the formal parameter count expected by the
95// function.
96//
97// The live registers are:
98// o r3: the JS function object being called (i.e., ourselves)
99// o r5: the new target value
100// o cp: our context
101// o fp: our caller's frame pointer
102// o sp: stack pointer
103// o lr: return address
104// o ip: our own function entry (required by the prologue)
105//
106// The function builds a JS frame. Please see JavaScriptFrameConstants in
107// frames-s390.h for its layout.
108void FullCodeGenerator::Generate() {
109 CompilationInfo* info = info_;
110 profiling_counter_ = isolate()->factory()->NewCell(
111 Handle<Smi>(Smi::FromInt(FLAG_interrupt_budget), isolate()));
112 SetFunctionPosition(literal());
113 Comment cmnt(masm_, "[ function compiled by full code generator");
114
115 ProfileEntryHookStub::MaybeCallEntryHook(masm_);
116
117 if (FLAG_debug_code && info->ExpectsJSReceiverAsReceiver()) {
118 int receiver_offset = info->scope()->num_parameters() * kPointerSize;
119 __ LoadP(r4, MemOperand(sp, receiver_offset), r0);
120 __ AssertNotSmi(r4);
121 __ CompareObjectType(r4, r4, no_reg, FIRST_JS_RECEIVER_TYPE);
122 __ Assert(ge, kSloppyFunctionExpectsJSReceiverReceiver);
123 }
124
125 // Open a frame scope to indicate that there is a frame on the stack. The
126 // MANUAL indicates that the scope shouldn't actually generate code to set up
127 // the frame (that is done below).
128 FrameScope frame_scope(masm_, StackFrame::MANUAL);
129 int prologue_offset = masm_->pc_offset();
130
131 info->set_prologue_offset(prologue_offset);
132 __ Prologue(info->GeneratePreagedPrologue(), ip, prologue_offset);
133
134 {
135 Comment cmnt(masm_, "[ Allocate locals");
136 int locals_count = info->scope()->num_stack_slots();
137 // Generators allocate locals, if any, in context slots.
138 DCHECK(!IsGeneratorFunction(info->literal()->kind()) || locals_count == 0);
139 OperandStackDepthIncrement(locals_count);
140 if (locals_count > 0) {
141 if (locals_count >= 128) {
142 Label ok;
143 __ AddP(ip, sp, Operand(-(locals_count * kPointerSize)));
144 __ LoadRoot(r5, Heap::kRealStackLimitRootIndex);
145 __ CmpLogicalP(ip, r5);
146 __ bge(&ok, Label::kNear);
147 __ CallRuntime(Runtime::kThrowStackOverflow);
148 __ bind(&ok);
149 }
150 __ LoadRoot(ip, Heap::kUndefinedValueRootIndex);
151 int kMaxPushes = FLAG_optimize_for_size ? 4 : 32;
152 if (locals_count >= kMaxPushes) {
153 int loop_iterations = locals_count / kMaxPushes;
154 __ mov(r4, Operand(loop_iterations));
155 Label loop_header;
156 __ bind(&loop_header);
157 // Do pushes.
158 // TODO(joransiu): Use MVC for better performance
159 __ lay(sp, MemOperand(sp, -kMaxPushes * kPointerSize));
160 for (int i = 0; i < kMaxPushes; i++) {
161 __ StoreP(ip, MemOperand(sp, i * kPointerSize));
162 }
163 // Continue loop if not done.
164 __ BranchOnCount(r4, &loop_header);
165 }
166 int remaining = locals_count % kMaxPushes;
167 // Emit the remaining pushes.
168 // TODO(joransiu): Use MVC for better performance
169 if (remaining > 0) {
170 __ lay(sp, MemOperand(sp, -remaining * kPointerSize));
171 for (int i = 0; i < remaining; i++) {
172 __ StoreP(ip, MemOperand(sp, i * kPointerSize));
173 }
174 }
175 }
176 }
177
178 bool function_in_register_r3 = true;
179
180 // Possibly allocate a local context.
181 if (info->scope()->num_heap_slots() > 0) {
182 // Argument to NewContext is the function, which is still in r3.
183 Comment cmnt(masm_, "[ Allocate context");
184 bool need_write_barrier = true;
185 int slots = info->scope()->num_heap_slots() - Context::MIN_CONTEXT_SLOTS;
186 if (info->scope()->is_script_scope()) {
187 __ push(r3);
188 __ Push(info->scope()->GetScopeInfo(info->isolate()));
189 __ CallRuntime(Runtime::kNewScriptContext);
190 PrepareForBailoutForId(BailoutId::ScriptContext(),
191 BailoutState::TOS_REGISTER);
192 // The new target value is not used, clobbering is safe.
193 DCHECK_NULL(info->scope()->new_target_var());
194 } else {
195 if (info->scope()->new_target_var() != nullptr) {
196 __ push(r5); // Preserve new target.
197 }
198 if (slots <= FastNewContextStub::kMaximumSlots) {
199 FastNewContextStub stub(isolate(), slots);
200 __ CallStub(&stub);
201 // Result of FastNewContextStub is always in new space.
202 need_write_barrier = false;
203 } else {
204 __ push(r3);
205 __ CallRuntime(Runtime::kNewFunctionContext);
206 }
207 if (info->scope()->new_target_var() != nullptr) {
208 __ pop(r5); // Restore new target.
209 }
210 }
211 function_in_register_r3 = false;
212 // Context is returned in r2. It replaces the context passed to us.
213 // It's saved in the stack and kept live in cp.
214 __ LoadRR(cp, r2);
215 __ StoreP(r2, MemOperand(fp, StandardFrameConstants::kContextOffset));
216 // Copy any necessary parameters into the context.
217 int num_parameters = info->scope()->num_parameters();
218 int first_parameter = info->scope()->has_this_declaration() ? -1 : 0;
219 for (int i = first_parameter; i < num_parameters; i++) {
220 Variable* var = (i == -1) ? scope()->receiver() : scope()->parameter(i);
221 if (var->IsContextSlot()) {
222 int parameter_offset = StandardFrameConstants::kCallerSPOffset +
223 (num_parameters - 1 - i) * kPointerSize;
224 // Load parameter from stack.
225 __ LoadP(r2, MemOperand(fp, parameter_offset), r0);
226 // Store it in the context.
227 MemOperand target = ContextMemOperand(cp, var->index());
228 __ StoreP(r2, target);
229
230 // Update the write barrier.
231 if (need_write_barrier) {
232 __ RecordWriteContextSlot(cp, target.offset(), r2, r4,
233 kLRHasBeenSaved, kDontSaveFPRegs);
234 } else if (FLAG_debug_code) {
235 Label done;
236 __ JumpIfInNewSpace(cp, r2, &done);
237 __ Abort(kExpectedNewSpaceObject);
238 __ bind(&done);
239 }
240 }
241 }
242 }
243
244 // Registers holding this function and new target are both trashed in case we
245 // bail out here. But since that can happen only when new target is not used
246 // and we allocate a context, the value of |function_in_register| is correct.
247 PrepareForBailoutForId(BailoutId::FunctionContext(),
248 BailoutState::NO_REGISTERS);
249
250 // Possibly set up a local binding to the this function which is used in
251 // derived constructors with super calls.
252 Variable* this_function_var = scope()->this_function_var();
253 if (this_function_var != nullptr) {
254 Comment cmnt(masm_, "[ This function");
255 if (!function_in_register_r3) {
256 __ LoadP(r3, MemOperand(fp, JavaScriptFrameConstants::kFunctionOffset));
257 // The write barrier clobbers register again, keep it marked as such.
258 }
259 SetVar(this_function_var, r3, r2, r4);
260 }
261
262 // Possibly set up a local binding to the new target value.
263 Variable* new_target_var = scope()->new_target_var();
264 if (new_target_var != nullptr) {
265 Comment cmnt(masm_, "[ new.target");
266 SetVar(new_target_var, r5, r2, r4);
267 }
268
269 // Possibly allocate RestParameters
270 int rest_index;
271 Variable* rest_param = scope()->rest_parameter(&rest_index);
272 if (rest_param) {
273 Comment cmnt(masm_, "[ Allocate rest parameter array");
274
275 if (!function_in_register_r3) {
276 __ LoadP(r3, MemOperand(fp, JavaScriptFrameConstants::kFunctionOffset));
277 }
278 FastNewRestParameterStub stub(isolate());
279 __ CallStub(&stub);
280
281 function_in_register_r3 = false;
282 SetVar(rest_param, r2, r3, r4);
283 }
284
285 Variable* arguments = scope()->arguments();
286 if (arguments != NULL) {
287 // Function uses arguments object.
288 Comment cmnt(masm_, "[ Allocate arguments object");
289 if (!function_in_register_r3) {
290 // Load this again, if it's used by the local context below.
291 __ LoadP(r3, MemOperand(fp, JavaScriptFrameConstants::kFunctionOffset));
292 }
293 if (is_strict(language_mode()) || !has_simple_parameters()) {
294 FastNewStrictArgumentsStub stub(isolate());
295 __ CallStub(&stub);
296 } else if (literal()->has_duplicate_parameters()) {
297 __ Push(r3);
298 __ CallRuntime(Runtime::kNewSloppyArguments_Generic);
299 } else {
300 FastNewSloppyArgumentsStub stub(isolate());
301 __ CallStub(&stub);
302 }
303
304 SetVar(arguments, r2, r3, r4);
305 }
306
307 if (FLAG_trace) {
308 __ CallRuntime(Runtime::kTraceEnter);
309 }
310
311 // Visit the declarations and body.
312 PrepareForBailoutForId(BailoutId::FunctionEntry(),
313 BailoutState::NO_REGISTERS);
314 {
315 Comment cmnt(masm_, "[ Declarations");
316 VisitDeclarations(scope()->declarations());
317 }
318
319 // Assert that the declarations do not use ICs. Otherwise the debugger
320 // won't be able to redirect a PC at an IC to the correct IC in newly
321 // recompiled code.
322 DCHECK_EQ(0, ic_total_count_);
323
324 {
325 Comment cmnt(masm_, "[ Stack check");
326 PrepareForBailoutForId(BailoutId::Declarations(),
327 BailoutState::NO_REGISTERS);
328 Label ok;
329 __ LoadRoot(ip, Heap::kStackLimitRootIndex);
330 __ CmpLogicalP(sp, ip);
331 __ bge(&ok, Label::kNear);
332 __ Call(isolate()->builtins()->StackCheck(), RelocInfo::CODE_TARGET);
333 __ bind(&ok);
334 }
335
336 {
337 Comment cmnt(masm_, "[ Body");
338 DCHECK(loop_depth() == 0);
339 VisitStatements(literal()->body());
340 DCHECK(loop_depth() == 0);
341 }
342
343 // Always emit a 'return undefined' in case control fell off the end of
344 // the body.
345 {
346 Comment cmnt(masm_, "[ return <undefined>;");
347 __ LoadRoot(r2, Heap::kUndefinedValueRootIndex);
348 }
349 EmitReturnSequence();
350}
351
352void FullCodeGenerator::ClearAccumulator() {
353 __ LoadSmiLiteral(r2, Smi::FromInt(0));
354}
355
356void FullCodeGenerator::EmitProfilingCounterDecrement(int delta) {
357 __ mov(r4, Operand(profiling_counter_));
358 intptr_t smi_delta = reinterpret_cast<intptr_t>(Smi::FromInt(delta));
359 if (CpuFeatures::IsSupported(GENERAL_INSTR_EXT) && is_int8(-smi_delta)) {
360 __ AddP(FieldMemOperand(r4, Cell::kValueOffset), Operand(-smi_delta));
361 __ LoadP(r5, FieldMemOperand(r4, Cell::kValueOffset));
362 } else {
363 __ LoadP(r5, FieldMemOperand(r4, Cell::kValueOffset));
364 __ SubSmiLiteral(r5, r5, Smi::FromInt(delta), r0);
365 __ StoreP(r5, FieldMemOperand(r4, Cell::kValueOffset));
366 }
367}
368
369void FullCodeGenerator::EmitProfilingCounterReset() {
370 int reset_value = FLAG_interrupt_budget;
371 __ mov(r4, Operand(profiling_counter_));
372 __ LoadSmiLiteral(r5, Smi::FromInt(reset_value));
373 __ StoreP(r5, FieldMemOperand(r4, Cell::kValueOffset));
374}
375
376void FullCodeGenerator::EmitBackEdgeBookkeeping(IterationStatement* stmt,
377 Label* back_edge_target) {
378 Comment cmnt(masm_, "[ Back edge bookkeeping");
379 Label ok;
380
381 DCHECK(back_edge_target->is_bound());
382 int distance = masm_->SizeOfCodeGeneratedSince(back_edge_target) +
383 kCodeSizeMultiplier / 2;
384 int weight = Min(kMaxBackEdgeWeight, Max(1, distance / kCodeSizeMultiplier));
385 EmitProfilingCounterDecrement(weight);
386 {
387 // BackEdgeTable::PatchAt manipulates this sequence.
388 __ bge(&ok, Label::kNear);
389 __ Call(isolate()->builtins()->InterruptCheck(), RelocInfo::CODE_TARGET);
390
391 // Record a mapping of this PC offset to the OSR id. This is used to find
392 // the AST id from the unoptimized code in order to use it as a key into
393 // the deoptimization input data found in the optimized code.
394 RecordBackEdge(stmt->OsrEntryId());
395 }
396 EmitProfilingCounterReset();
397
398 __ bind(&ok);
399 PrepareForBailoutForId(stmt->EntryId(), BailoutState::NO_REGISTERS);
400 // Record a mapping of the OSR id to this PC. This is used if the OSR
401 // entry becomes the target of a bailout. We don't expect it to be, but
402 // we want it to work if it is.
403 PrepareForBailoutForId(stmt->OsrEntryId(), BailoutState::NO_REGISTERS);
404}
405
406void FullCodeGenerator::EmitProfilingCounterHandlingForReturnSequence(
407 bool is_tail_call) {
408 // Pretend that the exit is a backwards jump to the entry.
409 int weight = 1;
410 if (info_->ShouldSelfOptimize()) {
411 weight = FLAG_interrupt_budget / FLAG_self_opt_count;
412 } else {
413 int distance = masm_->pc_offset() + kCodeSizeMultiplier / 2;
414 weight = Min(kMaxBackEdgeWeight, Max(1, distance / kCodeSizeMultiplier));
415 }
416 EmitProfilingCounterDecrement(weight);
417 Label ok;
418 __ CmpP(r5, Operand::Zero());
419 __ bge(&ok);
420 // Don't need to save result register if we are going to do a tail call.
421 if (!is_tail_call) {
422 __ push(r2);
423 }
424 __ Call(isolate()->builtins()->InterruptCheck(), RelocInfo::CODE_TARGET);
425 if (!is_tail_call) {
426 __ pop(r2);
427 }
428 EmitProfilingCounterReset();
429 __ bind(&ok);
430}
431
432void FullCodeGenerator::EmitReturnSequence() {
433 Comment cmnt(masm_, "[ Return sequence");
434 if (return_label_.is_bound()) {
435 __ b(&return_label_);
436 } else {
437 __ bind(&return_label_);
438 if (FLAG_trace) {
439 // Push the return value on the stack as the parameter.
440 // Runtime::TraceExit returns its parameter in r2
441 __ push(r2);
442 __ CallRuntime(Runtime::kTraceExit);
443 }
444 EmitProfilingCounterHandlingForReturnSequence(false);
445
446 // Make sure that the constant pool is not emitted inside of the return
447 // sequence.
448 {
449 // Here we use masm_-> instead of the __ macro to keep the code coverage
450 // tool from instrumenting, as we rely on the code size here.
451 int32_t arg_count = info_->scope()->num_parameters() + 1;
452 int32_t sp_delta = arg_count * kPointerSize;
453 SetReturnPosition(literal());
454 __ LeaveFrame(StackFrame::JAVA_SCRIPT, sp_delta);
455
456 __ Ret();
457 }
458 }
459}
460
461void FullCodeGenerator::RestoreContext() {
462 __ LoadP(cp, MemOperand(fp, StandardFrameConstants::kContextOffset));
463}
464
465void FullCodeGenerator::StackValueContext::Plug(Variable* var) const {
466 DCHECK(var->IsStackAllocated() || var->IsContextSlot());
467 codegen()->GetVar(result_register(), var);
468 codegen()->PushOperand(result_register());
469}
470
471void FullCodeGenerator::EffectContext::Plug(Heap::RootListIndex index) const {}
472
473void FullCodeGenerator::AccumulatorValueContext::Plug(
474 Heap::RootListIndex index) const {
475 __ LoadRoot(result_register(), index);
476}
477
478void FullCodeGenerator::StackValueContext::Plug(
479 Heap::RootListIndex index) const {
480 __ LoadRoot(result_register(), index);
481 codegen()->PushOperand(result_register());
482}
483
484void FullCodeGenerator::TestContext::Plug(Heap::RootListIndex index) const {
485 codegen()->PrepareForBailoutBeforeSplit(condition(), true, true_label_,
486 false_label_);
487 if (index == Heap::kUndefinedValueRootIndex ||
488 index == Heap::kNullValueRootIndex ||
489 index == Heap::kFalseValueRootIndex) {
490 if (false_label_ != fall_through_) __ b(false_label_);
491 } else if (index == Heap::kTrueValueRootIndex) {
492 if (true_label_ != fall_through_) __ b(true_label_);
493 } else {
494 __ LoadRoot(result_register(), index);
495 codegen()->DoTest(this);
496 }
497}
498
499void FullCodeGenerator::EffectContext::Plug(Handle<Object> lit) const {}
500
501void FullCodeGenerator::AccumulatorValueContext::Plug(
502 Handle<Object> lit) const {
503 __ mov(result_register(), Operand(lit));
504}
505
506void FullCodeGenerator::StackValueContext::Plug(Handle<Object> lit) const {
507 // Immediates cannot be pushed directly.
508 __ mov(result_register(), Operand(lit));
509 codegen()->PushOperand(result_register());
510}
511
512void FullCodeGenerator::TestContext::Plug(Handle<Object> lit) const {
513 codegen()->PrepareForBailoutBeforeSplit(condition(), true, true_label_,
514 false_label_);
515 DCHECK(lit->IsNull(isolate()) || lit->IsUndefined(isolate()) ||
516 !lit->IsUndetectable());
517 if (lit->IsUndefined(isolate()) || lit->IsNull(isolate()) ||
518 lit->IsFalse(isolate())) {
519 if (false_label_ != fall_through_) __ b(false_label_);
520 } else if (lit->IsTrue(isolate()) || lit->IsJSObject()) {
521 if (true_label_ != fall_through_) __ b(true_label_);
522 } else if (lit->IsString()) {
523 if (String::cast(*lit)->length() == 0) {
524 if (false_label_ != fall_through_) __ b(false_label_);
525 } else {
526 if (true_label_ != fall_through_) __ b(true_label_);
527 }
528 } else if (lit->IsSmi()) {
529 if (Smi::cast(*lit)->value() == 0) {
530 if (false_label_ != fall_through_) __ b(false_label_);
531 } else {
532 if (true_label_ != fall_through_) __ b(true_label_);
533 }
534 } else {
535 // For simplicity we always test the accumulator register.
536 __ mov(result_register(), Operand(lit));
537 codegen()->DoTest(this);
538 }
539}
540
541void FullCodeGenerator::StackValueContext::DropAndPlug(int count,
542 Register reg) const {
543 DCHECK(count > 0);
544 if (count > 1) codegen()->DropOperands(count - 1);
545 __ StoreP(reg, MemOperand(sp, 0));
546}
547
548void FullCodeGenerator::EffectContext::Plug(Label* materialize_true,
549 Label* materialize_false) const {
550 DCHECK(materialize_true == materialize_false);
551 __ bind(materialize_true);
552}
553
554void FullCodeGenerator::AccumulatorValueContext::Plug(
555 Label* materialize_true, Label* materialize_false) const {
556 Label done;
557 __ bind(materialize_true);
558 __ LoadRoot(result_register(), Heap::kTrueValueRootIndex);
559 __ b(&done, Label::kNear);
560 __ bind(materialize_false);
561 __ LoadRoot(result_register(), Heap::kFalseValueRootIndex);
562 __ bind(&done);
563}
564
565void FullCodeGenerator::StackValueContext::Plug(
566 Label* materialize_true, Label* materialize_false) const {
567 Label done;
568 __ bind(materialize_true);
569 __ LoadRoot(ip, Heap::kTrueValueRootIndex);
570 __ b(&done, Label::kNear);
571 __ bind(materialize_false);
572 __ LoadRoot(ip, Heap::kFalseValueRootIndex);
573 __ bind(&done);
574 codegen()->PushOperand(ip);
575}
576
577void FullCodeGenerator::TestContext::Plug(Label* materialize_true,
578 Label* materialize_false) const {
579 DCHECK(materialize_true == true_label_);
580 DCHECK(materialize_false == false_label_);
581}
582
583void FullCodeGenerator::AccumulatorValueContext::Plug(bool flag) const {
584 Heap::RootListIndex value_root_index =
585 flag ? Heap::kTrueValueRootIndex : Heap::kFalseValueRootIndex;
586 __ LoadRoot(result_register(), value_root_index);
587}
588
589void FullCodeGenerator::StackValueContext::Plug(bool flag) const {
590 Heap::RootListIndex value_root_index =
591 flag ? Heap::kTrueValueRootIndex : Heap::kFalseValueRootIndex;
592 __ LoadRoot(ip, value_root_index);
593 codegen()->PushOperand(ip);
594}
595
596void FullCodeGenerator::TestContext::Plug(bool flag) const {
597 codegen()->PrepareForBailoutBeforeSplit(condition(), true, true_label_,
598 false_label_);
599 if (flag) {
600 if (true_label_ != fall_through_) __ b(true_label_);
601 } else {
602 if (false_label_ != fall_through_) __ b(false_label_);
603 }
604}
605
606void FullCodeGenerator::DoTest(Expression* condition, Label* if_true,
607 Label* if_false, Label* fall_through) {
608 Handle<Code> ic = ToBooleanICStub::GetUninitialized(isolate());
609 CallIC(ic, condition->test_id());
610 __ CompareRoot(result_register(), Heap::kTrueValueRootIndex);
611 Split(eq, if_true, if_false, fall_through);
612}
613
614void FullCodeGenerator::Split(Condition cond, Label* if_true, Label* if_false,
615 Label* fall_through) {
616 if (if_false == fall_through) {
617 __ b(cond, if_true);
618 } else if (if_true == fall_through) {
619 __ b(NegateCondition(cond), if_false);
620 } else {
621 __ b(cond, if_true);
622 __ b(if_false);
623 }
624}
625
626MemOperand FullCodeGenerator::StackOperand(Variable* var) {
627 DCHECK(var->IsStackAllocated());
628 // Offset is negative because higher indexes are at lower addresses.
629 int offset = -var->index() * kPointerSize;
630 // Adjust by a (parameter or local) base offset.
631 if (var->IsParameter()) {
632 offset += (info_->scope()->num_parameters() + 1) * kPointerSize;
633 } else {
634 offset += JavaScriptFrameConstants::kLocal0Offset;
635 }
636 return MemOperand(fp, offset);
637}
638
639MemOperand FullCodeGenerator::VarOperand(Variable* var, Register scratch) {
640 DCHECK(var->IsContextSlot() || var->IsStackAllocated());
641 if (var->IsContextSlot()) {
642 int context_chain_length = scope()->ContextChainLength(var->scope());
643 __ LoadContext(scratch, context_chain_length);
644 return ContextMemOperand(scratch, var->index());
645 } else {
646 return StackOperand(var);
647 }
648}
649
650void FullCodeGenerator::GetVar(Register dest, Variable* var) {
651 // Use destination as scratch.
652 MemOperand location = VarOperand(var, dest);
653 __ LoadP(dest, location, r0);
654}
655
656void FullCodeGenerator::SetVar(Variable* var, Register src, Register scratch0,
657 Register scratch1) {
658 DCHECK(var->IsContextSlot() || var->IsStackAllocated());
659 DCHECK(!scratch0.is(src));
660 DCHECK(!scratch0.is(scratch1));
661 DCHECK(!scratch1.is(src));
662 MemOperand location = VarOperand(var, scratch0);
663 __ StoreP(src, location);
664
665 // Emit the write barrier code if the location is in the heap.
666 if (var->IsContextSlot()) {
667 __ RecordWriteContextSlot(scratch0, location.offset(), src, scratch1,
668 kLRHasBeenSaved, kDontSaveFPRegs);
669 }
670}
671
672void FullCodeGenerator::PrepareForBailoutBeforeSplit(Expression* expr,
673 bool should_normalize,
674 Label* if_true,
675 Label* if_false) {
676 // Only prepare for bailouts before splits if we're in a test
677 // context. Otherwise, we let the Visit function deal with the
678 // preparation to avoid preparing with the same AST id twice.
679 if (!context()->IsTest()) return;
680
681 Label skip;
682 if (should_normalize) __ b(&skip);
683 PrepareForBailout(expr, BailoutState::TOS_REGISTER);
684 if (should_normalize) {
685 __ CompareRoot(r2, Heap::kTrueValueRootIndex);
686 Split(eq, if_true, if_false, NULL);
687 __ bind(&skip);
688 }
689}
690
691void FullCodeGenerator::EmitDebugCheckDeclarationContext(Variable* variable) {
692 // The variable in the declaration always resides in the current function
693 // context.
694 DCHECK_EQ(0, scope()->ContextChainLength(variable->scope()));
695 if (FLAG_debug_code) {
696 // Check that we're not inside a with or catch context.
697 __ LoadP(r3, FieldMemOperand(cp, HeapObject::kMapOffset));
698 __ CompareRoot(r3, Heap::kWithContextMapRootIndex);
699 __ Check(ne, kDeclarationInWithContext);
700 __ CompareRoot(r3, Heap::kCatchContextMapRootIndex);
701 __ Check(ne, kDeclarationInCatchContext);
702 }
703}
704
705void FullCodeGenerator::VisitVariableDeclaration(
706 VariableDeclaration* declaration) {
707 // If it was not possible to allocate the variable at compile time, we
708 // need to "declare" it at runtime to make sure it actually exists in the
709 // local context.
710 VariableProxy* proxy = declaration->proxy();
711 VariableMode mode = declaration->mode();
712 Variable* variable = proxy->var();
713 bool hole_init = mode == LET || mode == CONST;
714 switch (variable->location()) {
715 case VariableLocation::GLOBAL:
716 case VariableLocation::UNALLOCATED:
717 DCHECK(!variable->binding_needs_init());
718 globals_->Add(variable->name(), zone());
719 globals_->Add(isolate()->factory()->undefined_value(), zone());
720 break;
721
722 case VariableLocation::PARAMETER:
723 case VariableLocation::LOCAL:
724 if (hole_init) {
725 Comment cmnt(masm_, "[ VariableDeclaration");
726 __ LoadRoot(ip, Heap::kTheHoleValueRootIndex);
727 __ StoreP(ip, StackOperand(variable));
728 }
729 break;
730
731 case VariableLocation::CONTEXT:
732 if (hole_init) {
733 Comment cmnt(masm_, "[ VariableDeclaration");
734 EmitDebugCheckDeclarationContext(variable);
735 __ LoadRoot(ip, Heap::kTheHoleValueRootIndex);
736 __ StoreP(ip, ContextMemOperand(cp, variable->index()));
737 // No write barrier since the_hole_value is in old space.
738 PrepareForBailoutForId(proxy->id(), BailoutState::NO_REGISTERS);
739 }
740 break;
741
742 case VariableLocation::LOOKUP: {
743 Comment cmnt(masm_, "[ VariableDeclaration");
744 DCHECK_EQ(VAR, mode);
745 DCHECK(!hole_init);
746 __ mov(r4, Operand(variable->name()));
747 __ Push(r4);
748 __ CallRuntime(Runtime::kDeclareEvalVar);
749 PrepareForBailoutForId(proxy->id(), BailoutState::NO_REGISTERS);
750 break;
751 }
752 }
753}
754
755void FullCodeGenerator::VisitFunctionDeclaration(
756 FunctionDeclaration* declaration) {
757 VariableProxy* proxy = declaration->proxy();
758 Variable* variable = proxy->var();
759 switch (variable->location()) {
760 case VariableLocation::GLOBAL:
761 case VariableLocation::UNALLOCATED: {
762 globals_->Add(variable->name(), zone());
763 Handle<SharedFunctionInfo> function =
764 Compiler::GetSharedFunctionInfo(declaration->fun(), script(), info_);
765 // Check for stack-overflow exception.
766 if (function.is_null()) return SetStackOverflow();
767 globals_->Add(function, zone());
768 break;
769 }
770
771 case VariableLocation::PARAMETER:
772 case VariableLocation::LOCAL: {
773 Comment cmnt(masm_, "[ FunctionDeclaration");
774 VisitForAccumulatorValue(declaration->fun());
775 __ StoreP(result_register(), StackOperand(variable));
776 break;
777 }
778
779 case VariableLocation::CONTEXT: {
780 Comment cmnt(masm_, "[ FunctionDeclaration");
781 EmitDebugCheckDeclarationContext(variable);
782 VisitForAccumulatorValue(declaration->fun());
783 __ StoreP(result_register(), ContextMemOperand(cp, variable->index()));
784 int offset = Context::SlotOffset(variable->index());
785 // We know that we have written a function, which is not a smi.
786 __ RecordWriteContextSlot(cp, offset, result_register(), r4,
787 kLRHasBeenSaved, kDontSaveFPRegs,
788 EMIT_REMEMBERED_SET, OMIT_SMI_CHECK);
789 PrepareForBailoutForId(proxy->id(), BailoutState::NO_REGISTERS);
790 break;
791 }
792
793 case VariableLocation::LOOKUP: {
794 Comment cmnt(masm_, "[ FunctionDeclaration");
795 __ mov(r4, Operand(variable->name()));
796 PushOperand(r4);
797 // Push initial value for function declaration.
798 VisitForStackValue(declaration->fun());
799 CallRuntimeWithOperands(Runtime::kDeclareEvalFunction);
800 PrepareForBailoutForId(proxy->id(), BailoutState::NO_REGISTERS);
801 break;
802 }
803 }
804}
805
806void FullCodeGenerator::DeclareGlobals(Handle<FixedArray> pairs) {
807 // Call the runtime to declare the globals.
808 __ mov(r3, Operand(pairs));
809 __ LoadSmiLiteral(r2, Smi::FromInt(DeclareGlobalsFlags()));
810 __ Push(r3, r2);
811 __ CallRuntime(Runtime::kDeclareGlobals);
812 // Return value is ignored.
813}
814
815void FullCodeGenerator::DeclareModules(Handle<FixedArray> descriptions) {
816 // Call the runtime to declare the modules.
817 __ Push(descriptions);
818 __ CallRuntime(Runtime::kDeclareModules);
819 // Return value is ignored.
820}
821
822void FullCodeGenerator::VisitSwitchStatement(SwitchStatement* stmt) {
823 Comment cmnt(masm_, "[ SwitchStatement");
824 Breakable nested_statement(this, stmt);
825 SetStatementPosition(stmt);
826
827 // Keep the switch value on the stack until a case matches.
828 VisitForStackValue(stmt->tag());
829 PrepareForBailoutForId(stmt->EntryId(), BailoutState::NO_REGISTERS);
830
831 ZoneList<CaseClause*>* clauses = stmt->cases();
832 CaseClause* default_clause = NULL; // Can occur anywhere in the list.
833
834 Label next_test; // Recycled for each test.
835 // Compile all the tests with branches to their bodies.
836 for (int i = 0; i < clauses->length(); i++) {
837 CaseClause* clause = clauses->at(i);
838 clause->body_target()->Unuse();
839
840 // The default is not a test, but remember it as final fall through.
841 if (clause->is_default()) {
842 default_clause = clause;
843 continue;
844 }
845
846 Comment cmnt(masm_, "[ Case comparison");
847 __ bind(&next_test);
848 next_test.Unuse();
849
850 // Compile the label expression.
851 VisitForAccumulatorValue(clause->label());
852
853 // Perform the comparison as if via '==='.
854 __ LoadP(r3, MemOperand(sp, 0)); // Switch value.
855 bool inline_smi_code = ShouldInlineSmiCase(Token::EQ_STRICT);
856 JumpPatchSite patch_site(masm_);
857 if (inline_smi_code) {
858 Label slow_case;
859 __ LoadRR(r4, r2);
860 __ OrP(r4, r3);
861 patch_site.EmitJumpIfNotSmi(r4, &slow_case);
862
863 __ CmpP(r3, r2);
864 __ bne(&next_test);
865 __ Drop(1); // Switch value is no longer needed.
866 __ b(clause->body_target());
867 __ bind(&slow_case);
868 }
869
870 // Record position before stub call for type feedback.
871 SetExpressionPosition(clause);
872 Handle<Code> ic =
873 CodeFactory::CompareIC(isolate(), Token::EQ_STRICT).code();
874 CallIC(ic, clause->CompareId());
875 patch_site.EmitPatchInfo();
876
877 Label skip;
878 __ b(&skip);
879 PrepareForBailout(clause, BailoutState::TOS_REGISTER);
880 __ CompareRoot(r2, Heap::kTrueValueRootIndex);
881 __ bne(&next_test);
882 __ Drop(1);
883 __ b(clause->body_target());
884 __ bind(&skip);
885
886 __ CmpP(r2, Operand::Zero());
887 __ bne(&next_test);
888 __ Drop(1); // Switch value is no longer needed.
889 __ b(clause->body_target());
890 }
891
892 // Discard the test value and jump to the default if present, otherwise to
893 // the end of the statement.
894 __ bind(&next_test);
895 DropOperands(1); // Switch value is no longer needed.
896 if (default_clause == NULL) {
897 __ b(nested_statement.break_label());
898 } else {
899 __ b(default_clause->body_target());
900 }
901
902 // Compile all the case bodies.
903 for (int i = 0; i < clauses->length(); i++) {
904 Comment cmnt(masm_, "[ Case body");
905 CaseClause* clause = clauses->at(i);
906 __ bind(clause->body_target());
907 PrepareForBailoutForId(clause->EntryId(), BailoutState::NO_REGISTERS);
908 VisitStatements(clause->statements());
909 }
910
911 __ bind(nested_statement.break_label());
912 PrepareForBailoutForId(stmt->ExitId(), BailoutState::NO_REGISTERS);
913}
914
915void FullCodeGenerator::VisitForInStatement(ForInStatement* stmt) {
916 Comment cmnt(masm_, "[ ForInStatement");
917 SetStatementPosition(stmt, SKIP_BREAK);
918
919 FeedbackVectorSlot slot = stmt->ForInFeedbackSlot();
920
921 // Get the object to enumerate over.
922 SetExpressionAsStatementPosition(stmt->enumerable());
923 VisitForAccumulatorValue(stmt->enumerable());
924 OperandStackDepthIncrement(5);
925
926 Label loop, exit;
927 Iteration loop_statement(this, stmt);
928 increment_loop_depth();
929
930 // If the object is null or undefined, skip over the loop, otherwise convert
931 // it to a JS receiver. See ECMA-262 version 5, section 12.6.4.
932 Label convert, done_convert;
933 __ JumpIfSmi(r2, &convert);
934 __ CompareObjectType(r2, r3, r3, FIRST_JS_RECEIVER_TYPE);
935 __ bge(&done_convert);
936 __ CompareRoot(r2, Heap::kNullValueRootIndex);
937 __ beq(&exit);
938 __ CompareRoot(r2, Heap::kUndefinedValueRootIndex);
939 __ beq(&exit);
940 __ bind(&convert);
941 ToObjectStub stub(isolate());
942 __ CallStub(&stub);
943 __ bind(&done_convert);
944 PrepareForBailoutForId(stmt->ToObjectId(), BailoutState::TOS_REGISTER);
945 __ push(r2);
946
947 // Check cache validity in generated code. If we cannot guarantee cache
948 // validity, call the runtime system to check cache validity or get the
949 // property names in a fixed array. Note: Proxies never have an enum cache,
950 // so will always take the slow path.
951 Label call_runtime;
952 __ CheckEnumCache(&call_runtime);
953
954 // The enum cache is valid. Load the map of the object being
955 // iterated over and use the cache for the iteration.
956 Label use_cache;
957 __ LoadP(r2, FieldMemOperand(r2, HeapObject::kMapOffset));
958 __ b(&use_cache);
959
960 // Get the set of properties to enumerate.
961 __ bind(&call_runtime);
962 __ push(r2); // Duplicate the enumerable object on the stack.
963 __ CallRuntime(Runtime::kForInEnumerate);
964 PrepareForBailoutForId(stmt->EnumId(), BailoutState::TOS_REGISTER);
965
966 // If we got a map from the runtime call, we can do a fast
967 // modification check. Otherwise, we got a fixed array, and we have
968 // to do a slow check.
969 Label fixed_array;
970 __ LoadP(r4, FieldMemOperand(r2, HeapObject::kMapOffset));
971 __ CompareRoot(r4, Heap::kMetaMapRootIndex);
972 __ bne(&fixed_array);
973
974 // We got a map in register r2. Get the enumeration cache from it.
975 Label no_descriptors;
976 __ bind(&use_cache);
977
978 __ EnumLength(r3, r2);
979 __ CmpSmiLiteral(r3, Smi::FromInt(0), r0);
980 __ beq(&no_descriptors, Label::kNear);
981
982 __ LoadInstanceDescriptors(r2, r4);
983 __ LoadP(r4, FieldMemOperand(r4, DescriptorArray::kEnumCacheOffset));
984 __ LoadP(r4,
985 FieldMemOperand(r4, DescriptorArray::kEnumCacheBridgeCacheOffset));
986
987 // Set up the four remaining stack slots.
988 __ push(r2); // Map.
989 __ LoadSmiLiteral(r2, Smi::FromInt(0));
990 // Push enumeration cache, enumeration cache length (as smi) and zero.
991 __ Push(r4, r3, r2);
992 __ b(&loop);
993
994 __ bind(&no_descriptors);
995 __ Drop(1);
996 __ b(&exit);
997
998 // We got a fixed array in register r2. Iterate through that.
999 __ bind(&fixed_array);
1000
1001 __ LoadSmiLiteral(r3, Smi::FromInt(1)); // Smi(1) indicates slow check
1002 __ Push(r3, r2); // Smi and array
1003 __ LoadP(r3, FieldMemOperand(r2, FixedArray::kLengthOffset));
1004 __ Push(r3); // Fixed array length (as smi).
1005 PrepareForBailoutForId(stmt->PrepareId(), BailoutState::NO_REGISTERS);
1006 __ LoadSmiLiteral(r2, Smi::FromInt(0));
1007 __ Push(r2); // Initial index.
1008
1009 // Generate code for doing the condition check.
1010 __ bind(&loop);
1011 SetExpressionAsStatementPosition(stmt->each());
1012
1013 // Load the current count to r2, load the length to r3.
1014 __ LoadP(r2, MemOperand(sp, 0 * kPointerSize));
1015 __ LoadP(r3, MemOperand(sp, 1 * kPointerSize));
1016 __ CmpLogicalP(r2, r3); // Compare to the array length.
1017 __ bge(loop_statement.break_label());
1018
1019 // Get the current entry of the array into register r5.
1020 __ LoadP(r4, MemOperand(sp, 2 * kPointerSize));
1021 __ AddP(r4, Operand(FixedArray::kHeaderSize - kHeapObjectTag));
1022 __ SmiToPtrArrayOffset(r5, r2);
1023 __ LoadP(r5, MemOperand(r5, r4));
1024
1025 // Get the expected map from the stack or a smi in the
1026 // permanent slow case into register r4.
1027 __ LoadP(r4, MemOperand(sp, 3 * kPointerSize));
1028
1029 // Check if the expected map still matches that of the enumerable.
1030 // If not, we may have to filter the key.
1031 Label update_each;
1032 __ LoadP(r3, MemOperand(sp, 4 * kPointerSize));
1033 __ LoadP(r6, FieldMemOperand(r3, HeapObject::kMapOffset));
1034 __ CmpP(r6, r4);
1035 __ beq(&update_each);
1036
1037 // We need to filter the key, record slow-path here.
1038 int const vector_index = SmiFromSlot(slot)->value();
1039 __ EmitLoadTypeFeedbackVector(r2);
1040 __ mov(r4, Operand(TypeFeedbackVector::MegamorphicSentinel(isolate())));
1041 __ StoreP(
1042 r4, FieldMemOperand(r2, FixedArray::OffsetOfElementAt(vector_index)), r0);
1043
1044 // Convert the entry to a string or (smi) 0 if it isn't a property
1045 // any more. If the property has been removed while iterating, we
1046 // just skip it.
1047 __ Push(r3, r5); // Enumerable and current entry.
1048 __ CallRuntime(Runtime::kForInFilter);
1049 PrepareForBailoutForId(stmt->FilterId(), BailoutState::TOS_REGISTER);
1050 __ LoadRR(r5, r2);
1051 __ LoadRoot(r0, Heap::kUndefinedValueRootIndex);
1052 __ CmpP(r2, r0);
1053 __ beq(loop_statement.continue_label());
1054
1055 // Update the 'each' property or variable from the possibly filtered
1056 // entry in register r5.
1057 __ bind(&update_each);
1058 __ LoadRR(result_register(), r5);
1059 // Perform the assignment as if via '='.
1060 {
1061 EffectContext context(this);
1062 EmitAssignment(stmt->each(), stmt->EachFeedbackSlot());
1063 PrepareForBailoutForId(stmt->AssignmentId(), BailoutState::NO_REGISTERS);
1064 }
1065
1066 // Both Crankshaft and Turbofan expect BodyId to be right before stmt->body().
1067 PrepareForBailoutForId(stmt->BodyId(), BailoutState::NO_REGISTERS);
1068 // Generate code for the body of the loop.
1069 Visit(stmt->body());
1070
1071 // Generate code for going to the next element by incrementing
1072 // the index (smi) stored on top of the stack.
1073 __ bind(loop_statement.continue_label());
1074 __ pop(r2);
1075 __ AddSmiLiteral(r2, r2, Smi::FromInt(1), r0);
1076 __ push(r2);
1077
1078 EmitBackEdgeBookkeeping(stmt, &loop);
1079 __ b(&loop);
1080
1081 // Remove the pointers stored on the stack.
1082 __ bind(loop_statement.break_label());
1083 DropOperands(5);
1084
1085 // Exit and decrement the loop depth.
1086 PrepareForBailoutForId(stmt->ExitId(), BailoutState::NO_REGISTERS);
1087 __ bind(&exit);
1088 decrement_loop_depth();
1089}
1090
1091void FullCodeGenerator::EmitSetHomeObject(Expression* initializer, int offset,
1092 FeedbackVectorSlot slot) {
1093 DCHECK(NeedsHomeObject(initializer));
1094 __ LoadP(StoreDescriptor::ReceiverRegister(), MemOperand(sp));
1095 __ mov(StoreDescriptor::NameRegister(),
1096 Operand(isolate()->factory()->home_object_symbol()));
1097 __ LoadP(StoreDescriptor::ValueRegister(),
1098 MemOperand(sp, offset * kPointerSize));
1099 EmitLoadStoreICSlot(slot);
1100 CallStoreIC();
1101}
1102
1103void FullCodeGenerator::EmitSetHomeObjectAccumulator(Expression* initializer,
1104 int offset,
1105 FeedbackVectorSlot slot) {
1106 DCHECK(NeedsHomeObject(initializer));
1107 __ Move(StoreDescriptor::ReceiverRegister(), r2);
1108 __ mov(StoreDescriptor::NameRegister(),
1109 Operand(isolate()->factory()->home_object_symbol()));
1110 __ LoadP(StoreDescriptor::ValueRegister(),
1111 MemOperand(sp, offset * kPointerSize));
1112 EmitLoadStoreICSlot(slot);
1113 CallStoreIC();
1114}
1115
1116void FullCodeGenerator::EmitLoadGlobalCheckExtensions(VariableProxy* proxy,
1117 TypeofMode typeof_mode,
1118 Label* slow) {
1119 Register current = cp;
1120 Register next = r3;
1121 Register temp = r4;
1122
1123 Scope* s = scope();
1124 while (s != NULL) {
1125 if (s->num_heap_slots() > 0) {
1126 if (s->calls_sloppy_eval()) {
1127 // Check that extension is "the hole".
1128 __ LoadP(temp, ContextMemOperand(current, Context::EXTENSION_INDEX));
1129 __ JumpIfNotRoot(temp, Heap::kTheHoleValueRootIndex, slow);
1130 }
1131 // Load next context in chain.
1132 __ LoadP(next, ContextMemOperand(current, Context::PREVIOUS_INDEX));
1133 // Walk the rest of the chain without clobbering cp.
1134 current = next;
1135 }
1136 // If no outer scope calls eval, we do not need to check more
1137 // context extensions.
1138 if (!s->outer_scope_calls_sloppy_eval() || s->is_eval_scope()) break;
1139 s = s->outer_scope();
1140 }
1141
1142 if (s->is_eval_scope()) {
1143 Label loop, fast;
1144 if (!current.is(next)) {
1145 __ Move(next, current);
1146 }
1147 __ bind(&loop);
1148 // Terminate at native context.
1149 __ LoadP(temp, FieldMemOperand(next, HeapObject::kMapOffset));
1150 __ CompareRoot(temp, Heap::kNativeContextMapRootIndex);
1151 __ beq(&fast, Label::kNear);
1152 // Check that extension is "the hole".
1153 __ LoadP(temp, ContextMemOperand(next, Context::EXTENSION_INDEX));
1154 __ JumpIfNotRoot(temp, Heap::kTheHoleValueRootIndex, slow);
1155 // Load next context in chain.
1156 __ LoadP(next, ContextMemOperand(next, Context::PREVIOUS_INDEX));
1157 __ b(&loop);
1158 __ bind(&fast);
1159 }
1160
1161 // All extension objects were empty and it is safe to use a normal global
1162 // load machinery.
1163 EmitGlobalVariableLoad(proxy, typeof_mode);
1164}
1165
1166MemOperand FullCodeGenerator::ContextSlotOperandCheckExtensions(Variable* var,
1167 Label* slow) {
1168 DCHECK(var->IsContextSlot());
1169 Register context = cp;
1170 Register next = r5;
1171 Register temp = r6;
1172
1173 for (Scope* s = scope(); s != var->scope(); s = s->outer_scope()) {
1174 if (s->num_heap_slots() > 0) {
1175 if (s->calls_sloppy_eval()) {
1176 // Check that extension is "the hole".
1177 __ LoadP(temp, ContextMemOperand(context, Context::EXTENSION_INDEX));
1178 __ JumpIfNotRoot(temp, Heap::kTheHoleValueRootIndex, slow);
1179 }
1180 __ LoadP(next, ContextMemOperand(context, Context::PREVIOUS_INDEX));
1181 // Walk the rest of the chain without clobbering cp.
1182 context = next;
1183 }
1184 }
1185 // Check that last extension is "the hole".
1186 __ LoadP(temp, ContextMemOperand(context, Context::EXTENSION_INDEX));
1187 __ JumpIfNotRoot(temp, Heap::kTheHoleValueRootIndex, slow);
1188
1189 // This function is used only for loads, not stores, so it's safe to
1190 // return a cp-based operand (the write barrier cannot be allowed to
1191 // destroy the cp register).
1192 return ContextMemOperand(context, var->index());
1193}
1194
1195void FullCodeGenerator::EmitDynamicLookupFastCase(VariableProxy* proxy,
1196 TypeofMode typeof_mode,
1197 Label* slow, Label* done) {
1198 // Generate fast-case code for variables that might be shadowed by
1199 // eval-introduced variables. Eval is used a lot without
1200 // introducing variables. In those cases, we do not want to
1201 // perform a runtime call for all variables in the scope
1202 // containing the eval.
1203 Variable* var = proxy->var();
1204 if (var->mode() == DYNAMIC_GLOBAL) {
1205 EmitLoadGlobalCheckExtensions(proxy, typeof_mode, slow);
1206 __ b(done);
1207 } else if (var->mode() == DYNAMIC_LOCAL) {
1208 Variable* local = var->local_if_not_shadowed();
1209 __ LoadP(r2, ContextSlotOperandCheckExtensions(local, slow));
1210 if (local->mode() == LET || local->mode() == CONST) {
1211 __ CompareRoot(r2, Heap::kTheHoleValueRootIndex);
1212 __ bne(done);
1213 __ mov(r2, Operand(var->name()));
1214 __ push(r2);
1215 __ CallRuntime(Runtime::kThrowReferenceError);
1216 }
1217 __ b(done);
1218 }
1219}
1220
1221void FullCodeGenerator::EmitGlobalVariableLoad(VariableProxy* proxy,
1222 TypeofMode typeof_mode) {
1223#ifdef DEBUG
1224 Variable* var = proxy->var();
1225 DCHECK(var->IsUnallocatedOrGlobalSlot() ||
1226 (var->IsLookupSlot() && var->mode() == DYNAMIC_GLOBAL));
1227#endif
1228 __ mov(LoadGlobalDescriptor::SlotRegister(),
1229 Operand(SmiFromSlot(proxy->VariableFeedbackSlot())));
1230 CallLoadGlobalIC(typeof_mode);
1231}
1232
1233void FullCodeGenerator::EmitVariableLoad(VariableProxy* proxy,
1234 TypeofMode typeof_mode) {
1235 // Record position before possible IC call.
1236 SetExpressionPosition(proxy);
1237 PrepareForBailoutForId(proxy->BeforeId(), BailoutState::NO_REGISTERS);
1238 Variable* var = proxy->var();
1239
1240 // Three cases: global variables, lookup variables, and all other types of
1241 // variables.
1242 switch (var->location()) {
1243 case VariableLocation::GLOBAL:
1244 case VariableLocation::UNALLOCATED: {
1245 Comment cmnt(masm_, "[ Global variable");
1246 EmitGlobalVariableLoad(proxy, typeof_mode);
1247 context()->Plug(r2);
1248 break;
1249 }
1250
1251 case VariableLocation::PARAMETER:
1252 case VariableLocation::LOCAL:
1253 case VariableLocation::CONTEXT: {
1254 DCHECK_EQ(NOT_INSIDE_TYPEOF, typeof_mode);
1255 Comment cmnt(masm_, var->IsContextSlot() ? "[ Context variable"
1256 : "[ Stack variable");
1257 if (NeedsHoleCheckForLoad(proxy)) {
1258 Label done;
1259 // Let and const need a read barrier.
1260 GetVar(r2, var);
1261 __ CompareRoot(r2, Heap::kTheHoleValueRootIndex);
1262 __ bne(&done);
1263 if (var->mode() == LET || var->mode() == CONST) {
1264 // Throw a reference error when using an uninitialized let/const
1265 // binding in harmony mode.
1266 __ mov(r2, Operand(var->name()));
1267 __ push(r2);
1268 __ CallRuntime(Runtime::kThrowReferenceError);
1269 }
1270 __ bind(&done);
1271 context()->Plug(r2);
1272 break;
1273 }
1274 context()->Plug(var);
1275 break;
1276 }
1277
1278 case VariableLocation::LOOKUP: {
1279 Comment cmnt(masm_, "[ Lookup variable");
1280 Label done, slow;
1281 // Generate code for loading from variables potentially shadowed
1282 // by eval-introduced variables.
1283 EmitDynamicLookupFastCase(proxy, typeof_mode, &slow, &done);
1284 __ bind(&slow);
1285 __ Push(var->name());
1286 Runtime::FunctionId function_id =
1287 typeof_mode == NOT_INSIDE_TYPEOF
1288 ? Runtime::kLoadLookupSlot
1289 : Runtime::kLoadLookupSlotInsideTypeof;
1290 __ CallRuntime(function_id);
1291 __ bind(&done);
1292 context()->Plug(r2);
1293 }
1294 }
1295}
1296
1297void FullCodeGenerator::EmitAccessor(ObjectLiteralProperty* property) {
1298 Expression* expression = (property == NULL) ? NULL : property->value();
1299 if (expression == NULL) {
1300 __ LoadRoot(r3, Heap::kNullValueRootIndex);
1301 PushOperand(r3);
1302 } else {
1303 VisitForStackValue(expression);
1304 if (NeedsHomeObject(expression)) {
1305 DCHECK(property->kind() == ObjectLiteral::Property::GETTER ||
1306 property->kind() == ObjectLiteral::Property::SETTER);
1307 int offset = property->kind() == ObjectLiteral::Property::GETTER ? 2 : 3;
1308 EmitSetHomeObject(expression, offset, property->GetSlot());
1309 }
1310 }
1311}
1312
1313void FullCodeGenerator::VisitObjectLiteral(ObjectLiteral* expr) {
1314 Comment cmnt(masm_, "[ ObjectLiteral");
1315
1316 Handle<FixedArray> constant_properties = expr->constant_properties();
1317 __ LoadP(r5, MemOperand(fp, JavaScriptFrameConstants::kFunctionOffset));
1318 __ LoadSmiLiteral(r4, Smi::FromInt(expr->literal_index()));
1319 __ mov(r3, Operand(constant_properties));
1320 int flags = expr->ComputeFlags();
1321 __ LoadSmiLiteral(r2, Smi::FromInt(flags));
1322 if (MustCreateObjectLiteralWithRuntime(expr)) {
1323 __ Push(r5, r4, r3, r2);
1324 __ CallRuntime(Runtime::kCreateObjectLiteral);
1325 } else {
1326 FastCloneShallowObjectStub stub(isolate(), expr->properties_count());
1327 __ CallStub(&stub);
1328 RestoreContext();
1329 }
1330 PrepareForBailoutForId(expr->CreateLiteralId(), BailoutState::TOS_REGISTER);
1331
1332 // If result_saved is true the result is on top of the stack. If
1333 // result_saved is false the result is in r2.
1334 bool result_saved = false;
1335
1336 AccessorTable accessor_table(zone());
1337 int property_index = 0;
1338 for (; property_index < expr->properties()->length(); property_index++) {
1339 ObjectLiteral::Property* property = expr->properties()->at(property_index);
1340 if (property->is_computed_name()) break;
1341 if (property->IsCompileTimeValue()) continue;
1342
1343 Literal* key = property->key()->AsLiteral();
1344 Expression* value = property->value();
1345 if (!result_saved) {
1346 PushOperand(r2); // Save result on stack
1347 result_saved = true;
1348 }
1349 switch (property->kind()) {
1350 case ObjectLiteral::Property::CONSTANT:
1351 UNREACHABLE();
1352 case ObjectLiteral::Property::MATERIALIZED_LITERAL:
1353 DCHECK(!CompileTimeValue::IsCompileTimeValue(property->value()));
1354 // Fall through.
1355 case ObjectLiteral::Property::COMPUTED:
1356 // It is safe to use [[Put]] here because the boilerplate already
1357 // contains computed properties with an uninitialized value.
1358 if (key->value()->IsInternalizedString()) {
1359 if (property->emit_store()) {
1360 VisitForAccumulatorValue(value);
1361 DCHECK(StoreDescriptor::ValueRegister().is(r2));
1362 __ mov(StoreDescriptor::NameRegister(), Operand(key->value()));
1363 __ LoadP(StoreDescriptor::ReceiverRegister(), MemOperand(sp));
1364 EmitLoadStoreICSlot(property->GetSlot(0));
1365 CallStoreIC();
1366 PrepareForBailoutForId(key->id(), BailoutState::NO_REGISTERS);
1367
1368 if (NeedsHomeObject(value)) {
1369 EmitSetHomeObjectAccumulator(value, 0, property->GetSlot(1));
1370 }
1371 } else {
1372 VisitForEffect(value);
1373 }
1374 break;
1375 }
1376 // Duplicate receiver on stack.
1377 __ LoadP(r2, MemOperand(sp));
1378 PushOperand(r2);
1379 VisitForStackValue(key);
1380 VisitForStackValue(value);
1381 if (property->emit_store()) {
1382 if (NeedsHomeObject(value)) {
1383 EmitSetHomeObject(value, 2, property->GetSlot());
1384 }
1385 __ LoadSmiLiteral(r2, Smi::FromInt(SLOPPY)); // PropertyAttributes
1386 PushOperand(r2);
1387 CallRuntimeWithOperands(Runtime::kSetProperty);
1388 } else {
1389 DropOperands(3);
1390 }
1391 break;
1392 case ObjectLiteral::Property::PROTOTYPE:
1393 // Duplicate receiver on stack.
1394 __ LoadP(r2, MemOperand(sp));
1395 PushOperand(r2);
1396 VisitForStackValue(value);
1397 DCHECK(property->emit_store());
1398 CallRuntimeWithOperands(Runtime::kInternalSetPrototype);
1399 PrepareForBailoutForId(expr->GetIdForPropertySet(property_index),
1400 BailoutState::NO_REGISTERS);
1401 break;
1402 case ObjectLiteral::Property::GETTER:
1403 if (property->emit_store()) {
1404 AccessorTable::Iterator it = accessor_table.lookup(key);
1405 it->second->bailout_id = expr->GetIdForPropertySet(property_index);
1406 it->second->getter = property;
1407 }
1408 break;
1409 case ObjectLiteral::Property::SETTER:
1410 if (property->emit_store()) {
1411 AccessorTable::Iterator it = accessor_table.lookup(key);
1412 it->second->bailout_id = expr->GetIdForPropertySet(property_index);
1413 it->second->setter = property;
1414 }
1415 break;
1416 }
1417 }
1418
1419 // Emit code to define accessors, using only a single call to the runtime for
1420 // each pair of corresponding getters and setters.
1421 for (AccessorTable::Iterator it = accessor_table.begin();
1422 it != accessor_table.end(); ++it) {
1423 __ LoadP(r2, MemOperand(sp)); // Duplicate receiver.
1424 PushOperand(r2);
1425 VisitForStackValue(it->first);
1426 EmitAccessor(it->second->getter);
1427 EmitAccessor(it->second->setter);
1428 __ LoadSmiLiteral(r2, Smi::FromInt(NONE));
1429 PushOperand(r2);
1430 CallRuntimeWithOperands(Runtime::kDefineAccessorPropertyUnchecked);
1431 PrepareForBailoutForId(it->second->bailout_id, BailoutState::NO_REGISTERS);
1432 }
1433
1434 // Object literals have two parts. The "static" part on the left contains no
1435 // computed property names, and so we can compute its map ahead of time; see
1436 // runtime.cc::CreateObjectLiteralBoilerplate. The second "dynamic" part
1437 // starts with the first computed property name, and continues with all
1438 // properties to its right. All the code from above initializes the static
1439 // component of the object literal, and arranges for the map of the result to
1440 // reflect the static order in which the keys appear. For the dynamic
1441 // properties, we compile them into a series of "SetOwnProperty" runtime
1442 // calls. This will preserve insertion order.
1443 for (; property_index < expr->properties()->length(); property_index++) {
1444 ObjectLiteral::Property* property = expr->properties()->at(property_index);
1445
1446 Expression* value = property->value();
1447 if (!result_saved) {
1448 PushOperand(r2); // Save result on the stack
1449 result_saved = true;
1450 }
1451
1452 __ LoadP(r2, MemOperand(sp)); // Duplicate receiver.
1453 PushOperand(r2);
1454
1455 if (property->kind() == ObjectLiteral::Property::PROTOTYPE) {
1456 DCHECK(!property->is_computed_name());
1457 VisitForStackValue(value);
1458 DCHECK(property->emit_store());
1459 CallRuntimeWithOperands(Runtime::kInternalSetPrototype);
1460 PrepareForBailoutForId(expr->GetIdForPropertySet(property_index),
Ben Murdochc5610432016-08-08 18:44:38 +01001461 BailoutState::NO_REGISTERS);
Ben Murdochda12d292016-06-02 14:46:10 +01001462 } else {
1463 EmitPropertyKey(property, expr->GetIdForPropertyName(property_index));
1464 VisitForStackValue(value);
1465 if (NeedsHomeObject(value)) {
1466 EmitSetHomeObject(value, 2, property->GetSlot());
1467 }
1468
1469 switch (property->kind()) {
1470 case ObjectLiteral::Property::CONSTANT:
1471 case ObjectLiteral::Property::MATERIALIZED_LITERAL:
1472 case ObjectLiteral::Property::COMPUTED:
1473 if (property->emit_store()) {
1474 PushOperand(Smi::FromInt(NONE));
1475 PushOperand(Smi::FromInt(property->NeedsSetFunctionName()));
1476 CallRuntimeWithOperands(Runtime::kDefineDataPropertyInLiteral);
Ben Murdoch61f157c2016-09-16 13:49:30 +01001477 PrepareForBailoutForId(expr->GetIdForPropertySet(property_index),
1478 BailoutState::NO_REGISTERS);
Ben Murdochda12d292016-06-02 14:46:10 +01001479 } else {
1480 DropOperands(3);
1481 }
1482 break;
1483
1484 case ObjectLiteral::Property::PROTOTYPE:
1485 UNREACHABLE();
1486 break;
1487
1488 case ObjectLiteral::Property::GETTER:
1489 PushOperand(Smi::FromInt(NONE));
1490 CallRuntimeWithOperands(Runtime::kDefineGetterPropertyUnchecked);
1491 break;
1492
1493 case ObjectLiteral::Property::SETTER:
1494 PushOperand(Smi::FromInt(NONE));
1495 CallRuntimeWithOperands(Runtime::kDefineSetterPropertyUnchecked);
1496 break;
1497 }
1498 }
1499 }
1500
1501 if (result_saved) {
1502 context()->PlugTOS();
1503 } else {
1504 context()->Plug(r2);
1505 }
1506}
1507
1508void FullCodeGenerator::VisitArrayLiteral(ArrayLiteral* expr) {
1509 Comment cmnt(masm_, "[ ArrayLiteral");
1510
1511 Handle<FixedArray> constant_elements = expr->constant_elements();
1512 bool has_fast_elements =
1513 IsFastObjectElementsKind(expr->constant_elements_kind());
1514 Handle<FixedArrayBase> constant_elements_values(
1515 FixedArrayBase::cast(constant_elements->get(1)));
1516
1517 AllocationSiteMode allocation_site_mode = TRACK_ALLOCATION_SITE;
1518 if (has_fast_elements && !FLAG_allocation_site_pretenuring) {
1519 // If the only customer of allocation sites is transitioning, then
1520 // we can turn it off if we don't have anywhere else to transition to.
1521 allocation_site_mode = DONT_TRACK_ALLOCATION_SITE;
1522 }
1523
1524 __ LoadP(r5, MemOperand(fp, JavaScriptFrameConstants::kFunctionOffset));
1525 __ LoadSmiLiteral(r4, Smi::FromInt(expr->literal_index()));
1526 __ mov(r3, Operand(constant_elements));
1527 if (MustCreateArrayLiteralWithRuntime(expr)) {
1528 __ LoadSmiLiteral(r2, Smi::FromInt(expr->ComputeFlags()));
1529 __ Push(r5, r4, r3, r2);
1530 __ CallRuntime(Runtime::kCreateArrayLiteral);
1531 } else {
1532 FastCloneShallowArrayStub stub(isolate(), allocation_site_mode);
1533 __ CallStub(&stub);
1534 }
Ben Murdochc5610432016-08-08 18:44:38 +01001535 PrepareForBailoutForId(expr->CreateLiteralId(), BailoutState::TOS_REGISTER);
Ben Murdochda12d292016-06-02 14:46:10 +01001536
1537 bool result_saved = false; // Is the result saved to the stack?
1538 ZoneList<Expression*>* subexprs = expr->values();
1539 int length = subexprs->length();
1540
1541 // Emit code to evaluate all the non-constant subexpressions and to store
1542 // them into the newly cloned array.
1543 int array_index = 0;
1544 for (; array_index < length; array_index++) {
1545 Expression* subexpr = subexprs->at(array_index);
1546 DCHECK(!subexpr->IsSpread());
1547 // If the subexpression is a literal or a simple materialized literal it
1548 // is already set in the cloned array.
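    // E.g. in [1, x, 2] the constants 1 and 2 come from the boilerplate and
    // are skipped here; only x needs an explicit keyed store.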
1549 if (CompileTimeValue::IsCompileTimeValue(subexpr)) continue;
1550
1551 if (!result_saved) {
1552 PushOperand(r2);
1553 result_saved = true;
1554 }
1555 VisitForAccumulatorValue(subexpr);
1556
1557 __ LoadSmiLiteral(StoreDescriptor::NameRegister(),
1558 Smi::FromInt(array_index));
1559 __ LoadP(StoreDescriptor::ReceiverRegister(), MemOperand(sp, 0));
1560 EmitLoadStoreICSlot(expr->LiteralFeedbackSlot());
1561 Handle<Code> ic =
1562 CodeFactory::KeyedStoreIC(isolate(), language_mode()).code();
1563 CallIC(ic);
1564
Ben Murdochc5610432016-08-08 18:44:38 +01001565 PrepareForBailoutForId(expr->GetIdForElement(array_index),
1566 BailoutState::NO_REGISTERS);
Ben Murdochda12d292016-06-02 14:46:10 +01001567 }
1568
1569  // In case the array literal contains spread expressions it has two parts. The
1570  // first part is the "static" array, whose elements have literal indices and
1571  // are handled above. The second part starts at the first spread expression
1572  // (inclusive); these elements get appended to the array. Note that the number
1573  // of elements an iterable produces is unknown ahead of time.
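  // E.g. for [a, b, ...c, d] the first part covers a and b; the remaining
  // elements are appended one at a time by the loop below, since their final
  // indices are not known statically.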
1574 if (array_index < length && result_saved) {
1575 PopOperand(r2);
1576 result_saved = false;
1577 }
1578 for (; array_index < length; array_index++) {
1579 Expression* subexpr = subexprs->at(array_index);
1580
1581 PushOperand(r2);
1582 DCHECK(!subexpr->IsSpread());
1583 VisitForStackValue(subexpr);
1584 CallRuntimeWithOperands(Runtime::kAppendElement);
1585
Ben Murdochc5610432016-08-08 18:44:38 +01001586 PrepareForBailoutForId(expr->GetIdForElement(array_index),
1587 BailoutState::NO_REGISTERS);
Ben Murdochda12d292016-06-02 14:46:10 +01001588 }
1589
1590 if (result_saved) {
1591 context()->PlugTOS();
1592 } else {
1593 context()->Plug(r2);
1594 }
1595}
1596
1597void FullCodeGenerator::VisitAssignment(Assignment* expr) {
1598 DCHECK(expr->target()->IsValidReferenceExpressionOrThis());
1599
1600 Comment cmnt(masm_, "[ Assignment");
Ben Murdochda12d292016-06-02 14:46:10 +01001601
1602 Property* property = expr->target()->AsProperty();
1603 LhsKind assign_type = Property::GetAssignType(property);
1604
1605 // Evaluate LHS expression.
1606 switch (assign_type) {
1607 case VARIABLE:
1608 // Nothing to do here.
1609 break;
1610 case NAMED_PROPERTY:
1611 if (expr->is_compound()) {
1612 // We need the receiver both on the stack and in the register.
1613 VisitForStackValue(property->obj());
1614 __ LoadP(LoadDescriptor::ReceiverRegister(), MemOperand(sp, 0));
1615 } else {
1616 VisitForStackValue(property->obj());
1617 }
1618 break;
1619 case NAMED_SUPER_PROPERTY:
1620 VisitForStackValue(
1621 property->obj()->AsSuperPropertyReference()->this_var());
1622 VisitForAccumulatorValue(
1623 property->obj()->AsSuperPropertyReference()->home_object());
1624 PushOperand(result_register());
1625 if (expr->is_compound()) {
1626 const Register scratch = r3;
1627 __ LoadP(scratch, MemOperand(sp, kPointerSize));
1628 PushOperands(scratch, result_register());
1629 }
1630 break;
1631 case KEYED_SUPER_PROPERTY: {
1632 const Register scratch = r3;
1633 VisitForStackValue(
1634 property->obj()->AsSuperPropertyReference()->this_var());
1635 VisitForAccumulatorValue(
1636 property->obj()->AsSuperPropertyReference()->home_object());
1637 __ LoadRR(scratch, result_register());
1638 VisitForAccumulatorValue(property->key());
1639 PushOperands(scratch, result_register());
1640 if (expr->is_compound()) {
1641 const Register scratch1 = r4;
1642 __ LoadP(scratch1, MemOperand(sp, 2 * kPointerSize));
1643 PushOperands(scratch1, scratch, result_register());
1644 }
1645 break;
1646 }
1647 case KEYED_PROPERTY:
1648 if (expr->is_compound()) {
1649 VisitForStackValue(property->obj());
1650 VisitForStackValue(property->key());
1651 __ LoadP(LoadDescriptor::ReceiverRegister(),
1652 MemOperand(sp, 1 * kPointerSize));
1653 __ LoadP(LoadDescriptor::NameRegister(), MemOperand(sp, 0));
1654 } else {
1655 VisitForStackValue(property->obj());
1656 VisitForStackValue(property->key());
1657 }
1658 break;
1659 }
1660
1661 // For compound assignments we need another deoptimization point after the
1662 // variable/property load.
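  // E.g. for "o.x += y" the load of o.x gets its own bailout point so that
  // deoptimized code can resume right after the load, with the loaded value
  // in the accumulator.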
1663 if (expr->is_compound()) {
1664 {
1665 AccumulatorValueContext context(this);
1666 switch (assign_type) {
1667 case VARIABLE:
1668 EmitVariableLoad(expr->target()->AsVariableProxy());
Ben Murdochc5610432016-08-08 18:44:38 +01001669 PrepareForBailout(expr->target(), BailoutState::TOS_REGISTER);
Ben Murdochda12d292016-06-02 14:46:10 +01001670 break;
1671 case NAMED_PROPERTY:
1672 EmitNamedPropertyLoad(property);
Ben Murdochc5610432016-08-08 18:44:38 +01001673 PrepareForBailoutForId(property->LoadId(),
1674 BailoutState::TOS_REGISTER);
Ben Murdochda12d292016-06-02 14:46:10 +01001675 break;
1676 case NAMED_SUPER_PROPERTY:
1677 EmitNamedSuperPropertyLoad(property);
Ben Murdochc5610432016-08-08 18:44:38 +01001678 PrepareForBailoutForId(property->LoadId(),
1679 BailoutState::TOS_REGISTER);
Ben Murdochda12d292016-06-02 14:46:10 +01001680 break;
1681 case KEYED_SUPER_PROPERTY:
1682 EmitKeyedSuperPropertyLoad(property);
Ben Murdochc5610432016-08-08 18:44:38 +01001683 PrepareForBailoutForId(property->LoadId(),
1684 BailoutState::TOS_REGISTER);
Ben Murdochda12d292016-06-02 14:46:10 +01001685 break;
1686 case KEYED_PROPERTY:
1687 EmitKeyedPropertyLoad(property);
Ben Murdochc5610432016-08-08 18:44:38 +01001688 PrepareForBailoutForId(property->LoadId(),
1689 BailoutState::TOS_REGISTER);
Ben Murdochda12d292016-06-02 14:46:10 +01001690 break;
1691 }
1692 }
1693
1694 Token::Value op = expr->binary_op();
1695 PushOperand(r2); // Left operand goes on the stack.
1696 VisitForAccumulatorValue(expr->value());
1697
1698 AccumulatorValueContext context(this);
1699 if (ShouldInlineSmiCase(op)) {
1700 EmitInlineSmiBinaryOp(expr->binary_operation(), op, expr->target(),
1701 expr->value());
1702 } else {
1703 EmitBinaryOp(expr->binary_operation(), op);
1704 }
1705
1706 // Deoptimization point in case the binary operation may have side effects.
Ben Murdochc5610432016-08-08 18:44:38 +01001707 PrepareForBailout(expr->binary_operation(), BailoutState::TOS_REGISTER);
Ben Murdochda12d292016-06-02 14:46:10 +01001708 } else {
1709 VisitForAccumulatorValue(expr->value());
1710 }
1711
1712 SetExpressionPosition(expr);
1713
1714 // Store the value.
1715 switch (assign_type) {
1716 case VARIABLE:
1717 EmitVariableAssignment(expr->target()->AsVariableProxy()->var(),
1718 expr->op(), expr->AssignmentSlot());
Ben Murdochc5610432016-08-08 18:44:38 +01001719 PrepareForBailoutForId(expr->AssignmentId(), BailoutState::TOS_REGISTER);
Ben Murdochda12d292016-06-02 14:46:10 +01001720 context()->Plug(r2);
1721 break;
1722 case NAMED_PROPERTY:
1723 EmitNamedPropertyAssignment(expr);
1724 break;
1725 case NAMED_SUPER_PROPERTY:
1726 EmitNamedSuperPropertyStore(property);
1727 context()->Plug(r2);
1728 break;
1729 case KEYED_SUPER_PROPERTY:
1730 EmitKeyedSuperPropertyStore(property);
1731 context()->Plug(r2);
1732 break;
1733 case KEYED_PROPERTY:
1734 EmitKeyedPropertyAssignment(expr);
1735 break;
1736 }
1737}
1738
1739void FullCodeGenerator::VisitYield(Yield* expr) {
1740 Comment cmnt(masm_, "[ Yield");
1741 SetExpressionPosition(expr);
1742
1743 // Evaluate yielded value first; the initial iterator definition depends on
1744 // this. It stays on the stack while we update the iterator.
1745 VisitForStackValue(expr->expression());
1746
Ben Murdochc5610432016-08-08 18:44:38 +01001747 Label suspend, continuation, post_runtime, resume, exception;
Ben Murdochda12d292016-06-02 14:46:10 +01001748
1749 __ b(&suspend);
1750 __ bind(&continuation);
Ben Murdochc5610432016-08-08 18:44:38 +01001751 // When we arrive here, r2 holds the generator object.
Ben Murdochda12d292016-06-02 14:46:10 +01001752 __ RecordGeneratorContinuation();
Ben Murdochc5610432016-08-08 18:44:38 +01001753 __ LoadP(r3, FieldMemOperand(r2, JSGeneratorObject::kResumeModeOffset));
Ben Murdoch61f157c2016-09-16 13:49:30 +01001754 __ LoadP(r2, FieldMemOperand(r2, JSGeneratorObject::kInputOrDebugPosOffset));
Ben Murdochc5610432016-08-08 18:44:38 +01001755 STATIC_ASSERT(JSGeneratorObject::kNext < JSGeneratorObject::kReturn);
1756 STATIC_ASSERT(JSGeneratorObject::kThrow > JSGeneratorObject::kReturn);
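  // r3 now holds the resume mode: values below kReturn (i.e. kNext) resume
  // the generator, kReturn wraps the input into an iterator result and
  // returns, and values above kReturn (i.e. kThrow) rethrow the input.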
1757 __ CmpSmiLiteral(r3, Smi::FromInt(JSGeneratorObject::kReturn), r0);
1758 __ blt(&resume);
1759 __ Push(result_register());
1760 __ bgt(&exception);
Ben Murdochda12d292016-06-02 14:46:10 +01001761 EmitCreateIteratorResult(true);
1762 EmitUnwindAndReturn();
1763
Ben Murdochc5610432016-08-08 18:44:38 +01001764 __ bind(&exception);
1765 __ CallRuntime(Runtime::kThrow);
1766
Ben Murdochda12d292016-06-02 14:46:10 +01001767 __ bind(&suspend);
1768 OperandStackDepthIncrement(1); // Not popped on this path.
1769 VisitForAccumulatorValue(expr->generator_object());
1770 DCHECK(continuation.pos() > 0 && Smi::IsValid(continuation.pos()));
1771 __ LoadSmiLiteral(r3, Smi::FromInt(continuation.pos()));
1772 __ StoreP(r3, FieldMemOperand(r2, JSGeneratorObject::kContinuationOffset),
1773 r0);
1774 __ StoreP(cp, FieldMemOperand(r2, JSGeneratorObject::kContextOffset), r0);
1775 __ LoadRR(r3, cp);
1776 __ RecordWriteField(r2, JSGeneratorObject::kContextOffset, r3, r4,
1777 kLRHasBeenSaved, kDontSaveFPRegs);
1778 __ AddP(r3, fp, Operand(StandardFrameConstants::kExpressionsOffset));
1779 __ CmpP(sp, r3);
1780 __ beq(&post_runtime);
1781 __ push(r2); // generator object
1782 __ CallRuntime(Runtime::kSuspendJSGeneratorObject, 1);
Ben Murdochc5610432016-08-08 18:44:38 +01001783 RestoreContext();
Ben Murdochda12d292016-06-02 14:46:10 +01001784 __ bind(&post_runtime);
1785 PopOperand(result_register());
1786 EmitReturnSequence();
1787
1788 __ bind(&resume);
1789 context()->Plug(result_register());
1790}
1791
Ben Murdochda12d292016-06-02 14:46:10 +01001792void FullCodeGenerator::PushOperands(Register reg1, Register reg2) {
1793 OperandStackDepthIncrement(2);
1794 __ Push(reg1, reg2);
1795}
1796
1797void FullCodeGenerator::PushOperands(Register reg1, Register reg2,
1798 Register reg3) {
1799 OperandStackDepthIncrement(3);
1800 __ Push(reg1, reg2, reg3);
1801}
1802
1803void FullCodeGenerator::PushOperands(Register reg1, Register reg2,
1804 Register reg3, Register reg4) {
1805 OperandStackDepthIncrement(4);
1806 __ Push(reg1, reg2, reg3, reg4);
1807}
1808
1809void FullCodeGenerator::PopOperands(Register reg1, Register reg2) {
1810 OperandStackDepthDecrement(2);
1811 __ Pop(reg1, reg2);
1812}
1813
1814void FullCodeGenerator::EmitOperandStackDepthCheck() {
1815 if (FLAG_debug_code) {
1816 int expected_diff = StandardFrameConstants::kFixedFrameSizeFromFp +
1817 operand_stack_depth_ * kPointerSize;
1818 __ SubP(r2, fp, sp);
1819 __ CmpP(r2, Operand(expected_diff));
1820 __ Assert(eq, kUnexpectedStackDepth);
1821 }
1822}
1823
1824void FullCodeGenerator::EmitCreateIteratorResult(bool done) {
1825 Label allocate, done_allocate;
1826
Ben Murdochc5610432016-08-08 18:44:38 +01001827 __ Allocate(JSIteratorResult::kSize, r2, r4, r5, &allocate,
1828 NO_ALLOCATION_FLAGS);
Ben Murdochda12d292016-06-02 14:46:10 +01001829 __ b(&done_allocate);
1830
1831 __ bind(&allocate);
1832 __ Push(Smi::FromInt(JSIteratorResult::kSize));
1833 __ CallRuntime(Runtime::kAllocateInNewSpace);
1834
1835 __ bind(&done_allocate);
1836 __ LoadNativeContextSlot(Context::ITERATOR_RESULT_MAP_INDEX, r3);
1837 PopOperand(r4);
1838 __ LoadRoot(r5,
1839 done ? Heap::kTrueValueRootIndex : Heap::kFalseValueRootIndex);
1840 __ LoadRoot(r6, Heap::kEmptyFixedArrayRootIndex);
1841 __ StoreP(r3, FieldMemOperand(r2, HeapObject::kMapOffset), r0);
1842 __ StoreP(r6, FieldMemOperand(r2, JSObject::kPropertiesOffset), r0);
1843 __ StoreP(r6, FieldMemOperand(r2, JSObject::kElementsOffset), r0);
1844 __ StoreP(r4, FieldMemOperand(r2, JSIteratorResult::kValueOffset), r0);
1845 __ StoreP(r5, FieldMemOperand(r2, JSIteratorResult::kDoneOffset), r0);
1846}
1847
1848void FullCodeGenerator::EmitInlineSmiBinaryOp(BinaryOperation* expr,
1849 Token::Value op,
1850 Expression* left_expr,
1851 Expression* right_expr) {
1852 Label done, smi_case, stub_call;
1853
1854 Register scratch1 = r4;
1855 Register scratch2 = r5;
1856
1857 // Get the arguments.
1858 Register left = r3;
1859 Register right = r2;
1860 PopOperand(left);
1861
1862 // Perform combined smi check on both operands.
1863 __ LoadRR(scratch1, right);
1864 __ OrP(scratch1, left);
1865 STATIC_ASSERT(kSmiTag == 0);
1866 JumpPatchSite patch_site(masm_);
1867 patch_site.EmitJumpIfSmi(scratch1, &smi_case);
1868
1869 __ bind(&stub_call);
1870 Handle<Code> code = CodeFactory::BinaryOpIC(isolate(), op).code();
1871 CallIC(code, expr->BinaryOperationFeedbackId());
1872 patch_site.EmitPatchInfo();
1873 __ b(&done);
1874
1875 __ bind(&smi_case);
1876 // Smi case. This code works the same way as the smi-smi case in the type
1877 // recording binary operation stub.
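  // E.g. for "a + b" where both operands are smis at run time, the addition
  // is done inline with an overflow check; any non-smi or overflowing case
  // branches to &stub_call and goes through the BinaryOpIC instead.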
1878 switch (op) {
1879 case Token::SAR:
1880 __ GetLeastBitsFromSmi(scratch1, right, 5);
1881 __ ShiftRightArithP(right, left, scratch1);
1882 __ ClearRightImm(right, right, Operand(kSmiTagSize + kSmiShiftSize));
1883 break;
1884 case Token::SHL: {
1885 __ GetLeastBitsFromSmi(scratch2, right, 5);
1886#if V8_TARGET_ARCH_S390X
1887 __ ShiftLeftP(right, left, scratch2);
1888#else
1889 __ SmiUntag(scratch1, left);
1890 __ ShiftLeftP(scratch1, scratch1, scratch2);
1891 // Check that the *signed* result fits in a smi
1892 __ JumpIfNotSmiCandidate(scratch1, scratch2, &stub_call);
1893 __ SmiTag(right, scratch1);
1894#endif
1895 break;
1896 }
1897 case Token::SHR: {
1898 __ SmiUntag(scratch1, left);
1899 __ GetLeastBitsFromSmi(scratch2, right, 5);
1900 __ srl(scratch1, scratch2);
1901 // Unsigned shift is not allowed to produce a negative number.
1902 __ JumpIfNotUnsignedSmiCandidate(scratch1, r0, &stub_call);
1903 __ SmiTag(right, scratch1);
1904 break;
1905 }
1906 case Token::ADD: {
1907 __ AddAndCheckForOverflow(scratch1, left, right, scratch2, r0);
1908 __ BranchOnOverflow(&stub_call);
1909 __ LoadRR(right, scratch1);
1910 break;
1911 }
1912 case Token::SUB: {
1913 __ SubAndCheckForOverflow(scratch1, left, right, scratch2, r0);
1914 __ BranchOnOverflow(&stub_call);
1915 __ LoadRR(right, scratch1);
1916 break;
1917 }
1918 case Token::MUL: {
1919 Label mul_zero;
1920#if V8_TARGET_ARCH_S390X
1921 // Remove tag from both operands.
1922 __ SmiUntag(ip, right);
1923 __ SmiUntag(scratch2, left);
1924 __ mr_z(scratch1, ip);
1925 // Check for overflowing the smi range - no overflow if higher 33 bits of
1926 // the result are identical.
1927 __ lr(ip, scratch2); // 32 bit load
1928 __ sra(ip, Operand(31));
1929 __ cr_z(ip, scratch1); // 32 bit compare
1930 __ bne(&stub_call);
1931#else
1932 __ SmiUntag(ip, right);
1933 __ LoadRR(scratch2, left); // load into low order of reg pair
1934 __ mr_z(scratch1, ip); // R4:R5 = R5 * ip
1935 // Check for overflowing the smi range - no overflow if higher 33 bits of
1936 // the result are identical.
1937 __ TestIfInt32(scratch1, scratch2, ip);
1938 __ bne(&stub_call);
1939#endif
1940 // Go slow on zero result to handle -0.
1941 __ chi(scratch2, Operand::Zero());
1942 __ beq(&mul_zero, Label::kNear);
1943#if V8_TARGET_ARCH_S390X
1944 __ SmiTag(right, scratch2);
1945#else
1946 __ LoadRR(right, scratch2);
1947#endif
1948 __ b(&done);
1949 // We need -0 if we were multiplying a negative number with 0 to get 0.
1950 // We know one of them was zero.
1951 __ bind(&mul_zero);
1952 __ AddP(scratch2, right, left);
1953 __ CmpP(scratch2, Operand::Zero());
1954 __ blt(&stub_call);
1955 __ LoadSmiLiteral(right, Smi::FromInt(0));
1956 break;
1957 }
1958 case Token::BIT_OR:
1959 __ OrP(right, left);
1960 break;
1961 case Token::BIT_AND:
1962 __ AndP(right, left);
1963 break;
1964 case Token::BIT_XOR:
1965 __ XorP(right, left);
1966 break;
1967 default:
1968 UNREACHABLE();
1969 }
1970
1971 __ bind(&done);
1972 context()->Plug(r2);
1973}
1974
1975void FullCodeGenerator::EmitClassDefineProperties(ClassLiteral* lit) {
1976 for (int i = 0; i < lit->properties()->length(); i++) {
1977 ObjectLiteral::Property* property = lit->properties()->at(i);
1978 Expression* value = property->value();
1979
1980 Register scratch = r3;
1981 if (property->is_static()) {
1982 __ LoadP(scratch, MemOperand(sp, kPointerSize)); // constructor
1983 } else {
1984 __ LoadP(scratch, MemOperand(sp, 0)); // prototype
1985 }
1986 PushOperand(scratch);
1987 EmitPropertyKey(property, lit->GetIdForProperty(i));
1988
1989    // The static prototype property is read-only. We handle the non-computed
1990    // property name case in the parser. Since this is the only case where we
1991    // need to check for an own read-only property, we special-case it here so
1992    // we do not need to perform the check for every property.
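    // E.g. class C { static ["prototype"]() {} } must fail with a TypeError
    // at run time, which is what kThrowIfStaticPrototype checks for.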
1993 if (property->is_static() && property->is_computed_name()) {
1994 __ CallRuntime(Runtime::kThrowIfStaticPrototype);
1995 __ push(r2);
1996 }
1997
1998 VisitForStackValue(value);
1999 if (NeedsHomeObject(value)) {
2000 EmitSetHomeObject(value, 2, property->GetSlot());
2001 }
2002
2003 switch (property->kind()) {
2004 case ObjectLiteral::Property::CONSTANT:
2005 case ObjectLiteral::Property::MATERIALIZED_LITERAL:
2006 case ObjectLiteral::Property::PROTOTYPE:
2007 UNREACHABLE();
2008 case ObjectLiteral::Property::COMPUTED:
2009 PushOperand(Smi::FromInt(DONT_ENUM));
2010 PushOperand(Smi::FromInt(property->NeedsSetFunctionName()));
2011 CallRuntimeWithOperands(Runtime::kDefineDataPropertyInLiteral);
2012 break;
2013
2014 case ObjectLiteral::Property::GETTER:
2015 PushOperand(Smi::FromInt(DONT_ENUM));
2016 CallRuntimeWithOperands(Runtime::kDefineGetterPropertyUnchecked);
2017 break;
2018
2019 case ObjectLiteral::Property::SETTER:
2020 PushOperand(Smi::FromInt(DONT_ENUM));
2021 CallRuntimeWithOperands(Runtime::kDefineSetterPropertyUnchecked);
2022 break;
2023
2024 default:
2025 UNREACHABLE();
2026 }
2027 }
2028}
2029
2030void FullCodeGenerator::EmitBinaryOp(BinaryOperation* expr, Token::Value op) {
2031 PopOperand(r3);
2032 Handle<Code> code = CodeFactory::BinaryOpIC(isolate(), op).code();
2033 JumpPatchSite patch_site(masm_); // unbound, signals no inlined smi code.
2034 CallIC(code, expr->BinaryOperationFeedbackId());
2035 patch_site.EmitPatchInfo();
2036 context()->Plug(r2);
2037}
2038
2039void FullCodeGenerator::EmitAssignment(Expression* expr,
2040 FeedbackVectorSlot slot) {
2041 DCHECK(expr->IsValidReferenceExpressionOrThis());
2042
2043 Property* prop = expr->AsProperty();
2044 LhsKind assign_type = Property::GetAssignType(prop);
2045
2046 switch (assign_type) {
2047 case VARIABLE: {
2048 Variable* var = expr->AsVariableProxy()->var();
2049 EffectContext context(this);
2050 EmitVariableAssignment(var, Token::ASSIGN, slot);
2051 break;
2052 }
2053 case NAMED_PROPERTY: {
2054 PushOperand(r2); // Preserve value.
2055 VisitForAccumulatorValue(prop->obj());
2056 __ Move(StoreDescriptor::ReceiverRegister(), r2);
2057 PopOperand(StoreDescriptor::ValueRegister()); // Restore value.
2058 __ mov(StoreDescriptor::NameRegister(),
2059 Operand(prop->key()->AsLiteral()->value()));
2060 EmitLoadStoreICSlot(slot);
2061 CallStoreIC();
2062 break;
2063 }
2064 case NAMED_SUPER_PROPERTY: {
2065 PushOperand(r2);
2066 VisitForStackValue(prop->obj()->AsSuperPropertyReference()->this_var());
2067 VisitForAccumulatorValue(
2068 prop->obj()->AsSuperPropertyReference()->home_object());
2069 // stack: value, this; r2: home_object
2070 Register scratch = r4;
2071 Register scratch2 = r5;
2072 __ LoadRR(scratch, result_register()); // home_object
2073 __ LoadP(r2, MemOperand(sp, kPointerSize)); // value
2074 __ LoadP(scratch2, MemOperand(sp, 0)); // this
2075 __ StoreP(scratch2, MemOperand(sp, kPointerSize)); // this
2076 __ StoreP(scratch, MemOperand(sp, 0)); // home_object
2077 // stack: this, home_object; r2: value
2078 EmitNamedSuperPropertyStore(prop);
2079 break;
2080 }
2081 case KEYED_SUPER_PROPERTY: {
2082 PushOperand(r2);
2083 VisitForStackValue(prop->obj()->AsSuperPropertyReference()->this_var());
2084 VisitForStackValue(
2085 prop->obj()->AsSuperPropertyReference()->home_object());
2086 VisitForAccumulatorValue(prop->key());
2087 Register scratch = r4;
2088 Register scratch2 = r5;
2089 __ LoadP(scratch2, MemOperand(sp, 2 * kPointerSize)); // value
2090      // stack: value, this, home_object; r2: key, r5: value
2091 __ LoadP(scratch, MemOperand(sp, kPointerSize)); // this
2092 __ StoreP(scratch, MemOperand(sp, 2 * kPointerSize));
2093 __ LoadP(scratch, MemOperand(sp, 0)); // home_object
2094 __ StoreP(scratch, MemOperand(sp, kPointerSize));
2095 __ StoreP(r2, MemOperand(sp, 0));
2096 __ Move(r2, scratch2);
2097 // stack: this, home_object, key; r2: value.
2098 EmitKeyedSuperPropertyStore(prop);
2099 break;
2100 }
2101 case KEYED_PROPERTY: {
2102 PushOperand(r2); // Preserve value.
2103 VisitForStackValue(prop->obj());
2104 VisitForAccumulatorValue(prop->key());
2105 __ Move(StoreDescriptor::NameRegister(), r2);
2106 PopOperands(StoreDescriptor::ValueRegister(),
2107 StoreDescriptor::ReceiverRegister());
2108 EmitLoadStoreICSlot(slot);
2109 Handle<Code> ic =
2110 CodeFactory::KeyedStoreIC(isolate(), language_mode()).code();
2111 CallIC(ic);
2112 break;
2113 }
2114 }
2115 context()->Plug(r2);
2116}
2117
2118void FullCodeGenerator::EmitStoreToStackLocalOrContextSlot(
2119 Variable* var, MemOperand location) {
2120 __ StoreP(result_register(), location);
2121 if (var->IsContextSlot()) {
2122 // RecordWrite may destroy all its register arguments.
2123 __ LoadRR(r5, result_register());
2124 int offset = Context::SlotOffset(var->index());
2125 __ RecordWriteContextSlot(r3, offset, r5, r4, kLRHasBeenSaved,
2126 kDontSaveFPRegs);
2127 }
2128}
2129
2130void FullCodeGenerator::EmitVariableAssignment(Variable* var, Token::Value op,
2131 FeedbackVectorSlot slot) {
2132 if (var->IsUnallocated()) {
2133 // Global var, const, or let.
2134 __ mov(StoreDescriptor::NameRegister(), Operand(var->name()));
2135 __ LoadGlobalObject(StoreDescriptor::ReceiverRegister());
2136 EmitLoadStoreICSlot(slot);
2137 CallStoreIC();
2138
2139 } else if (var->mode() == LET && op != Token::INIT) {
2140 // Non-initializing assignment to let variable needs a write barrier.
2141 DCHECK(!var->IsLookupSlot());
2142 DCHECK(var->IsStackAllocated() || var->IsContextSlot());
2143 Label assign;
2144 MemOperand location = VarOperand(var, r3);
2145 __ LoadP(r5, location);
2146 __ CompareRoot(r5, Heap::kTheHoleValueRootIndex);
2147 __ bne(&assign);
2148 __ mov(r5, Operand(var->name()));
2149 __ push(r5);
2150 __ CallRuntime(Runtime::kThrowReferenceError);
2151 // Perform the assignment.
2152 __ bind(&assign);
2153 EmitStoreToStackLocalOrContextSlot(var, location);
2154
2155 } else if (var->mode() == CONST && op != Token::INIT) {
2156 // Assignment to const variable needs a write barrier.
2157 DCHECK(!var->IsLookupSlot());
2158 DCHECK(var->IsStackAllocated() || var->IsContextSlot());
2159 Label const_error;
2160 MemOperand location = VarOperand(var, r3);
2161 __ LoadP(r5, location);
2162 __ CompareRoot(r5, Heap::kTheHoleValueRootIndex);
2163 __ bne(&const_error, Label::kNear);
2164 __ mov(r5, Operand(var->name()));
2165 __ push(r5);
2166 __ CallRuntime(Runtime::kThrowReferenceError);
2167 __ bind(&const_error);
2168 __ CallRuntime(Runtime::kThrowConstAssignError);
2169
2170 } else if (var->is_this() && var->mode() == CONST && op == Token::INIT) {
2171 // Initializing assignment to const {this} needs a write barrier.
2172 DCHECK(var->IsStackAllocated() || var->IsContextSlot());
2173 Label uninitialized_this;
2174 MemOperand location = VarOperand(var, r3);
2175 __ LoadP(r5, location);
2176 __ CompareRoot(r5, Heap::kTheHoleValueRootIndex);
2177 __ beq(&uninitialized_this);
2178 __ mov(r3, Operand(var->name()));
2179 __ push(r3);
2180 __ CallRuntime(Runtime::kThrowReferenceError);
2181 __ bind(&uninitialized_this);
2182 EmitStoreToStackLocalOrContextSlot(var, location);
2183
Ben Murdochc5610432016-08-08 18:44:38 +01002184 } else if (!var->is_const_mode() || op == Token::INIT) {
Ben Murdochda12d292016-06-02 14:46:10 +01002185 if (var->IsLookupSlot()) {
2186 // Assignment to var.
2187 __ Push(var->name());
2188 __ Push(r2);
2189 __ CallRuntime(is_strict(language_mode())
2190 ? Runtime::kStoreLookupSlot_Strict
2191 : Runtime::kStoreLookupSlot_Sloppy);
2192 } else {
2193 // Assignment to var or initializing assignment to let/const in harmony
2194 // mode.
2195 DCHECK((var->IsStackAllocated() || var->IsContextSlot()));
2196 MemOperand location = VarOperand(var, r3);
2197 if (FLAG_debug_code && var->mode() == LET && op == Token::INIT) {
2198 // Check for an uninitialized let binding.
2199 __ LoadP(r4, location);
2200 __ CompareRoot(r4, Heap::kTheHoleValueRootIndex);
2201 __ Check(eq, kLetBindingReInitialization);
2202 }
2203 EmitStoreToStackLocalOrContextSlot(var, location);
2204 }
Ben Murdochda12d292016-06-02 14:46:10 +01002205 } else {
2206 DCHECK(var->mode() == CONST_LEGACY && op != Token::INIT);
2207 if (is_strict(language_mode())) {
2208 __ CallRuntime(Runtime::kThrowConstAssignError);
2209 }
2210 // Silently ignore store in sloppy mode.
2211 }
2212}
2213
2214void FullCodeGenerator::EmitNamedPropertyAssignment(Assignment* expr) {
2215 // Assignment to a property, using a named store IC.
2216 Property* prop = expr->target()->AsProperty();
2217 DCHECK(prop != NULL);
2218 DCHECK(prop->key()->IsLiteral());
2219
2220 __ mov(StoreDescriptor::NameRegister(),
2221 Operand(prop->key()->AsLiteral()->value()));
2222 PopOperand(StoreDescriptor::ReceiverRegister());
2223 EmitLoadStoreICSlot(expr->AssignmentSlot());
2224 CallStoreIC();
2225
Ben Murdochc5610432016-08-08 18:44:38 +01002226 PrepareForBailoutForId(expr->AssignmentId(), BailoutState::TOS_REGISTER);
Ben Murdochda12d292016-06-02 14:46:10 +01002227 context()->Plug(r2);
2228}
2229
2230void FullCodeGenerator::EmitNamedSuperPropertyStore(Property* prop) {
2231 // Assignment to named property of super.
2232 // r2 : value
2233 // stack : receiver ('this'), home_object
2234 DCHECK(prop != NULL);
2235 Literal* key = prop->key()->AsLiteral();
2236 DCHECK(key != NULL);
2237
2238 PushOperand(key->value());
2239 PushOperand(r2);
2240 CallRuntimeWithOperands((is_strict(language_mode())
2241 ? Runtime::kStoreToSuper_Strict
2242 : Runtime::kStoreToSuper_Sloppy));
2243}
2244
2245void FullCodeGenerator::EmitKeyedSuperPropertyStore(Property* prop) {
2246  // Assignment to keyed property of super.
2247 // r2 : value
2248 // stack : receiver ('this'), home_object, key
2249 DCHECK(prop != NULL);
2250
2251 PushOperand(r2);
2252 CallRuntimeWithOperands((is_strict(language_mode())
2253 ? Runtime::kStoreKeyedToSuper_Strict
2254 : Runtime::kStoreKeyedToSuper_Sloppy));
2255}
2256
2257void FullCodeGenerator::EmitKeyedPropertyAssignment(Assignment* expr) {
2258 // Assignment to a property, using a keyed store IC.
2259 PopOperands(StoreDescriptor::ReceiverRegister(),
2260 StoreDescriptor::NameRegister());
2261 DCHECK(StoreDescriptor::ValueRegister().is(r2));
2262
2263 Handle<Code> ic =
2264 CodeFactory::KeyedStoreIC(isolate(), language_mode()).code();
2265 EmitLoadStoreICSlot(expr->AssignmentSlot());
2266 CallIC(ic);
2267
Ben Murdochc5610432016-08-08 18:44:38 +01002268 PrepareForBailoutForId(expr->AssignmentId(), BailoutState::TOS_REGISTER);
Ben Murdochda12d292016-06-02 14:46:10 +01002269 context()->Plug(r2);
2270}
2271
2272void FullCodeGenerator::CallIC(Handle<Code> code, TypeFeedbackId ast_id) {
2273 ic_total_count_++;
2274 __ Call(code, RelocInfo::CODE_TARGET, ast_id);
2275}
2276
2277// Code common for calls using the IC.
2278void FullCodeGenerator::EmitCallWithLoadIC(Call* expr) {
2279 Expression* callee = expr->expression();
2280
2281 // Get the target function.
2282 ConvertReceiverMode convert_mode;
2283 if (callee->IsVariableProxy()) {
2284 {
2285 StackValueContext context(this);
2286 EmitVariableLoad(callee->AsVariableProxy());
Ben Murdochc5610432016-08-08 18:44:38 +01002287 PrepareForBailout(callee, BailoutState::NO_REGISTERS);
Ben Murdochda12d292016-06-02 14:46:10 +01002288 }
2289 // Push undefined as receiver. This is patched in the method prologue if it
2290 // is a sloppy mode method.
2291 __ LoadRoot(r1, Heap::kUndefinedValueRootIndex);
2292 PushOperand(r1);
2293 convert_mode = ConvertReceiverMode::kNullOrUndefined;
2294 } else {
2295 // Load the function from the receiver.
2296 DCHECK(callee->IsProperty());
2297 DCHECK(!callee->AsProperty()->IsSuperAccess());
2298 __ LoadP(LoadDescriptor::ReceiverRegister(), MemOperand(sp, 0));
2299 EmitNamedPropertyLoad(callee->AsProperty());
Ben Murdochc5610432016-08-08 18:44:38 +01002300 PrepareForBailoutForId(callee->AsProperty()->LoadId(),
2301 BailoutState::TOS_REGISTER);
Ben Murdochda12d292016-06-02 14:46:10 +01002302 // Push the target function under the receiver.
2303 __ LoadP(r1, MemOperand(sp, 0));
2304 PushOperand(r1);
2305 __ StoreP(r2, MemOperand(sp, kPointerSize));
2306 convert_mode = ConvertReceiverMode::kNotNullOrUndefined;
2307 }
2308
2309 EmitCall(expr, convert_mode);
2310}
2311
2312void FullCodeGenerator::EmitSuperCallWithLoadIC(Call* expr) {
2313 Expression* callee = expr->expression();
2314 DCHECK(callee->IsProperty());
2315 Property* prop = callee->AsProperty();
2316 DCHECK(prop->IsSuperAccess());
2317 SetExpressionPosition(prop);
2318
2319 Literal* key = prop->key()->AsLiteral();
2320 DCHECK(!key->value()->IsSmi());
2321 // Load the function from the receiver.
2322 const Register scratch = r3;
2323 SuperPropertyReference* super_ref = prop->obj()->AsSuperPropertyReference();
2324 VisitForAccumulatorValue(super_ref->home_object());
2325 __ LoadRR(scratch, r2);
2326 VisitForAccumulatorValue(super_ref->this_var());
2327 PushOperands(scratch, r2, r2, scratch);
2328 PushOperand(key->value());
2329
2330 // Stack here:
2331 // - home_object
2332 // - this (receiver)
2333 // - this (receiver) <-- LoadFromSuper will pop here and below.
2334 // - home_object
2335 // - key
2336 CallRuntimeWithOperands(Runtime::kLoadFromSuper);
Ben Murdochc5610432016-08-08 18:44:38 +01002337 PrepareForBailoutForId(prop->LoadId(), BailoutState::TOS_REGISTER);
Ben Murdochda12d292016-06-02 14:46:10 +01002338
2339 // Replace home_object with target function.
2340 __ StoreP(r2, MemOperand(sp, kPointerSize));
2341
2342 // Stack here:
2343 // - target function
2344 // - this (receiver)
2345 EmitCall(expr);
2346}
2347
2348// Code common for calls using the IC.
2349void FullCodeGenerator::EmitKeyedCallWithLoadIC(Call* expr, Expression* key) {
2350 // Load the key.
2351 VisitForAccumulatorValue(key);
2352
2353 Expression* callee = expr->expression();
2354
2355 // Load the function from the receiver.
2356 DCHECK(callee->IsProperty());
2357 __ LoadP(LoadDescriptor::ReceiverRegister(), MemOperand(sp, 0));
2358 __ Move(LoadDescriptor::NameRegister(), r2);
2359 EmitKeyedPropertyLoad(callee->AsProperty());
Ben Murdochc5610432016-08-08 18:44:38 +01002360 PrepareForBailoutForId(callee->AsProperty()->LoadId(),
2361 BailoutState::TOS_REGISTER);
Ben Murdochda12d292016-06-02 14:46:10 +01002362
2363 // Push the target function under the receiver.
2364 __ LoadP(ip, MemOperand(sp, 0));
2365 PushOperand(ip);
2366 __ StoreP(r2, MemOperand(sp, kPointerSize));
2367
2368 EmitCall(expr, ConvertReceiverMode::kNotNullOrUndefined);
2369}
2370
2371void FullCodeGenerator::EmitKeyedSuperCallWithLoadIC(Call* expr) {
2372 Expression* callee = expr->expression();
2373 DCHECK(callee->IsProperty());
2374 Property* prop = callee->AsProperty();
2375 DCHECK(prop->IsSuperAccess());
2376
2377 SetExpressionPosition(prop);
2378 // Load the function from the receiver.
2379 const Register scratch = r3;
2380 SuperPropertyReference* super_ref = prop->obj()->AsSuperPropertyReference();
2381 VisitForAccumulatorValue(super_ref->home_object());
2382 __ LoadRR(scratch, r2);
2383 VisitForAccumulatorValue(super_ref->this_var());
2384 PushOperands(scratch, r2, r2, scratch);
2385 VisitForStackValue(prop->key());
2386
2387 // Stack here:
2388 // - home_object
2389 // - this (receiver)
2390 // - this (receiver) <-- LoadKeyedFromSuper will pop here and below.
2391 // - home_object
2392 // - key
2393 CallRuntimeWithOperands(Runtime::kLoadKeyedFromSuper);
Ben Murdochc5610432016-08-08 18:44:38 +01002394 PrepareForBailoutForId(prop->LoadId(), BailoutState::TOS_REGISTER);
Ben Murdochda12d292016-06-02 14:46:10 +01002395
2396 // Replace home_object with target function.
2397 __ StoreP(r2, MemOperand(sp, kPointerSize));
2398
2399 // Stack here:
2400 // - target function
2401 // - this (receiver)
2402 EmitCall(expr);
2403}
2404
2405void FullCodeGenerator::EmitCall(Call* expr, ConvertReceiverMode mode) {
2406 // Load the arguments.
2407 ZoneList<Expression*>* args = expr->arguments();
2408 int arg_count = args->length();
2409 for (int i = 0; i < arg_count; i++) {
2410 VisitForStackValue(args->at(i));
2411 }
2412
Ben Murdochc5610432016-08-08 18:44:38 +01002413 PrepareForBailoutForId(expr->CallId(), BailoutState::NO_REGISTERS);
Ben Murdochda12d292016-06-02 14:46:10 +01002414 SetCallPosition(expr, expr->tail_call_mode());
2415 if (expr->tail_call_mode() == TailCallMode::kAllow) {
2416 if (FLAG_trace) {
2417 __ CallRuntime(Runtime::kTraceTailCall);
2418 }
2419 // Update profiling counters before the tail call since we will
2420 // not return to this function.
2421 EmitProfilingCounterHandlingForReturnSequence(true);
2422 }
2423 Handle<Code> ic =
2424 CodeFactory::CallIC(isolate(), arg_count, mode, expr->tail_call_mode())
2425 .code();
2426 __ LoadSmiLiteral(r5, SmiFromSlot(expr->CallFeedbackICSlot()));
2427 __ LoadP(r3, MemOperand(sp, (arg_count + 1) * kPointerSize), r0);
2428 // Don't assign a type feedback id to the IC, since type feedback is provided
2429 // by the vector above.
2430 CallIC(ic);
2431 OperandStackDepthDecrement(arg_count + 1);
2432
2433 RecordJSReturnSite(expr);
Ben Murdochc5610432016-08-08 18:44:38 +01002434 RestoreContext();
Ben Murdochda12d292016-06-02 14:46:10 +01002435 context()->DropAndPlug(1, r2);
2436}
2437
Ben Murdochc5610432016-08-08 18:44:38 +01002438void FullCodeGenerator::EmitResolvePossiblyDirectEval(Call* expr) {
2439 int arg_count = expr->arguments()->length();
Ben Murdochda12d292016-06-02 14:46:10 +01002440 // r6: copy of the first argument or undefined if it doesn't exist.
2441 if (arg_count > 0) {
2442 __ LoadP(r6, MemOperand(sp, arg_count * kPointerSize), r0);
2443 } else {
2444 __ LoadRoot(r6, Heap::kUndefinedValueRootIndex);
2445 }
2446
2447  // r5: the enclosing function.
2448 __ LoadP(r5, MemOperand(fp, JavaScriptFrameConstants::kFunctionOffset));
2449
2450 // r4: language mode.
2451 __ LoadSmiLiteral(r4, Smi::FromInt(language_mode()));
2452
2453  // r3: the start position of the scope the call resides in.
2454 __ LoadSmiLiteral(r3, Smi::FromInt(scope()->start_position()));
2455
Ben Murdochc5610432016-08-08 18:44:38 +01002456 // r2: the source position of the eval call.
2457 __ LoadSmiLiteral(r2, Smi::FromInt(expr->position()));
2458
Ben Murdochda12d292016-06-02 14:46:10 +01002459 // Do the runtime call.
Ben Murdochc5610432016-08-08 18:44:38 +01002460 __ Push(r6, r5, r4, r3, r2);
Ben Murdochda12d292016-06-02 14:46:10 +01002461 __ CallRuntime(Runtime::kResolvePossiblyDirectEval);
2462}
2463
2464// See http://www.ecma-international.org/ecma-262/6.0/#sec-function-calls.
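// When the callee resolves through a lookup slot (e.g. inside a 'with'
// scope or next to a sloppy direct eval), the runtime returns both the
// function and the object that acts as the receiver: in "with (o) { f(); }"
// the receiver is o if f is found on o, and undefined otherwise.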
2465void FullCodeGenerator::PushCalleeAndWithBaseObject(Call* expr) {
2466 VariableProxy* callee = expr->expression()->AsVariableProxy();
2467 if (callee->var()->IsLookupSlot()) {
2468 Label slow, done;
2469 SetExpressionPosition(callee);
2470 // Generate code for loading from variables potentially shadowed by
2471 // eval-introduced variables.
2472 EmitDynamicLookupFastCase(callee, NOT_INSIDE_TYPEOF, &slow, &done);
2473
2474 __ bind(&slow);
2475 // Call the runtime to find the function to call (returned in r2) and
2476 // the object holding it (returned in r3).
2477 __ Push(callee->name());
2478 __ CallRuntime(Runtime::kLoadLookupSlotForCall);
2479 PushOperands(r2, r3); // Function, receiver.
Ben Murdochc5610432016-08-08 18:44:38 +01002480 PrepareForBailoutForId(expr->LookupId(), BailoutState::NO_REGISTERS);
Ben Murdochda12d292016-06-02 14:46:10 +01002481
2482 // If fast case code has been generated, emit code to push the function
2483 // and receiver and have the slow path jump around this code.
2484 if (done.is_linked()) {
2485 Label call;
2486 __ b(&call);
2487 __ bind(&done);
2488 // Push function.
2489 __ push(r2);
2490 // Pass undefined as the receiver, which is the WithBaseObject of a
2491 // non-object environment record. If the callee is sloppy, it will patch
2492 // it up to be the global receiver.
2493 __ LoadRoot(r3, Heap::kUndefinedValueRootIndex);
2494 __ push(r3);
2495 __ bind(&call);
2496 }
2497 } else {
2498 VisitForStackValue(callee);
2499 // refEnv.WithBaseObject()
2500 __ LoadRoot(r4, Heap::kUndefinedValueRootIndex);
2501 PushOperand(r4); // Reserved receiver slot.
2502 }
2503}
2504
2505void FullCodeGenerator::EmitPossiblyEvalCall(Call* expr) {
Ben Murdochc5610432016-08-08 18:44:38 +01002506 // In a call to eval, we first call
2507 // Runtime_ResolvePossiblyDirectEval to resolve the function we need
2508 // to call. Then we call the resolved function using the given arguments.
Ben Murdochda12d292016-06-02 14:46:10 +01002509 ZoneList<Expression*>* args = expr->arguments();
2510 int arg_count = args->length();
2511
2512 PushCalleeAndWithBaseObject(expr);
2513
2514 // Push the arguments.
2515 for (int i = 0; i < arg_count; i++) {
2516 VisitForStackValue(args->at(i));
2517 }
2518
2519 // Push a copy of the function (found below the arguments) and
2520 // resolve eval.
2521 __ LoadP(r3, MemOperand(sp, (arg_count + 1) * kPointerSize), r0);
2522 __ push(r3);
Ben Murdochc5610432016-08-08 18:44:38 +01002523 EmitResolvePossiblyDirectEval(expr);
Ben Murdochda12d292016-06-02 14:46:10 +01002524
2525 // Touch up the stack with the resolved function.
2526 __ StoreP(r2, MemOperand(sp, (arg_count + 1) * kPointerSize), r0);
2527
Ben Murdochc5610432016-08-08 18:44:38 +01002528 PrepareForBailoutForId(expr->EvalId(), BailoutState::NO_REGISTERS);
Ben Murdochda12d292016-06-02 14:46:10 +01002529
2530 // Record source position for debugger.
2531 SetCallPosition(expr);
2532 __ LoadP(r3, MemOperand(sp, (arg_count + 1) * kPointerSize), r0);
2533 __ mov(r2, Operand(arg_count));
2534 __ Call(isolate()->builtins()->Call(ConvertReceiverMode::kAny,
2535 expr->tail_call_mode()),
2536 RelocInfo::CODE_TARGET);
2537 OperandStackDepthDecrement(arg_count + 1);
2538 RecordJSReturnSite(expr);
Ben Murdochc5610432016-08-08 18:44:38 +01002539 RestoreContext();
Ben Murdochda12d292016-06-02 14:46:10 +01002540 context()->DropAndPlug(1, r2);
2541}
2542
2543void FullCodeGenerator::VisitCallNew(CallNew* expr) {
2544 Comment cmnt(masm_, "[ CallNew");
2545 // According to ECMA-262, section 11.2.2, page 44, the function
2546 // expression in new calls must be evaluated before the
2547 // arguments.
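  // E.g. in "new f(g())" the expression f is evaluated before g() is called.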
2548
2549 // Push constructor on the stack. If it's not a function it's used as
2550 // receiver for CALL_NON_FUNCTION, otherwise the value on the stack is
2551 // ignored.
2552 DCHECK(!expr->expression()->IsSuperPropertyReference());
2553 VisitForStackValue(expr->expression());
2554
2555 // Push the arguments ("left-to-right") on the stack.
2556 ZoneList<Expression*>* args = expr->arguments();
2557 int arg_count = args->length();
2558 for (int i = 0; i < arg_count; i++) {
2559 VisitForStackValue(args->at(i));
2560 }
2561
2562 // Call the construct call builtin that handles allocation and
2563 // constructor invocation.
2564 SetConstructCallPosition(expr);
2565
2566 // Load function and argument count into r3 and r2.
2567 __ mov(r2, Operand(arg_count));
2568 __ LoadP(r3, MemOperand(sp, arg_count * kPointerSize), r0);
2569
2570 // Record call targets in unoptimized code.
2571 __ EmitLoadTypeFeedbackVector(r4);
2572 __ LoadSmiLiteral(r5, SmiFromSlot(expr->CallNewFeedbackSlot()));
2573
2574 CallConstructStub stub(isolate());
2575 __ Call(stub.GetCode(), RelocInfo::CODE_TARGET);
2576 OperandStackDepthDecrement(arg_count + 1);
Ben Murdochc5610432016-08-08 18:44:38 +01002577 PrepareForBailoutForId(expr->ReturnId(), BailoutState::TOS_REGISTER);
2578 RestoreContext();
Ben Murdochda12d292016-06-02 14:46:10 +01002579 context()->Plug(r2);
2580}
2581
2582void FullCodeGenerator::EmitSuperConstructorCall(Call* expr) {
2583 SuperCallReference* super_call_ref =
2584 expr->expression()->AsSuperCallReference();
2585 DCHECK_NOT_NULL(super_call_ref);
2586
2587 // Push the super constructor target on the stack (may be null,
2588 // but the Construct builtin can deal with that properly).
2589 VisitForAccumulatorValue(super_call_ref->this_function_var());
2590 __ AssertFunction(result_register());
2591 __ LoadP(result_register(),
2592 FieldMemOperand(result_register(), HeapObject::kMapOffset));
2593 __ LoadP(result_register(),
2594 FieldMemOperand(result_register(), Map::kPrototypeOffset));
2595 PushOperand(result_register());
2596
2597 // Push the arguments ("left-to-right") on the stack.
2598 ZoneList<Expression*>* args = expr->arguments();
2599 int arg_count = args->length();
2600 for (int i = 0; i < arg_count; i++) {
2601 VisitForStackValue(args->at(i));
2602 }
2603
2604 // Call the construct call builtin that handles allocation and
2605 // constructor invocation.
2606 SetConstructCallPosition(expr);
2607
2608 // Load new target into r5.
2609 VisitForAccumulatorValue(super_call_ref->new_target_var());
2610 __ LoadRR(r5, result_register());
2611
2612  // Load function and argument count into r3 and r2.
2613 __ mov(r2, Operand(arg_count));
2614 __ LoadP(r3, MemOperand(sp, arg_count * kPointerSize));
2615
2616 __ Call(isolate()->builtins()->Construct(), RelocInfo::CODE_TARGET);
2617 OperandStackDepthDecrement(arg_count + 1);
2618
2619 RecordJSReturnSite(expr);
Ben Murdochc5610432016-08-08 18:44:38 +01002620 RestoreContext();
Ben Murdochda12d292016-06-02 14:46:10 +01002621 context()->Plug(r2);
2622}
2623
2624void FullCodeGenerator::EmitIsSmi(CallRuntime* expr) {
2625 ZoneList<Expression*>* args = expr->arguments();
2626 DCHECK(args->length() == 1);
2627
2628 VisitForAccumulatorValue(args->at(0));
2629
2630 Label materialize_true, materialize_false, skip_lookup;
2631 Label* if_true = NULL;
2632 Label* if_false = NULL;
2633 Label* fall_through = NULL;
2634 context()->PrepareTest(&materialize_true, &materialize_false, &if_true,
2635 &if_false, &fall_through);
2636
2637 PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
2638 __ TestIfSmi(r2);
2639 Split(eq, if_true, if_false, fall_through);
2640
2641 context()->Plug(if_true, if_false);
2642}
2643
2644void FullCodeGenerator::EmitIsJSReceiver(CallRuntime* expr) {
2645 ZoneList<Expression*>* args = expr->arguments();
2646 DCHECK(args->length() == 1);
2647
2648 VisitForAccumulatorValue(args->at(0));
2649
2650 Label materialize_true, materialize_false;
2651 Label* if_true = NULL;
2652 Label* if_false = NULL;
2653 Label* fall_through = NULL;
2654 context()->PrepareTest(&materialize_true, &materialize_false, &if_true,
2655 &if_false, &fall_through);
2656
2657 __ JumpIfSmi(r2, if_false);
2658 __ CompareObjectType(r2, r3, r3, FIRST_JS_RECEIVER_TYPE);
2659 PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
2660 Split(ge, if_true, if_false, fall_through);
2661
2662 context()->Plug(if_true, if_false);
2663}
2664
2665void FullCodeGenerator::EmitIsArray(CallRuntime* expr) {
2666 ZoneList<Expression*>* args = expr->arguments();
2667 DCHECK(args->length() == 1);
2668
2669 VisitForAccumulatorValue(args->at(0));
2670
2671 Label materialize_true, materialize_false;
2672 Label* if_true = NULL;
2673 Label* if_false = NULL;
2674 Label* fall_through = NULL;
2675 context()->PrepareTest(&materialize_true, &materialize_false, &if_true,
2676 &if_false, &fall_through);
2677
2678 __ JumpIfSmi(r2, if_false);
2679 __ CompareObjectType(r2, r3, r3, JS_ARRAY_TYPE);
2680 PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
2681 Split(eq, if_true, if_false, fall_through);
2682
2683 context()->Plug(if_true, if_false);
2684}
2685
2686void FullCodeGenerator::EmitIsTypedArray(CallRuntime* expr) {
2687 ZoneList<Expression*>* args = expr->arguments();
2688 DCHECK(args->length() == 1);
2689
2690 VisitForAccumulatorValue(args->at(0));
2691
2692 Label materialize_true, materialize_false;
2693 Label* if_true = NULL;
2694 Label* if_false = NULL;
2695 Label* fall_through = NULL;
2696 context()->PrepareTest(&materialize_true, &materialize_false, &if_true,
2697 &if_false, &fall_through);
2698
2699 __ JumpIfSmi(r2, if_false);
2700 __ CompareObjectType(r2, r3, r3, JS_TYPED_ARRAY_TYPE);
2701 PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
2702 Split(eq, if_true, if_false, fall_through);
2703
2704 context()->Plug(if_true, if_false);
2705}
2706
2707void FullCodeGenerator::EmitIsRegExp(CallRuntime* expr) {
2708 ZoneList<Expression*>* args = expr->arguments();
2709 DCHECK(args->length() == 1);
2710
2711 VisitForAccumulatorValue(args->at(0));
2712
2713 Label materialize_true, materialize_false;
2714 Label* if_true = NULL;
2715 Label* if_false = NULL;
2716 Label* fall_through = NULL;
2717 context()->PrepareTest(&materialize_true, &materialize_false, &if_true,
2718 &if_false, &fall_through);
2719
2720 __ JumpIfSmi(r2, if_false);
2721 __ CompareObjectType(r2, r3, r3, JS_REGEXP_TYPE);
2722 PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
2723 Split(eq, if_true, if_false, fall_through);
2724
2725 context()->Plug(if_true, if_false);
2726}
2727
2728void FullCodeGenerator::EmitIsJSProxy(CallRuntime* expr) {
2729 ZoneList<Expression*>* args = expr->arguments();
2730 DCHECK(args->length() == 1);
2731
2732 VisitForAccumulatorValue(args->at(0));
2733
2734 Label materialize_true, materialize_false;
2735 Label* if_true = NULL;
2736 Label* if_false = NULL;
2737 Label* fall_through = NULL;
2738 context()->PrepareTest(&materialize_true, &materialize_false, &if_true,
2739 &if_false, &fall_through);
2740
2741 __ JumpIfSmi(r2, if_false);
2742 __ CompareObjectType(r2, r3, r3, JS_PROXY_TYPE);
2743 PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
2744 Split(eq, if_true, if_false, fall_through);
2745
2746 context()->Plug(if_true, if_false);
2747}
2748
2749void FullCodeGenerator::EmitClassOf(CallRuntime* expr) {
2750 ZoneList<Expression*>* args = expr->arguments();
2751 DCHECK(args->length() == 1);
2752 Label done, null, function, non_function_constructor;
2753
2754 VisitForAccumulatorValue(args->at(0));
2755
2756 // If the object is not a JSReceiver, we return null.
2757 __ JumpIfSmi(r2, &null);
2758 STATIC_ASSERT(LAST_JS_RECEIVER_TYPE == LAST_TYPE);
2759 __ CompareObjectType(r2, r2, r3, FIRST_JS_RECEIVER_TYPE);
2760 // Map is now in r2.
2761 __ blt(&null);
2762
2763 // Return 'Function' for JSFunction and JSBoundFunction objects.
2764 __ CmpLogicalP(r3, Operand(FIRST_FUNCTION_TYPE));
2765 STATIC_ASSERT(LAST_FUNCTION_TYPE == LAST_TYPE);
2766 __ bge(&function);
2767
2768 // Check if the constructor in the map is a JS function.
2769 Register instance_type = r4;
2770 __ GetMapConstructor(r2, r2, r3, instance_type);
2771 __ CmpP(instance_type, Operand(JS_FUNCTION_TYPE));
2772 __ bne(&non_function_constructor, Label::kNear);
2773
2774 // r2 now contains the constructor function. Grab the
2775 // instance class name from there.
2776 __ LoadP(r2, FieldMemOperand(r2, JSFunction::kSharedFunctionInfoOffset));
2777 __ LoadP(r2,
2778 FieldMemOperand(r2, SharedFunctionInfo::kInstanceClassNameOffset));
2779 __ b(&done, Label::kNear);
2780
2781 // Functions have class 'Function'.
2782 __ bind(&function);
2783 __ LoadRoot(r2, Heap::kFunction_stringRootIndex);
2784 __ b(&done, Label::kNear);
2785
2786 // Objects with a non-function constructor have class 'Object'.
2787 __ bind(&non_function_constructor);
2788 __ LoadRoot(r2, Heap::kObject_stringRootIndex);
2789 __ b(&done, Label::kNear);
2790
2791 // Non-JS objects have class null.
2792 __ bind(&null);
2793 __ LoadRoot(r2, Heap::kNullValueRootIndex);
2794
2795 // All done.
2796 __ bind(&done);
2797
2798 context()->Plug(r2);
2799}
2800
2801void FullCodeGenerator::EmitValueOf(CallRuntime* expr) {
2802 ZoneList<Expression*>* args = expr->arguments();
2803 DCHECK(args->length() == 1);
2804 VisitForAccumulatorValue(args->at(0)); // Load the object.
2805
2806 Label done;
2807 // If the object is a smi return the object.
2808 __ JumpIfSmi(r2, &done);
2809 // If the object is not a value type, return the object.
2810 __ CompareObjectType(r2, r3, r3, JS_VALUE_TYPE);
2811 __ bne(&done, Label::kNear);
2812 __ LoadP(r2, FieldMemOperand(r2, JSValue::kValueOffset));
2813
2814 __ bind(&done);
2815 context()->Plug(r2);
2816}
2817
Ben Murdochda12d292016-06-02 14:46:10 +01002818void FullCodeGenerator::EmitStringCharFromCode(CallRuntime* expr) {
2819 ZoneList<Expression*>* args = expr->arguments();
2820 DCHECK(args->length() == 1);
2821 VisitForAccumulatorValue(args->at(0));
2822
2823 Label done;
2824 StringCharFromCodeGenerator generator(r2, r3);
2825 generator.GenerateFast(masm_);
2826 __ b(&done);
2827
2828 NopRuntimeCallHelper call_helper;
2829 generator.GenerateSlow(masm_, call_helper);
2830
2831 __ bind(&done);
2832 context()->Plug(r3);
2833}
2834
2835void FullCodeGenerator::EmitStringCharCodeAt(CallRuntime* expr) {
2836 ZoneList<Expression*>* args = expr->arguments();
2837 DCHECK(args->length() == 2);
2838 VisitForStackValue(args->at(0));
2839 VisitForAccumulatorValue(args->at(1));
2840
2841 Register object = r3;
2842 Register index = r2;
2843 Register result = r5;
2844
2845 PopOperand(object);
2846
2847 Label need_conversion;
2848 Label index_out_of_range;
2849 Label done;
2850 StringCharCodeAtGenerator generator(object, index, result, &need_conversion,
Ben Murdoch61f157c2016-09-16 13:49:30 +01002851 &need_conversion, &index_out_of_range);
Ben Murdochda12d292016-06-02 14:46:10 +01002852 generator.GenerateFast(masm_);
2853 __ b(&done);
2854
2855 __ bind(&index_out_of_range);
2856 // When the index is out of range, the spec requires us to return
2857 // NaN.
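  // E.g. "abc".charCodeAt(3) evaluates to NaN.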
2858 __ LoadRoot(result, Heap::kNanValueRootIndex);
2859 __ b(&done);
2860
2861 __ bind(&need_conversion);
2862 // Load the undefined value into the result register, which will
2863 // trigger conversion.
2864 __ LoadRoot(result, Heap::kUndefinedValueRootIndex);
2865 __ b(&done);
2866
2867 NopRuntimeCallHelper call_helper;
2868 generator.GenerateSlow(masm_, NOT_PART_OF_IC_HANDLER, call_helper);
2869
2870 __ bind(&done);
2871 context()->Plug(result);
2872}
2873
Ben Murdochda12d292016-06-02 14:46:10 +01002874void FullCodeGenerator::EmitCall(CallRuntime* expr) {
2875 ZoneList<Expression*>* args = expr->arguments();
2876 DCHECK_LE(2, args->length());
2877 // Push target, receiver and arguments onto the stack.
2878 for (Expression* const arg : *args) {
2879 VisitForStackValue(arg);
2880 }
Ben Murdochc5610432016-08-08 18:44:38 +01002881 PrepareForBailoutForId(expr->CallId(), BailoutState::NO_REGISTERS);
Ben Murdochda12d292016-06-02 14:46:10 +01002882 // Move target to r3.
2883 int const argc = args->length() - 2;
2884 __ LoadP(r3, MemOperand(sp, (argc + 1) * kPointerSize));
2885 // Call the target.
2886 __ mov(r2, Operand(argc));
2887 __ Call(isolate()->builtins()->Call(), RelocInfo::CODE_TARGET);
2888 OperandStackDepthDecrement(argc + 1);
Ben Murdochc5610432016-08-08 18:44:38 +01002889 RestoreContext();
Ben Murdochda12d292016-06-02 14:46:10 +01002890 // Discard the function left on TOS.
2891 context()->DropAndPlug(1, r2);
2892}
2893
2894void FullCodeGenerator::EmitHasCachedArrayIndex(CallRuntime* expr) {
2895 ZoneList<Expression*>* args = expr->arguments();
2896 VisitForAccumulatorValue(args->at(0));
2897
2898 Label materialize_true, materialize_false;
2899 Label* if_true = NULL;
2900 Label* if_false = NULL;
2901 Label* fall_through = NULL;
2902 context()->PrepareTest(&materialize_true, &materialize_false, &if_true,
2903 &if_false, &fall_through);
2904
2905 __ LoadlW(r2, FieldMemOperand(r2, String::kHashFieldOffset));
2906 __ AndP(r0, r2, Operand(String::kContainsCachedArrayIndexMask));
2907 PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
2908 Split(eq, if_true, if_false, fall_through);
2909
2910 context()->Plug(if_true, if_false);
2911}
2912
2913void FullCodeGenerator::EmitGetCachedArrayIndex(CallRuntime* expr) {
2914 ZoneList<Expression*>* args = expr->arguments();
2915 DCHECK(args->length() == 1);
2916 VisitForAccumulatorValue(args->at(0));
2917
2918 __ AssertString(r2);
2919
2920 __ LoadlW(r2, FieldMemOperand(r2, String::kHashFieldOffset));
2921 __ IndexFromHash(r2, r2);
2922
2923 context()->Plug(r2);
2924}
2925
2926void FullCodeGenerator::EmitGetSuperConstructor(CallRuntime* expr) {
2927 ZoneList<Expression*>* args = expr->arguments();
2928 DCHECK_EQ(1, args->length());
2929 VisitForAccumulatorValue(args->at(0));
2930 __ AssertFunction(r2);
2931 __ LoadP(r2, FieldMemOperand(r2, HeapObject::kMapOffset));
2932 __ LoadP(r2, FieldMemOperand(r2, Map::kPrototypeOffset));
2933 context()->Plug(r2);
2934}
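// The super constructor is simply the receiver function's prototype,
// read via its map, which is what a super() call dispatches to.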
2935
2936void FullCodeGenerator::EmitDebugIsActive(CallRuntime* expr) {
2937 DCHECK(expr->arguments()->length() == 0);
2938 ExternalReference debug_is_active =
2939 ExternalReference::debug_is_active_address(isolate());
2940 __ mov(ip, Operand(debug_is_active));
2941 __ LoadlB(r2, MemOperand(ip));
2942 __ SmiTag(r2);
2943 context()->Plug(r2);
2944}
2945
2946void FullCodeGenerator::EmitCreateIterResultObject(CallRuntime* expr) {
2947 ZoneList<Expression*>* args = expr->arguments();
2948 DCHECK_EQ(2, args->length());
2949 VisitForStackValue(args->at(0));
2950 VisitForStackValue(args->at(1));
2951
2952 Label runtime, done;
2953
2954 __ Allocate(JSIteratorResult::kSize, r2, r4, r5, &runtime,
2955 NO_ALLOCATION_FLAGS);
2956 __ LoadNativeContextSlot(Context::ITERATOR_RESULT_MAP_INDEX, r3);
2957 __ Pop(r4, r5);
2958 __ LoadRoot(r6, Heap::kEmptyFixedArrayRootIndex);
2959 __ StoreP(r3, FieldMemOperand(r2, HeapObject::kMapOffset), r0);
2960 __ StoreP(r6, FieldMemOperand(r2, JSObject::kPropertiesOffset), r0);
2961 __ StoreP(r6, FieldMemOperand(r2, JSObject::kElementsOffset), r0);
2962 __ StoreP(r4, FieldMemOperand(r2, JSIteratorResult::kValueOffset), r0);
2963 __ StoreP(r5, FieldMemOperand(r2, JSIteratorResult::kDoneOffset), r0);
2964 STATIC_ASSERT(JSIteratorResult::kSize == 5 * kPointerSize);
2965 __ b(&done);
2966
2967 __ bind(&runtime);
2968 CallRuntimeWithOperands(Runtime::kCreateIterResultObject);
2969
2970 __ bind(&done);
2971 context()->Plug(r2);
2972}
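// Fast-path layout written above, matching the STATIC_ASSERT of five
// pointer-sized fields: map, properties, elements, value, done -- in
// effect the literal {value: <arg0>, done: <arg1>} with the native
// iterator-result map.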
2973
2974void FullCodeGenerator::EmitLoadJSRuntimeFunction(CallRuntime* expr) {
2975 // Push function.
2976 __ LoadNativeContextSlot(expr->context_index(), r2);
2977 PushOperand(r2);
2978
2979 // Push undefined as the receiver.
2980 __ LoadRoot(r2, Heap::kUndefinedValueRootIndex);
2981 PushOperand(r2);
2982}
2983
2984void FullCodeGenerator::EmitCallJSRuntimeFunction(CallRuntime* expr) {
2985 ZoneList<Expression*>* args = expr->arguments();
2986 int arg_count = args->length();
2987
2988 SetCallPosition(expr);
2989 __ LoadP(r3, MemOperand(sp, (arg_count + 1) * kPointerSize), r0);
2990 __ mov(r2, Operand(arg_count));
2991 __ Call(isolate()->builtins()->Call(ConvertReceiverMode::kNullOrUndefined),
2992 RelocInfo::CODE_TARGET);
2993 OperandStackDepthDecrement(arg_count + 1);
2994 RestoreContext();
2995}
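// The callee (pushed by EmitLoadJSRuntimeFunction) sits below the
// undefined receiver and the arguments, hence the load from
// sp + (arg_count + 1) * kPointerSize; it is left on the stack for the
// caller to drop after the call returns.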
2996
2997void FullCodeGenerator::VisitUnaryOperation(UnaryOperation* expr) {
2998 switch (expr->op()) {
2999 case Token::DELETE: {
3000 Comment cmnt(masm_, "[ UnaryOperation (DELETE)");
3001 Property* property = expr->expression()->AsProperty();
3002 VariableProxy* proxy = expr->expression()->AsVariableProxy();
3003
3004 if (property != NULL) {
3005 VisitForStackValue(property->obj());
3006 VisitForStackValue(property->key());
3007 CallRuntimeWithOperands(is_strict(language_mode())
3008 ? Runtime::kDeleteProperty_Strict
3009 : Runtime::kDeleteProperty_Sloppy);
3010 context()->Plug(r2);
3011 } else if (proxy != NULL) {
3012 Variable* var = proxy->var();
3013 // Delete of an unqualified identifier is disallowed in strict mode but
3014 // "delete this" is allowed.
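// e.g. (sketch): "use strict"; delete x;     // early SyntaxError
//                "use strict"; delete this;  // allowed, yields true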
3015 bool is_this = var->HasThisName(isolate());
3016 DCHECK(is_sloppy(language_mode()) || is_this);
3017 if (var->IsUnallocatedOrGlobalSlot()) {
3018 __ LoadGlobalObject(r4);
3019 __ mov(r3, Operand(var->name()));
3020 __ Push(r4, r3);
3021 __ CallRuntime(Runtime::kDeleteProperty_Sloppy);
3022 context()->Plug(r2);
3023 } else if (var->IsStackAllocated() || var->IsContextSlot()) {
3024 // Result of deleting non-global, non-dynamic variables is false.
3025 // The subexpression does not have side effects.
3026 context()->Plug(is_this);
3027 } else {
3028 // Non-global variable. Call the runtime to try to delete from the
3029 // context where the variable was introduced.
3030 __ Push(var->name());
3031 __ CallRuntime(Runtime::kDeleteLookupSlot);
3032 context()->Plug(r2);
3033 }
3034 } else {
3035 // Result of deleting non-property, non-variable reference is true.
3036 // The subexpression may have side effects.
3037 VisitForEffect(expr->expression());
3038 context()->Plug(true);
3039 }
3040 break;
3041 }
3042
3043 case Token::VOID: {
3044 Comment cmnt(masm_, "[ UnaryOperation (VOID)");
3045 VisitForEffect(expr->expression());
3046 context()->Plug(Heap::kUndefinedValueRootIndex);
3047 break;
3048 }
3049
3050 case Token::NOT: {
3051 Comment cmnt(masm_, "[ UnaryOperation (NOT)");
3052 if (context()->IsEffect()) {
3053 // Unary NOT has no side effects so it's only necessary to visit the
3054 // subexpression. Match the optimizing compiler by not branching.
3055 VisitForEffect(expr->expression());
3056 } else if (context()->IsTest()) {
3057 const TestContext* test = TestContext::cast(context());
3058 // The labels are swapped for the recursive call.
3059 VisitForControl(expr->expression(), test->false_label(),
3060 test->true_label(), test->fall_through());
3061 context()->Plug(test->true_label(), test->false_label());
3062 } else {
3063 // We handle value contexts explicitly rather than simply visiting
3064 // for control and plugging the control flow into the context,
3065 // because we need to prepare a pair of extra administrative AST ids
3066 // for the optimizing compiler.
3067 DCHECK(context()->IsAccumulatorValue() || context()->IsStackValue());
3068 Label materialize_true, materialize_false, done;
3069 VisitForControl(expr->expression(), &materialize_false,
3070 &materialize_true, &materialize_true);
3071 if (!context()->IsAccumulatorValue()) OperandStackDepthIncrement(1);
3072 __ bind(&materialize_true);
3073 PrepareForBailoutForId(expr->MaterializeTrueId(),
3074 BailoutState::NO_REGISTERS);
3075 __ LoadRoot(r2, Heap::kTrueValueRootIndex);
3076 if (context()->IsStackValue()) __ push(r2);
3077 __ b(&done);
3078 __ bind(&materialize_false);
3079 PrepareForBailoutForId(expr->MaterializeFalseId(),
3080 BailoutState::NO_REGISTERS);
3081 __ LoadRoot(r2, Heap::kFalseValueRootIndex);
3082 if (context()->IsStackValue()) __ push(r2);
3083 __ bind(&done);
3084 }
3085 break;
3086 }
3087
3088 case Token::TYPEOF: {
3089 Comment cmnt(masm_, "[ UnaryOperation (TYPEOF)");
3090 {
3091 AccumulatorValueContext context(this);
3092 VisitForTypeofValue(expr->expression());
3093 }
3094 __ LoadRR(r5, r2);
3095 TypeofStub typeof_stub(isolate());
3096 __ CallStub(&typeof_stub);
3097 context()->Plug(r2);
3098 break;
3099 }
3100
3101 default:
3102 UNREACHABLE();
3103 }
3104}
3105
3106void FullCodeGenerator::VisitCountOperation(CountOperation* expr) {
3107 DCHECK(expr->expression()->IsValidReferenceExpressionOrThis());
3108
3109 Comment cmnt(masm_, "[ CountOperation");
3110
3111 Property* prop = expr->expression()->AsProperty();
3112 LhsKind assign_type = Property::GetAssignType(prop);
3113
3114 // Evaluate expression and get value.
3115 if (assign_type == VARIABLE) {
3116 DCHECK(expr->expression()->AsVariableProxy()->var() != NULL);
3117 AccumulatorValueContext context(this);
3118 EmitVariableLoad(expr->expression()->AsVariableProxy());
3119 } else {
3120 // Reserve space for result of postfix operation.
3121 if (expr->is_postfix() && !context()->IsEffect()) {
3122 __ LoadSmiLiteral(ip, Smi::FromInt(0));
3123 PushOperand(ip);
3124 }
3125 switch (assign_type) {
3126 case NAMED_PROPERTY: {
3127 // Put the object both on the stack and in the register.
3128 VisitForStackValue(prop->obj());
3129 __ LoadP(LoadDescriptor::ReceiverRegister(), MemOperand(sp, 0));
3130 EmitNamedPropertyLoad(prop);
3131 break;
3132 }
3133
3134 case NAMED_SUPER_PROPERTY: {
3135 VisitForStackValue(prop->obj()->AsSuperPropertyReference()->this_var());
3136 VisitForAccumulatorValue(
3137 prop->obj()->AsSuperPropertyReference()->home_object());
3138 PushOperand(result_register());
3139 const Register scratch = r3;
3140 __ LoadP(scratch, MemOperand(sp, kPointerSize));
3141 PushOperands(scratch, result_register());
3142 EmitNamedSuperPropertyLoad(prop);
3143 break;
3144 }
3145
3146 case KEYED_SUPER_PROPERTY: {
3147 VisitForStackValue(prop->obj()->AsSuperPropertyReference()->this_var());
3148 VisitForAccumulatorValue(
3149 prop->obj()->AsSuperPropertyReference()->home_object());
3150 const Register scratch = r3;
3151 const Register scratch1 = r4;
3152 __ LoadRR(scratch, result_register());
3153 VisitForAccumulatorValue(prop->key());
3154 PushOperands(scratch, result_register());
3155 __ LoadP(scratch1, MemOperand(sp, 2 * kPointerSize));
3156 PushOperands(scratch1, scratch, result_register());
3157 EmitKeyedSuperPropertyLoad(prop);
3158 break;
3159 }
3160
3161 case KEYED_PROPERTY: {
3162 VisitForStackValue(prop->obj());
3163 VisitForStackValue(prop->key());
3164 __ LoadP(LoadDescriptor::ReceiverRegister(),
3165 MemOperand(sp, 1 * kPointerSize));
3166 __ LoadP(LoadDescriptor::NameRegister(), MemOperand(sp, 0));
3167 EmitKeyedPropertyLoad(prop);
3168 break;
3169 }
3170
3171 case VARIABLE:
3172 UNREACHABLE();
3173 }
3174 }
3175
3176 // We need a second deoptimization point after loading the value
3177 // in case evaluating the property load may have a side effect.
3178 if (assign_type == VARIABLE) {
3179 PrepareForBailout(expr->expression(), BailoutState::TOS_REGISTER);
3180 } else {
3181 PrepareForBailoutForId(prop->LoadId(), BailoutState::TOS_REGISTER);
3182 }
3183
3184 // Inline smi case if we are in a loop.
3185 Label stub_call, done;
3186 JumpPatchSite patch_site(masm_);
3187
3188 int count_value = expr->op() == Token::INC ? 1 : -1;
3189 if (ShouldInlineSmiCase(expr->op())) {
3190 Label slow;
3191 patch_site.EmitJumpIfNotSmi(r2, &slow);
3192
3193 // Save result for postfix expressions.
3194 if (expr->is_postfix()) {
3195 if (!context()->IsEffect()) {
3196 // Save the result on the stack. If we have a named or keyed property
3197 // we store the result under the receiver that is currently on top
3198 // of the stack.
3199 switch (assign_type) {
3200 case VARIABLE:
3201 __ push(r2);
3202 break;
3203 case NAMED_PROPERTY:
3204 __ StoreP(r2, MemOperand(sp, kPointerSize));
3205 break;
3206 case NAMED_SUPER_PROPERTY:
3207 __ StoreP(r2, MemOperand(sp, 2 * kPointerSize));
3208 break;
3209 case KEYED_PROPERTY:
3210 __ StoreP(r2, MemOperand(sp, 2 * kPointerSize));
3211 break;
3212 case KEYED_SUPER_PROPERTY:
3213 __ StoreP(r2, MemOperand(sp, 3 * kPointerSize));
3214 break;
3215 }
3216 }
3217 }
3218
3219 Register scratch1 = r3;
3220 Register scratch2 = r4;
3221 __ LoadSmiLiteral(scratch1, Smi::FromInt(count_value));
3222 __ AddAndCheckForOverflow(r2, r2, scratch1, scratch2, r0);
3223 __ BranchOnNoOverflow(&done);
3224 // Call stub. Undo operation first.
3225 __ SubP(r2, r2, scratch1);
3226 __ b(&stub_call);
3227 __ bind(&slow);
3228 }
3229
3230 // Convert old value into a number.
3231 __ Call(isolate()->builtins()->ToNumber(), RelocInfo::CODE_TARGET);
3232 PrepareForBailoutForId(expr->ToNumberId(), BailoutState::TOS_REGISTER);
3233
3234 // Save result for postfix expressions.
3235 if (expr->is_postfix()) {
3236 if (!context()->IsEffect()) {
3237 // Save the result on the stack. If we have a named or keyed property
3238 // we store the result under the receiver that is currently on top
3239 // of the stack.
3240 switch (assign_type) {
3241 case VARIABLE:
3242 PushOperand(r2);
3243 break;
3244 case NAMED_PROPERTY:
3245 __ StoreP(r2, MemOperand(sp, kPointerSize));
3246 break;
3247 case NAMED_SUPER_PROPERTY:
3248 __ StoreP(r2, MemOperand(sp, 2 * kPointerSize));
3249 break;
3250 case KEYED_PROPERTY:
3251 __ StoreP(r2, MemOperand(sp, 2 * kPointerSize));
3252 break;
3253 case KEYED_SUPER_PROPERTY:
3254 __ StoreP(r2, MemOperand(sp, 3 * kPointerSize));
3255 break;
3256 }
3257 }
3258 }
3259
3260 __ bind(&stub_call);
3261 __ LoadRR(r3, r2);
3262 __ LoadSmiLiteral(r2, Smi::FromInt(count_value));
3263
3264 SetExpressionPosition(expr);
3265
3266 Handle<Code> code = CodeFactory::BinaryOpIC(isolate(), Token::ADD).code();
3267 CallIC(code, expr->CountBinOpFeedbackId());
3268 patch_site.EmitPatchInfo();
3269 __ bind(&done);
3270
3271 // Store the value returned in r2.
3272 switch (assign_type) {
3273 case VARIABLE:
3274 if (expr->is_postfix()) {
3275 {
3276 EffectContext context(this);
3277 EmitVariableAssignment(expr->expression()->AsVariableProxy()->var(),
3278 Token::ASSIGN, expr->CountSlot());
3279 PrepareForBailoutForId(expr->AssignmentId(),
3280 BailoutState::TOS_REGISTER);
3281 context.Plug(r2);
3282 }
3283 // For all contexts except EffectContext we have the result on
3284 // top of the stack.
3285 if (!context()->IsEffect()) {
3286 context()->PlugTOS();
3287 }
3288 } else {
3289 EmitVariableAssignment(expr->expression()->AsVariableProxy()->var(),
3290 Token::ASSIGN, expr->CountSlot());
3291 PrepareForBailoutForId(expr->AssignmentId(),
3292 BailoutState::TOS_REGISTER);
3293 context()->Plug(r2);
3294 }
3295 break;
3296 case NAMED_PROPERTY: {
3297 __ mov(StoreDescriptor::NameRegister(),
3298 Operand(prop->key()->AsLiteral()->value()));
3299 PopOperand(StoreDescriptor::ReceiverRegister());
3300 EmitLoadStoreICSlot(expr->CountSlot());
3301 CallStoreIC();
3302 PrepareForBailoutForId(expr->AssignmentId(), BailoutState::TOS_REGISTER);
3303 if (expr->is_postfix()) {
3304 if (!context()->IsEffect()) {
3305 context()->PlugTOS();
3306 }
3307 } else {
3308 context()->Plug(r2);
3309 }
3310 break;
3311 }
3312 case NAMED_SUPER_PROPERTY: {
3313 EmitNamedSuperPropertyStore(prop);
3314 if (expr->is_postfix()) {
3315 if (!context()->IsEffect()) {
3316 context()->PlugTOS();
3317 }
3318 } else {
3319 context()->Plug(r2);
3320 }
3321 break;
3322 }
3323 case KEYED_SUPER_PROPERTY: {
3324 EmitKeyedSuperPropertyStore(prop);
3325 if (expr->is_postfix()) {
3326 if (!context()->IsEffect()) {
3327 context()->PlugTOS();
3328 }
3329 } else {
3330 context()->Plug(r2);
3331 }
3332 break;
3333 }
3334 case KEYED_PROPERTY: {
3335 PopOperands(StoreDescriptor::ReceiverRegister(),
3336 StoreDescriptor::NameRegister());
3337 Handle<Code> ic =
3338 CodeFactory::KeyedStoreIC(isolate(), language_mode()).code();
3339 EmitLoadStoreICSlot(expr->CountSlot());
3340 CallIC(ic);
3341 PrepareForBailoutForId(expr->AssignmentId(), BailoutState::TOS_REGISTER);
3342 if (expr->is_postfix()) {
3343 if (!context()->IsEffect()) {
3344 context()->PlugTOS();
3345 }
3346 } else {
3347 context()->Plug(r2);
3348 }
3349 break;
3350 }
3351 }
3352}
3353
3354void FullCodeGenerator::EmitLiteralCompareTypeof(Expression* expr,
3355 Expression* sub_expr,
3356 Handle<String> check) {
3357 Label materialize_true, materialize_false;
3358 Label* if_true = NULL;
3359 Label* if_false = NULL;
3360 Label* fall_through = NULL;
3361 context()->PrepareTest(&materialize_true, &materialize_false, &if_true,
3362 &if_false, &fall_through);
3363
3364 {
3365 AccumulatorValueContext context(this);
3366 VisitForTypeofValue(sub_expr);
3367 }
3368 PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
3369
3370 Factory* factory = isolate()->factory();
3371 if (String::Equals(check, factory->number_string())) {
3372 __ JumpIfSmi(r2, if_true);
3373 __ LoadP(r2, FieldMemOperand(r2, HeapObject::kMapOffset));
3374 __ CompareRoot(r2, Heap::kHeapNumberMapRootIndex);
3375 Split(eq, if_true, if_false, fall_through);
3376 } else if (String::Equals(check, factory->string_string())) {
3377 __ JumpIfSmi(r2, if_false);
3378 __ CompareObjectType(r2, r2, r3, FIRST_NONSTRING_TYPE);
3379 Split(lt, if_true, if_false, fall_through);
3380 } else if (String::Equals(check, factory->symbol_string())) {
3381 __ JumpIfSmi(r2, if_false);
3382 __ CompareObjectType(r2, r2, r3, SYMBOL_TYPE);
3383 Split(eq, if_true, if_false, fall_through);
3384 } else if (String::Equals(check, factory->boolean_string())) {
3385 __ CompareRoot(r2, Heap::kTrueValueRootIndex);
3386 __ beq(if_true);
3387 __ CompareRoot(r2, Heap::kFalseValueRootIndex);
3388 Split(eq, if_true, if_false, fall_through);
3389 } else if (String::Equals(check, factory->undefined_string())) {
3390 __ CompareRoot(r2, Heap::kNullValueRootIndex);
3391 __ beq(if_false);
3392 __ JumpIfSmi(r2, if_false);
3393 // Check for undetectable objects => true.
3394 __ LoadP(r2, FieldMemOperand(r2, HeapObject::kMapOffset));
3395 __ tm(FieldMemOperand(r2, Map::kBitFieldOffset),
3396 Operand(1 << Map::kIsUndetectable));
3397 Split(ne, if_true, if_false, fall_through);
3398
3399 } else if (String::Equals(check, factory->function_string())) {
3400 __ JumpIfSmi(r2, if_false);
3401 __ LoadP(r2, FieldMemOperand(r2, HeapObject::kMapOffset));
3402 __ LoadlB(r3, FieldMemOperand(r2, Map::kBitFieldOffset));
3403 __ AndP(r3, r3,
3404 Operand((1 << Map::kIsCallable) | (1 << Map::kIsUndetectable)));
3405 __ CmpP(r3, Operand(1 << Map::kIsCallable));
3406 Split(eq, if_true, if_false, fall_through);
3407 } else if (String::Equals(check, factory->object_string())) {
3408 __ JumpIfSmi(r2, if_false);
3409 __ CompareRoot(r2, Heap::kNullValueRootIndex);
3410 __ beq(if_true);
3411 STATIC_ASSERT(LAST_JS_RECEIVER_TYPE == LAST_TYPE);
3412 __ CompareObjectType(r2, r2, r3, FIRST_JS_RECEIVER_TYPE);
3413 __ blt(if_false);
3414 __ tm(FieldMemOperand(r2, Map::kBitFieldOffset),
3415 Operand((1 << Map::kIsCallable) | (1 << Map::kIsUndetectable)));
3416 Split(eq, if_true, if_false, fall_through);
3417// clang-format off
3418#define SIMD128_TYPE(TYPE, Type, type, lane_count, lane_type) \
3419 } else if (String::Equals(check, factory->type##_string())) { \
3420 __ JumpIfSmi(r2, if_false); \
3421 __ LoadP(r2, FieldMemOperand(r2, HeapObject::kMapOffset)); \
3422 __ CompareRoot(r2, Heap::k##Type##MapRootIndex); \
3423 Split(eq, if_true, if_false, fall_through);
3424 SIMD128_TYPES(SIMD128_TYPE)
3425#undef SIMD128_TYPE
3426 // clang-format on
3427 } else {
3428 if (if_false != fall_through) __ b(if_false);
3429 }
3430 context()->Plug(if_true, if_false);
3431}
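// Corner cases the checks above encode: typeof null is "object" (hence
// the explicit null test in the object branch), undetectable objects
// such as document.all report "undefined", and "function" requires the
// callable bit to be set without the undetectable bit.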
3432
3433void FullCodeGenerator::VisitCompareOperation(CompareOperation* expr) {
3434 Comment cmnt(masm_, "[ CompareOperation");
3435
3436 // First we try a fast inlined version of the compare when one of
3437 // the operands is a literal.
3438 if (TryLiteralCompare(expr)) return;
3439
3440 // Always perform the comparison for its control flow. Pack the result
3441 // into the expression's context after the comparison is performed.
3442 Label materialize_true, materialize_false;
3443 Label* if_true = NULL;
3444 Label* if_false = NULL;
3445 Label* fall_through = NULL;
3446 context()->PrepareTest(&materialize_true, &materialize_false, &if_true,
3447 &if_false, &fall_through);
3448
3449 Token::Value op = expr->op();
3450 VisitForStackValue(expr->left());
3451 switch (op) {
3452 case Token::IN:
3453 VisitForStackValue(expr->right());
3454 SetExpressionPosition(expr);
3455 EmitHasProperty();
3456 PrepareForBailoutBeforeSplit(expr, false, NULL, NULL);
3457 __ CompareRoot(r2, Heap::kTrueValueRootIndex);
3458 Split(eq, if_true, if_false, fall_through);
3459 break;
3460
3461 case Token::INSTANCEOF: {
3462 VisitForAccumulatorValue(expr->right());
3463 SetExpressionPosition(expr);
3464 PopOperand(r3);
3465 InstanceOfStub stub(isolate());
3466 __ CallStub(&stub);
3467 PrepareForBailoutBeforeSplit(expr, false, NULL, NULL);
3468 __ CompareRoot(r2, Heap::kTrueValueRootIndex);
3469 Split(eq, if_true, if_false, fall_through);
3470 break;
3471 }
3472
3473 default: {
3474 VisitForAccumulatorValue(expr->right());
3475 SetExpressionPosition(expr);
3476 Condition cond = CompareIC::ComputeCondition(op);
3477 PopOperand(r3);
3478
3479 bool inline_smi_code = ShouldInlineSmiCase(op);
3480 JumpPatchSite patch_site(masm_);
3481 if (inline_smi_code) {
3482 Label slow_case;
3483 __ LoadRR(r4, r3);
3484 __ OrP(r4, r2);
3485 patch_site.EmitJumpIfNotSmi(r4, &slow_case);
3486 __ CmpP(r3, r2);
3487 Split(cond, if_true, if_false, NULL);
3488 __ bind(&slow_case);
3489 }
3490
3491 Handle<Code> ic = CodeFactory::CompareIC(isolate(), op).code();
3492 CallIC(ic, expr->CompareOperationFeedbackId());
3493 patch_site.EmitPatchInfo();
3494 PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
3495 __ CmpP(r2, Operand::Zero());
3496 Split(cond, if_true, if_false, fall_through);
3497 }
3498 }
3499
3500 // Convert the result of the comparison into one expected for this
3501 // expression's context.
3502 context()->Plug(if_true, if_false);
3503}
3504
3505void FullCodeGenerator::EmitLiteralCompareNil(CompareOperation* expr,
3506 Expression* sub_expr,
3507 NilValue nil) {
3508 Label materialize_true, materialize_false;
3509 Label* if_true = NULL;
3510 Label* if_false = NULL;
3511 Label* fall_through = NULL;
3512 context()->PrepareTest(&materialize_true, &materialize_false, &if_true,
3513 &if_false, &fall_through);
3514
3515 VisitForAccumulatorValue(sub_expr);
3516 PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
3517 if (expr->op() == Token::EQ_STRICT) {
3518 Heap::RootListIndex nil_value = nil == kNullValue
3519 ? Heap::kNullValueRootIndex
3520 : Heap::kUndefinedValueRootIndex;
3521 __ CompareRoot(r2, nil_value);
3522 Split(eq, if_true, if_false, fall_through);
3523 } else {
3524 __ JumpIfSmi(r2, if_false);
3525 __ LoadP(r2, FieldMemOperand(r2, HeapObject::kMapOffset));
3526 __ LoadlB(r3, FieldMemOperand(r2, Map::kBitFieldOffset));
3527 __ AndP(r0, r3, Operand(1 << Map::kIsUndetectable));
3528 Split(ne, if_true, if_false, fall_through);
3529 }
3530 context()->Plug(if_true, if_false);
3531}
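// For the non-strict case (x == null or x == undefined) abstract
// equality holds exactly for null, undefined and undetectable objects;
// the single undetectable-bit test relies on the null and undefined
// oddball maps being marked undetectable.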
3532Register FullCodeGenerator::result_register() { return r2; }
3533
3534Register FullCodeGenerator::context_register() { return cp; }
3535
3536void FullCodeGenerator::LoadFromFrameField(int frame_offset, Register value) {
3537 DCHECK_EQ(static_cast<int>(POINTER_SIZE_ALIGN(frame_offset)), frame_offset);
3538 __ LoadP(value, MemOperand(fp, frame_offset));
3539}
3540
3541void FullCodeGenerator::StoreToFrameField(int frame_offset, Register value) {
3542 DCHECK_EQ(static_cast<int>(POINTER_SIZE_ALIGN(frame_offset)), frame_offset);
3543 __ StoreP(value, MemOperand(fp, frame_offset));
3544}
3545
3546void FullCodeGenerator::LoadContextField(Register dst, int context_index) {
3547 __ LoadP(dst, ContextMemOperand(cp, context_index), r0);
3548}
3549
3550void FullCodeGenerator::PushFunctionArgumentForContextAllocation() {
3551 Scope* closure_scope = scope()->ClosureScope();
3552 if (closure_scope->is_script_scope() || closure_scope->is_module_scope()) {
3553 // Contexts nested in the native context have a canonical empty function
3554 // as their closure, not the anonymous closure containing the global
3555 // code.
3556 __ LoadNativeContextSlot(Context::CLOSURE_INDEX, ip);
3557 } else if (closure_scope->is_eval_scope()) {
3558 // Contexts created by a call to eval have the same closure as the
3559 // context calling eval, not the anonymous closure containing the eval
3560 // code. Fetch it from the context.
3561 __ LoadP(ip, ContextMemOperand(cp, Context::CLOSURE_INDEX));
3562 } else {
3563 DCHECK(closure_scope->is_function_scope());
3564 __ LoadP(ip, MemOperand(fp, JavaScriptFrameConstants::kFunctionOffset));
3565 }
3566 PushOperand(ip);
3567}
3568
3569// ----------------------------------------------------------------------------
3570// Non-local control flow support.
3571
3572void FullCodeGenerator::EnterFinallyBlock() {
3573 DCHECK(!result_register().is(r3));
3574 // Store pending message while executing finally block.
3575 ExternalReference pending_message_obj =
3576 ExternalReference::address_of_pending_message_obj(isolate());
3577 __ mov(ip, Operand(pending_message_obj));
3578 __ LoadP(r3, MemOperand(ip));
3579 PushOperand(r3);
3580
3581 ClearPendingMessage();
3582}
3583
3584void FullCodeGenerator::ExitFinallyBlock() {
3585 DCHECK(!result_register().is(r3));
3586 // Restore pending message from stack.
3587 PopOperand(r3);
3588 ExternalReference pending_message_obj =
3589 ExternalReference::address_of_pending_message_obj(isolate());
3590 __ mov(ip, Operand(pending_message_obj));
3591 __ StoreP(r3, MemOperand(ip));
3592}
3593
3594void FullCodeGenerator::ClearPendingMessage() {
3595 DCHECK(!result_register().is(r3));
3596 ExternalReference pending_message_obj =
3597 ExternalReference::address_of_pending_message_obj(isolate());
3598 __ LoadRoot(r3, Heap::kTheHoleValueRootIndex);
3599 __ mov(ip, Operand(pending_message_obj));
3600 __ StoreP(r3, MemOperand(ip));
3601}
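// The three helpers above implement the pending-message protocol around
// finally blocks: the message accompanying an in-flight exception is
// saved on the operand stack on entry, cleared (set to the hole) while
// the finally body runs, and restored on exit so a rethrow still
// reports the original message.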
3602
3603void FullCodeGenerator::DeferredCommands::EmitCommands() {
3604 DCHECK(!result_register().is(r3));
3605 // Restore the accumulator (r2) and token (r3).
3606 __ Pop(r3, result_register());
3607 for (DeferredCommand cmd : commands_) {
3608 Label skip;
3609 __ CmpSmiLiteral(r3, Smi::FromInt(cmd.token), r0);
3610 __ bne(&skip);
3611 switch (cmd.command) {
3612 case kReturn:
3613 codegen_->EmitUnwindAndReturn();
3614 break;
3615 case kThrow:
3616 __ Push(result_register());
3617 __ CallRuntime(Runtime::kReThrow);
3618 break;
3619 case kContinue:
3620 codegen_->EmitContinue(cmd.target);
3621 break;
3622 case kBreak:
3623 codegen_->EmitBreak(cmd.target);
3624 break;
3625 }
3626 __ bind(&skip);
3627 }
3628}
3629
3630#undef __
3631
3632#if V8_TARGET_ARCH_S390X
3633static const FourByteInstr kInterruptBranchInstruction = 0xA7A40011;
3634static const FourByteInstr kOSRBranchInstruction = 0xA7040011;
3635static const int16_t kBackEdgeBranchOffset = 0x11 * 2;
3636#else
3637static const FourByteInstr kInterruptBranchInstruction = 0xA7A4000D;
3638static const FourByteInstr kOSRBranchInstruction = 0xA704000D;
3639static const int16_t kBackEdgeBranchOffset = 0xD * 2;
3640#endif
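// The constants above are the raw RI-format BRC encodings used for
// back-edge patching: opcode 0xA7x4 where the x nibble is the condition
// mask (0xA = "ge", 0x0 = never taken, i.e. an effective nop) and the
// low 16 bits are the branch distance in halfwords (0x11 resp. 0xD
// halfwords, i.e. kBackEdgeBranchOffset bytes).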
3641
3642void BackEdgeTable::PatchAt(Code* unoptimized_code, Address pc,
3643 BackEdgeState target_state,
3644 Code* replacement_code) {
3645 Address call_address = Assembler::target_address_from_return_address(pc);
3646 Address branch_address = call_address - 4;
3647 Isolate* isolate = unoptimized_code->GetIsolate();
3648 CodePatcher patcher(isolate, branch_address, 4);
3649
3650 switch (target_state) {
3651 case INTERRUPT: {
3652 // <decrement profiling counter>
3653 // bge <ok> ;; patched to GE BRC
3654 // brasl r14, <interrupt stub address>
3655 // <reset profiling counter>
3656 // ok-label
3657 patcher.masm()->brc(ge, Operand(kBackEdgeBranchOffset));
3658 break;
3659 }
3660 case ON_STACK_REPLACEMENT:
3661 // <decrement profiling counter>
3662 // brc 0x0, <ok> ;; patched to NOP BRC
3663 // brasl r14, <interrupt stub address>
3664 // <reset profiling counter>
3665 // ok-label ----- pc_after points here
3666 patcher.masm()->brc(CC_NOP, Operand(kBackEdgeBranchOffset));
3667 break;
3668 }
3669
3670 // Replace the stack check address in the mov sequence with the
3671 // entry address of the replacement code.
3672 Assembler::set_target_address_at(isolate, call_address, unoptimized_code,
3673 replacement_code->entry());
3674
3675 unoptimized_code->GetHeap()->incremental_marking()->RecordCodeTargetPatch(
3676 unoptimized_code, call_address, replacement_code);
3677}
3678
3679BackEdgeTable::BackEdgeState BackEdgeTable::GetBackEdgeState(
3680 Isolate* isolate, Code* unoptimized_code, Address pc) {
3681 Address call_address = Assembler::target_address_from_return_address(pc);
3682 Address branch_address = call_address - 4;
3683#ifdef DEBUG
3684 Address interrupt_address =
3685 Assembler::target_address_at(call_address, unoptimized_code);
3686#endif
3687
3688 DCHECK(BRC == Instruction::S390OpcodeValue(branch_address));
3689 // For interrupt, we expect a branch on greater-than-or-equal,
3690 // i.e. BRC 0xa, +XXXX (0xA7A4XXXX)
3691 FourByteInstr br_instr = Instruction::InstructionBits(
3692 reinterpret_cast<const byte*>(branch_address));
3693 if (kInterruptBranchInstruction == br_instr) {
3694 DCHECK(interrupt_address == isolate->builtins()->InterruptCheck()->entry());
3695 return INTERRUPT;
3696 }
3697
3698 // Expect BRC to be patched to NOP branch.
3699 // i.e. BRC 0x0, +XXXX (0xA704XXXX)
3700 USE(kOSRBranchInstruction);
3701 DCHECK(kOSRBranchInstruction == br_instr);
3702
3703 DCHECK(interrupt_address ==
3704 isolate->builtins()->OnStackReplacement()->entry());
3705 return ON_STACK_REPLACEMENT;
3706}
3707
3708} // namespace internal
3709} // namespace v8
3710#endif // V8_TARGET_ARCH_S390