// Copyright 2014 the V8 project authors. All rights reserved.
2// Use of this source code is governed by a BSD-style license that can be
3// found in the LICENSE file.
4
5#if V8_TARGET_ARCH_PPC
6
7#include "src/ast/scopes.h"
8#include "src/code-factory.h"
9#include "src/code-stubs.h"
10#include "src/codegen.h"
11#include "src/debug/debug.h"
12#include "src/full-codegen/full-codegen.h"
13#include "src/ic/ic.h"
14#include "src/parsing/parser.h"
15
16#include "src/ppc/code-stubs-ppc.h"
17#include "src/ppc/macro-assembler-ppc.h"
18
19namespace v8 {
20namespace internal {
21
#define __ ACCESS_MASM(masm())

// A patch site is a location in the code that can be patched later. This
// class has a number of methods to emit the patchable code and the method
// EmitPatchInfo to record a marker back to the patchable code. The marker
// is a cmpi rx, #yyy instruction, where x * 0x0000ffff + yyy (using the raw
// 16-bit immediate value) is the delta from the pc to the first instruction
// of the patchable code.
// See PatchInlinedSmiCode in ic-ppc.cc for the code that patches it.
31class JumpPatchSite BASE_EMBEDDED {
32 public:
33 explicit JumpPatchSite(MacroAssembler* masm) : masm_(masm) {
34#ifdef DEBUG
35 info_emitted_ = false;
36#endif
37 }
38
39 ~JumpPatchSite() { DCHECK(patch_site_.is_bound() == info_emitted_); }
40
  // When initially emitting this code, ensure that a jump is always generated
  // to skip the inlined smi code.
43 void EmitJumpIfNotSmi(Register reg, Label* target) {
44 DCHECK(!patch_site_.is_bound() && !info_emitted_);
45 Assembler::BlockTrampolinePoolScope block_trampoline_pool(masm_);
46 __ bind(&patch_site_);
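    // Comparing a register with itself always sets the EQ bit of cr0, so the
    // branch below is taken until PatchInlinedSmiCode rewrites this site.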
47 __ cmp(reg, reg, cr0);
48 __ beq(target, cr0); // Always taken before patched.
49 }
50
  // When initially emitting this code, ensure that a jump is never generated
  // to skip the inlined smi code.
53 void EmitJumpIfSmi(Register reg, Label* target) {
54 Assembler::BlockTrampolinePoolScope block_trampoline_pool(masm_);
55 DCHECK(!patch_site_.is_bound() && !info_emitted_);
56 __ bind(&patch_site_);
57 __ cmp(reg, reg, cr0);
58 __ bne(target, cr0); // Never taken before patched.
59 }
60
61 void EmitPatchInfo() {
62 if (patch_site_.is_bound()) {
63 int delta_to_patch_site = masm_->InstructionsGeneratedSince(&patch_site_);
      Register reg;
      // reg encodes the high part of the delta to the patch site; the 16-bit
      // immediate below holds the remainder (see the class comment above).
      reg.set_code(delta_to_patch_site / kOff16Mask);
      __ cmpi(reg, Operand(delta_to_patch_site % kOff16Mask));
68#ifdef DEBUG
69 info_emitted_ = true;
70#endif
71 } else {
72 __ nop(); // Signals no inlined code.
73 }
74 }
75
76 private:
  MacroAssembler* masm() { return masm_; }
  MacroAssembler* masm_;
79 Label patch_site_;
80#ifdef DEBUG
81 bool info_emitted_;
82#endif
83};
84
85
86// Generate code for a JS function. On entry to the function the receiver
87// and arguments have been pushed on the stack left to right. The actual
88// argument count matches the formal parameter count expected by the
89// function.
90//
91// The live registers are:
92// o r4: the JS function object being called (i.e., ourselves)
93// o r6: the new target value
94// o cp: our context
95// o fp: our caller's frame pointer (aka r31)
96// o sp: stack pointer
97// o lr: return address
98// o ip: our own function entry (required by the prologue)
99//
100// The function builds a JS frame. Please see JavaScriptFrameConstants in
101// frames-ppc.h for its layout.
102void FullCodeGenerator::Generate() {
103 CompilationInfo* info = info_;
104 profiling_counter_ = isolate()->factory()->NewCell(
105 Handle<Smi>(Smi::FromInt(FLAG_interrupt_budget), isolate()));
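  // The cell holds the remaining interrupt budget as a smi; back edges and
  // returns decrement it and call the InterruptCheck builtin once it goes
  // negative.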
106 SetFunctionPosition(literal());
107 Comment cmnt(masm_, "[ function compiled by full code generator");
108
109 ProfileEntryHookStub::MaybeCallEntryHook(masm_);
110
  if (FLAG_debug_code && info->ExpectsJSReceiverAsReceiver()) {
112 int receiver_offset = info->scope()->num_parameters() * kPointerSize;
113 __ LoadP(r5, MemOperand(sp, receiver_offset), r0);
114 __ AssertNotSmi(r5);
115 __ CompareObjectType(r5, r5, no_reg, FIRST_JS_RECEIVER_TYPE);
116 __ Assert(ge, kSloppyFunctionExpectsJSReceiverReceiver);
117 }
118
119 // Open a frame scope to indicate that there is a frame on the stack. The
120 // MANUAL indicates that the scope shouldn't actually generate code to set up
121 // the frame (that is done below).
122 FrameScope frame_scope(masm_, StackFrame::MANUAL);
123 int prologue_offset = masm_->pc_offset();
124
125 if (prologue_offset) {
    // Prologue logic requires its starting address in ip and the
    // corresponding offset from the function entry.
128 prologue_offset += Instruction::kInstrSize;
129 __ addi(ip, ip, Operand(prologue_offset));
130 }
131 info->set_prologue_offset(prologue_offset);
132 __ Prologue(info->GeneratePreagedPrologue(), ip, prologue_offset);
133
134 {
135 Comment cmnt(masm_, "[ Allocate locals");
136 int locals_count = info->scope()->num_stack_slots();
137 // Generators allocate locals, if any, in context slots.
138 DCHECK(!IsGeneratorFunction(info->literal()->kind()) || locals_count == 0);
    OperandStackDepthIncrement(locals_count);
    if (locals_count > 0) {
141 if (locals_count >= 128) {
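        // Frames this large may overflow the stack; check against the real
        // stack limit before reserving the space.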
142 Label ok;
143 __ Add(ip, sp, -(locals_count * kPointerSize), r0);
144 __ LoadRoot(r5, Heap::kRealStackLimitRootIndex);
145 __ cmpl(ip, r5);
146 __ bc_short(ge, &ok);
147 __ CallRuntime(Runtime::kThrowStackOverflow);
148 __ bind(&ok);
149 }
150 __ LoadRoot(ip, Heap::kUndefinedValueRootIndex);
151 int kMaxPushes = FLAG_optimize_for_size ? 4 : 32;
152 if (locals_count >= kMaxPushes) {
153 int loop_iterations = locals_count / kMaxPushes;
154 __ mov(r5, Operand(loop_iterations));
155 __ mtctr(r5);
156 Label loop_header;
157 __ bind(&loop_header);
158 // Do pushes.
159 for (int i = 0; i < kMaxPushes; i++) {
160 __ push(ip);
161 }
162 // Continue loop if not done.
163 __ bdnz(&loop_header);
164 }
165 int remaining = locals_count % kMaxPushes;
166 // Emit the remaining pushes.
167 for (int i = 0; i < remaining; i++) {
168 __ push(ip);
169 }
170 }
171 }
172
173 bool function_in_register_r4 = true;
174
175 // Possibly allocate a local context.
176 if (info->scope()->num_heap_slots() > 0) {
177 // Argument to NewContext is the function, which is still in r4.
178 Comment cmnt(masm_, "[ Allocate context");
179 bool need_write_barrier = true;
180 int slots = info->scope()->num_heap_slots() - Context::MIN_CONTEXT_SLOTS;
181 if (info->scope()->is_script_scope()) {
182 __ push(r4);
183 __ Push(info->scope()->GetScopeInfo(info->isolate()));
184 __ CallRuntime(Runtime::kNewScriptContext);
185 PrepareForBailoutForId(BailoutId::ScriptContext(), TOS_REG);
186 // The new target value is not used, clobbering is safe.
187 DCHECK_NULL(info->scope()->new_target_var());
188 } else {
189 if (info->scope()->new_target_var() != nullptr) {
190 __ push(r6); // Preserve new target.
191 }
192 if (slots <= FastNewContextStub::kMaximumSlots) {
193 FastNewContextStub stub(isolate(), slots);
194 __ CallStub(&stub);
195 // Result of FastNewContextStub is always in new space.
196 need_write_barrier = false;
197 } else {
198 __ push(r4);
199 __ CallRuntime(Runtime::kNewFunctionContext);
200 }
201 if (info->scope()->new_target_var() != nullptr) {
202 __ pop(r6); // Preserve new target.
203 }
204 }
205 function_in_register_r4 = false;
206 // Context is returned in r3. It replaces the context passed to us.
207 // It's saved in the stack and kept live in cp.
208 __ mr(cp, r3);
209 __ StoreP(r3, MemOperand(fp, StandardFrameConstants::kContextOffset));
210 // Copy any necessary parameters into the context.
211 int num_parameters = info->scope()->num_parameters();
212 int first_parameter = info->scope()->has_this_declaration() ? -1 : 0;
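    // Index -1 denotes the receiver; actual parameters start at index 0.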
213 for (int i = first_parameter; i < num_parameters; i++) {
214 Variable* var = (i == -1) ? scope()->receiver() : scope()->parameter(i);
215 if (var->IsContextSlot()) {
216 int parameter_offset = StandardFrameConstants::kCallerSPOffset +
217 (num_parameters - 1 - i) * kPointerSize;
218 // Load parameter from stack.
219 __ LoadP(r3, MemOperand(fp, parameter_offset), r0);
220 // Store it in the context.
221 MemOperand target = ContextMemOperand(cp, var->index());
222 __ StoreP(r3, target, r0);
223
224 // Update the write barrier.
225 if (need_write_barrier) {
226 __ RecordWriteContextSlot(cp, target.offset(), r3, r5,
227 kLRHasBeenSaved, kDontSaveFPRegs);
228 } else if (FLAG_debug_code) {
229 Label done;
230 __ JumpIfInNewSpace(cp, r3, &done);
231 __ Abort(kExpectedNewSpaceObject);
232 __ bind(&done);
233 }
234 }
235 }
236 }
237
  // The registers holding this function and the new target are both trashed if
  // we bail out here. But since that can happen only when the new target is not
  // used and we allocate a context, the value of |function_in_register_r4| is
  // correct.
241 PrepareForBailoutForId(BailoutId::FunctionContext(), NO_REGISTERS);
242
243 // Possibly set up a local binding to the this function which is used in
244 // derived constructors with super calls.
245 Variable* this_function_var = scope()->this_function_var();
246 if (this_function_var != nullptr) {
247 Comment cmnt(masm_, "[ This function");
248 if (!function_in_register_r4) {
249 __ LoadP(r4, MemOperand(fp, JavaScriptFrameConstants::kFunctionOffset));
      // The write barrier below clobbers the register again, so keep it marked
      // as not holding the function.
251 }
252 SetVar(this_function_var, r4, r3, r5);
253 }
254
255 // Possibly set up a local binding to the new target value.
256 Variable* new_target_var = scope()->new_target_var();
257 if (new_target_var != nullptr) {
258 Comment cmnt(masm_, "[ new.target");
259 SetVar(new_target_var, r6, r3, r5);
260 }
261
  // Possibly allocate a rest parameter array.
263 int rest_index;
264 Variable* rest_param = scope()->rest_parameter(&rest_index);
265 if (rest_param) {
266 Comment cmnt(masm_, "[ Allocate rest parameter array");
    if (!function_in_register_r4) {
268 __ LoadP(r4, MemOperand(fp, JavaScriptFrameConstants::kFunctionOffset));
269 }
270 FastNewRestParameterStub stub(isolate());
    __ CallStub(&stub);
    function_in_register_r4 = false;
    SetVar(rest_param, r3, r4, r5);
274 }
275
276 Variable* arguments = scope()->arguments();
277 if (arguments != NULL) {
278 // Function uses arguments object.
279 Comment cmnt(masm_, "[ Allocate arguments object");
    if (!function_in_register_r4) {
281 // Load this again, if it's used by the local context below.
282 __ LoadP(r4, MemOperand(fp, JavaScriptFrameConstants::kFunctionOffset));
283 }
    if (is_strict(language_mode()) || !has_simple_parameters()) {
285 FastNewStrictArgumentsStub stub(isolate());
286 __ CallStub(&stub);
287 } else if (literal()->has_duplicate_parameters()) {
288 __ Push(r4);
289 __ CallRuntime(Runtime::kNewSloppyArguments_Generic);
290 } else {
291 FastNewSloppyArgumentsStub stub(isolate());
292 __ CallStub(&stub);
    }

    SetVar(arguments, r3, r4, r5);
296 }
297
298 if (FLAG_trace) {
299 __ CallRuntime(Runtime::kTraceEnter);
300 }
301
302 // Visit the declarations and body unless there is an illegal
303 // redeclaration.
304 if (scope()->HasIllegalRedeclaration()) {
305 Comment cmnt(masm_, "[ Declarations");
306 VisitForEffect(scope()->GetIllegalRedeclaration());
307
308 } else {
309 PrepareForBailoutForId(BailoutId::FunctionEntry(), NO_REGISTERS);
310 {
311 Comment cmnt(masm_, "[ Declarations");
312 VisitDeclarations(scope()->declarations());
313 }
314
315 // Assert that the declarations do not use ICs. Otherwise the debugger
316 // won't be able to redirect a PC at an IC to the correct IC in newly
317 // recompiled code.
318 DCHECK_EQ(0, ic_total_count_);
319
320 {
321 Comment cmnt(masm_, "[ Stack check");
322 PrepareForBailoutForId(BailoutId::Declarations(), NO_REGISTERS);
323 Label ok;
324 __ LoadRoot(ip, Heap::kStackLimitRootIndex);
325 __ cmpl(sp, ip);
326 __ bc_short(ge, &ok);
327 __ Call(isolate()->builtins()->StackCheck(), RelocInfo::CODE_TARGET);
328 __ bind(&ok);
329 }
330
331 {
332 Comment cmnt(masm_, "[ Body");
333 DCHECK(loop_depth() == 0);
334 VisitStatements(literal()->body());
335 DCHECK(loop_depth() == 0);
336 }
337 }
338
339 // Always emit a 'return undefined' in case control fell off the end of
340 // the body.
341 {
342 Comment cmnt(masm_, "[ return <undefined>;");
343 __ LoadRoot(r3, Heap::kUndefinedValueRootIndex);
344 }
345 EmitReturnSequence();
346
347 if (HasStackOverflow()) {
348 masm_->AbortConstantPoolBuilding();
349 }
350}
351
352
353void FullCodeGenerator::ClearAccumulator() {
354 __ LoadSmiLiteral(r3, Smi::FromInt(0));
355}
356
357
358void FullCodeGenerator::EmitProfilingCounterDecrement(int delta) {
359 __ mov(r5, Operand(profiling_counter_));
360 __ LoadP(r6, FieldMemOperand(r5, Cell::kValueOffset));
361 __ SubSmiLiteral(r6, r6, Smi::FromInt(delta), r0);
362 __ StoreP(r6, FieldMemOperand(r5, Cell::kValueOffset), r0);
363}
364
365
366void FullCodeGenerator::EmitProfilingCounterReset() {
367 int reset_value = FLAG_interrupt_budget;
368 __ mov(r5, Operand(profiling_counter_));
369 __ LoadSmiLiteral(r6, Smi::FromInt(reset_value));
370 __ StoreP(r6, FieldMemOperand(r5, Cell::kValueOffset), r0);
371}
372
373
374void FullCodeGenerator::EmitBackEdgeBookkeeping(IterationStatement* stmt,
375 Label* back_edge_target) {
376 Comment cmnt(masm_, "[ Back edge bookkeeping");
377 Label ok;
378
379 DCHECK(back_edge_target->is_bound());
380 int distance = masm_->SizeOfCodeGeneratedSince(back_edge_target) +
381 kCodeSizeMultiplier / 2;
382 int weight = Min(kMaxBackEdgeWeight, Max(1, distance / kCodeSizeMultiplier));
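  // The weight scales with the amount of code generated since the back edge
  // target, so larger loop bodies drain the interrupt budget faster.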
383 EmitProfilingCounterDecrement(weight);
384 {
385 Assembler::BlockTrampolinePoolScope block_trampoline_pool(masm_);
386 Assembler::BlockConstantPoolEntrySharingScope prevent_entry_sharing(masm_);
387 // BackEdgeTable::PatchAt manipulates this sequence.
388 __ cmpi(r6, Operand::Zero());
389 __ bc_short(ge, &ok);
390 __ Call(isolate()->builtins()->InterruptCheck(), RelocInfo::CODE_TARGET);
391
392 // Record a mapping of this PC offset to the OSR id. This is used to find
393 // the AST id from the unoptimized code in order to use it as a key into
394 // the deoptimization input data found in the optimized code.
395 RecordBackEdge(stmt->OsrEntryId());
396 }
397 EmitProfilingCounterReset();
398
399 __ bind(&ok);
400 PrepareForBailoutForId(stmt->EntryId(), NO_REGISTERS);
401 // Record a mapping of the OSR id to this PC. This is used if the OSR
402 // entry becomes the target of a bailout. We don't expect it to be, but
403 // we want it to work if it is.
404 PrepareForBailoutForId(stmt->OsrEntryId(), NO_REGISTERS);
405}
406
void FullCodeGenerator::EmitProfilingCounterHandlingForReturnSequence(
408 bool is_tail_call) {
409 // Pretend that the exit is a backwards jump to the entry.
410 int weight = 1;
411 if (info_->ShouldSelfOptimize()) {
412 weight = FLAG_interrupt_budget / FLAG_self_opt_count;
413 } else {
414 int distance = masm_->pc_offset() + kCodeSizeMultiplier / 2;
415 weight = Min(kMaxBackEdgeWeight, Max(1, distance / kCodeSizeMultiplier));
416 }
417 EmitProfilingCounterDecrement(weight);
418 Label ok;
419 __ cmpi(r6, Operand::Zero());
420 __ bge(&ok);
421 // Don't need to save result register if we are going to do a tail call.
422 if (!is_tail_call) {
423 __ push(r3);
424 }
425 __ Call(isolate()->builtins()->InterruptCheck(), RelocInfo::CODE_TARGET);
426 if (!is_tail_call) {
427 __ pop(r3);
428 }
429 EmitProfilingCounterReset();
430 __ bind(&ok);
}

void FullCodeGenerator::EmitReturnSequence() {
434 Comment cmnt(masm_, "[ Return sequence");
435 if (return_label_.is_bound()) {
436 __ b(&return_label_);
437 } else {
438 __ bind(&return_label_);
439 if (FLAG_trace) {
440 // Push the return value on the stack as the parameter.
441 // Runtime::TraceExit returns its parameter in r3
442 __ push(r3);
443 __ CallRuntime(Runtime::kTraceExit);
444 }
    EmitProfilingCounterHandlingForReturnSequence(false);

    // Make sure that the constant pool is not emitted inside of the return
448 // sequence.
449 {
450 Assembler::BlockTrampolinePoolScope block_trampoline_pool(masm_);
451 int32_t arg_count = info_->scope()->num_parameters() + 1;
452 int32_t sp_delta = arg_count * kPointerSize;
453 SetReturnPosition(literal());
454 __ LeaveFrame(StackFrame::JAVA_SCRIPT, sp_delta);
455 __ blr();
456 }
457 }
458}
459
460
461void FullCodeGenerator::StackValueContext::Plug(Variable* var) const {
462 DCHECK(var->IsStackAllocated() || var->IsContextSlot());
463 codegen()->GetVar(result_register(), var);
  codegen()->PushOperand(result_register());
}
466
467
468void FullCodeGenerator::EffectContext::Plug(Heap::RootListIndex index) const {}
469
470
471void FullCodeGenerator::AccumulatorValueContext::Plug(
472 Heap::RootListIndex index) const {
473 __ LoadRoot(result_register(), index);
474}
475
476
477void FullCodeGenerator::StackValueContext::Plug(
478 Heap::RootListIndex index) const {
479 __ LoadRoot(result_register(), index);
  codegen()->PushOperand(result_register());
}
482
483
484void FullCodeGenerator::TestContext::Plug(Heap::RootListIndex index) const {
485 codegen()->PrepareForBailoutBeforeSplit(condition(), true, true_label_,
486 false_label_);
487 if (index == Heap::kUndefinedValueRootIndex ||
488 index == Heap::kNullValueRootIndex ||
489 index == Heap::kFalseValueRootIndex) {
490 if (false_label_ != fall_through_) __ b(false_label_);
491 } else if (index == Heap::kTrueValueRootIndex) {
492 if (true_label_ != fall_through_) __ b(true_label_);
493 } else {
494 __ LoadRoot(result_register(), index);
495 codegen()->DoTest(this);
496 }
497}
498
499
500void FullCodeGenerator::EffectContext::Plug(Handle<Object> lit) const {}
501
502
503void FullCodeGenerator::AccumulatorValueContext::Plug(
504 Handle<Object> lit) const {
505 __ mov(result_register(), Operand(lit));
506}
507
508
509void FullCodeGenerator::StackValueContext::Plug(Handle<Object> lit) const {
510 // Immediates cannot be pushed directly.
511 __ mov(result_register(), Operand(lit));
  codegen()->PushOperand(result_register());
}
514
515
516void FullCodeGenerator::TestContext::Plug(Handle<Object> lit) const {
517 codegen()->PrepareForBailoutBeforeSplit(condition(), true, true_label_,
518 false_label_);
  DCHECK(lit->IsNull() || lit->IsUndefined() || !lit->IsUndetectableObject());
  if (lit->IsUndefined() || lit->IsNull() || lit->IsFalse()) {
521 if (false_label_ != fall_through_) __ b(false_label_);
522 } else if (lit->IsTrue() || lit->IsJSObject()) {
523 if (true_label_ != fall_through_) __ b(true_label_);
524 } else if (lit->IsString()) {
525 if (String::cast(*lit)->length() == 0) {
526 if (false_label_ != fall_through_) __ b(false_label_);
527 } else {
528 if (true_label_ != fall_through_) __ b(true_label_);
529 }
530 } else if (lit->IsSmi()) {
531 if (Smi::cast(*lit)->value() == 0) {
532 if (false_label_ != fall_through_) __ b(false_label_);
533 } else {
534 if (true_label_ != fall_through_) __ b(true_label_);
535 }
536 } else {
537 // For simplicity we always test the accumulator register.
538 __ mov(result_register(), Operand(lit));
539 codegen()->DoTest(this);
540 }
541}
542
543
void FullCodeGenerator::StackValueContext::DropAndPlug(int count,
545 Register reg) const {
546 DCHECK(count > 0);
  if (count > 1) codegen()->DropOperands(count - 1);
  __ StoreP(reg, MemOperand(sp, 0));
549}
550
551
void FullCodeGenerator::EffectContext::Plug(Label* materialize_true,
553 Label* materialize_false) const {
554 DCHECK(materialize_true == materialize_false);
555 __ bind(materialize_true);
556}
557
558
559void FullCodeGenerator::AccumulatorValueContext::Plug(
560 Label* materialize_true, Label* materialize_false) const {
561 Label done;
562 __ bind(materialize_true);
563 __ LoadRoot(result_register(), Heap::kTrueValueRootIndex);
564 __ b(&done);
565 __ bind(materialize_false);
566 __ LoadRoot(result_register(), Heap::kFalseValueRootIndex);
567 __ bind(&done);
568}
569
570
571void FullCodeGenerator::StackValueContext::Plug(
572 Label* materialize_true, Label* materialize_false) const {
573 Label done;
574 __ bind(materialize_true);
575 __ LoadRoot(ip, Heap::kTrueValueRootIndex);
576 __ b(&done);
577 __ bind(materialize_false);
578 __ LoadRoot(ip, Heap::kFalseValueRootIndex);
579 __ bind(&done);
  codegen()->PushOperand(ip);
}
582
583
584void FullCodeGenerator::TestContext::Plug(Label* materialize_true,
585 Label* materialize_false) const {
586 DCHECK(materialize_true == true_label_);
587 DCHECK(materialize_false == false_label_);
588}
589
590
591void FullCodeGenerator::AccumulatorValueContext::Plug(bool flag) const {
592 Heap::RootListIndex value_root_index =
593 flag ? Heap::kTrueValueRootIndex : Heap::kFalseValueRootIndex;
594 __ LoadRoot(result_register(), value_root_index);
595}
596
597
598void FullCodeGenerator::StackValueContext::Plug(bool flag) const {
599 Heap::RootListIndex value_root_index =
600 flag ? Heap::kTrueValueRootIndex : Heap::kFalseValueRootIndex;
601 __ LoadRoot(ip, value_root_index);
  codegen()->PushOperand(ip);
}
604
605
606void FullCodeGenerator::TestContext::Plug(bool flag) const {
607 codegen()->PrepareForBailoutBeforeSplit(condition(), true, true_label_,
608 false_label_);
609 if (flag) {
610 if (true_label_ != fall_through_) __ b(true_label_);
611 } else {
612 if (false_label_ != fall_through_) __ b(false_label_);
613 }
614}
615
616
617void FullCodeGenerator::DoTest(Expression* condition, Label* if_true,
618 Label* if_false, Label* fall_through) {
619 Handle<Code> ic = ToBooleanStub::GetUninitialized(isolate());
620 CallIC(ic, condition->test_id());
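  // The ToBoolean IC leaves either the true or the false root in the
  // result register.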
621 __ CompareRoot(result_register(), Heap::kTrueValueRootIndex);
622 Split(eq, if_true, if_false, fall_through);
623}
624
625
626void FullCodeGenerator::Split(Condition cond, Label* if_true, Label* if_false,
627 Label* fall_through, CRegister cr) {
628 if (if_false == fall_through) {
629 __ b(cond, if_true, cr);
630 } else if (if_true == fall_through) {
631 __ b(NegateCondition(cond), if_false, cr);
632 } else {
633 __ b(cond, if_true, cr);
634 __ b(if_false);
635 }
636}
637
638
639MemOperand FullCodeGenerator::StackOperand(Variable* var) {
640 DCHECK(var->IsStackAllocated());
641 // Offset is negative because higher indexes are at lower addresses.
642 int offset = -var->index() * kPointerSize;
643 // Adjust by a (parameter or local) base offset.
644 if (var->IsParameter()) {
645 offset += (info_->scope()->num_parameters() + 1) * kPointerSize;
646 } else {
647 offset += JavaScriptFrameConstants::kLocal0Offset;
648 }
649 return MemOperand(fp, offset);
650}
651
652
653MemOperand FullCodeGenerator::VarOperand(Variable* var, Register scratch) {
654 DCHECK(var->IsContextSlot() || var->IsStackAllocated());
655 if (var->IsContextSlot()) {
656 int context_chain_length = scope()->ContextChainLength(var->scope());
657 __ LoadContext(scratch, context_chain_length);
658 return ContextMemOperand(scratch, var->index());
659 } else {
660 return StackOperand(var);
661 }
662}
663
664
665void FullCodeGenerator::GetVar(Register dest, Variable* var) {
666 // Use destination as scratch.
667 MemOperand location = VarOperand(var, dest);
668 __ LoadP(dest, location, r0);
669}
670
671
672void FullCodeGenerator::SetVar(Variable* var, Register src, Register scratch0,
673 Register scratch1) {
674 DCHECK(var->IsContextSlot() || var->IsStackAllocated());
675 DCHECK(!scratch0.is(src));
676 DCHECK(!scratch0.is(scratch1));
677 DCHECK(!scratch1.is(src));
678 MemOperand location = VarOperand(var, scratch0);
679 __ StoreP(src, location, r0);
680
681 // Emit the write barrier code if the location is in the heap.
682 if (var->IsContextSlot()) {
683 __ RecordWriteContextSlot(scratch0, location.offset(), src, scratch1,
684 kLRHasBeenSaved, kDontSaveFPRegs);
685 }
686}
687
688
689void FullCodeGenerator::PrepareForBailoutBeforeSplit(Expression* expr,
690 bool should_normalize,
691 Label* if_true,
692 Label* if_false) {
693 // Only prepare for bailouts before splits if we're in a test
694 // context. Otherwise, we let the Visit function deal with the
695 // preparation to avoid preparing with the same AST id twice.
696 if (!context()->IsTest()) return;
697
698 Label skip;
699 if (should_normalize) __ b(&skip);
700 PrepareForBailout(expr, TOS_REG);
701 if (should_normalize) {
702 __ LoadRoot(ip, Heap::kTrueValueRootIndex);
703 __ cmp(r3, ip);
704 Split(eq, if_true, if_false, NULL);
705 __ bind(&skip);
706 }
707}
708
709
710void FullCodeGenerator::EmitDebugCheckDeclarationContext(Variable* variable) {
711 // The variable in the declaration always resides in the current function
712 // context.
713 DCHECK_EQ(0, scope()->ContextChainLength(variable->scope()));
  if (FLAG_debug_code) {
    // Check that we're not inside a with or catch context.
716 __ LoadP(r4, FieldMemOperand(cp, HeapObject::kMapOffset));
717 __ CompareRoot(r4, Heap::kWithContextMapRootIndex);
718 __ Check(ne, kDeclarationInWithContext);
719 __ CompareRoot(r4, Heap::kCatchContextMapRootIndex);
720 __ Check(ne, kDeclarationInCatchContext);
721 }
722}
723
724
725void FullCodeGenerator::VisitVariableDeclaration(
726 VariableDeclaration* declaration) {
727 // If it was not possible to allocate the variable at compile time, we
728 // need to "declare" it at runtime to make sure it actually exists in the
729 // local context.
730 VariableProxy* proxy = declaration->proxy();
731 VariableMode mode = declaration->mode();
732 Variable* variable = proxy->var();
733 bool hole_init = mode == LET || mode == CONST || mode == CONST_LEGACY;
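  // let, const and legacy const bindings are initialized with the hole so
  // that reads before initialization can be recognized.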
734 switch (variable->location()) {
735 case VariableLocation::GLOBAL:
736 case VariableLocation::UNALLOCATED:
737 globals_->Add(variable->name(), zone());
738 globals_->Add(variable->binding_needs_init()
739 ? isolate()->factory()->the_hole_value()
740 : isolate()->factory()->undefined_value(),
741 zone());
742 break;
743
744 case VariableLocation::PARAMETER:
745 case VariableLocation::LOCAL:
746 if (hole_init) {
747 Comment cmnt(masm_, "[ VariableDeclaration");
748 __ LoadRoot(ip, Heap::kTheHoleValueRootIndex);
749 __ StoreP(ip, StackOperand(variable));
750 }
751 break;
752
753 case VariableLocation::CONTEXT:
754 if (hole_init) {
755 Comment cmnt(masm_, "[ VariableDeclaration");
756 EmitDebugCheckDeclarationContext(variable);
757 __ LoadRoot(ip, Heap::kTheHoleValueRootIndex);
758 __ StoreP(ip, ContextMemOperand(cp, variable->index()), r0);
759 // No write barrier since the_hole_value is in old space.
760 PrepareForBailoutForId(proxy->id(), NO_REGISTERS);
761 }
762 break;
763
764 case VariableLocation::LOOKUP: {
765 Comment cmnt(masm_, "[ VariableDeclaration");
766 __ mov(r5, Operand(variable->name()));
767 // Declaration nodes are always introduced in one of four modes.
768 DCHECK(IsDeclaredVariableMode(mode));
769 // Push initial value, if any.
770 // Note: For variables we must not push an initial value (such as
771 // 'undefined') because we may have a (legal) redeclaration and we
772 // must not destroy the current value.
773 if (hole_init) {
774 __ LoadRoot(r3, Heap::kTheHoleValueRootIndex);
775 } else {
776 __ LoadSmiLiteral(r3, Smi::FromInt(0)); // Indicates no initial value.
777 }
778 __ Push(r5, r3);
779 __ Push(Smi::FromInt(variable->DeclarationPropertyAttributes()));
780 __ CallRuntime(Runtime::kDeclareLookupSlot);
781 break;
782 }
783 }
784}
785
786
787void FullCodeGenerator::VisitFunctionDeclaration(
788 FunctionDeclaration* declaration) {
789 VariableProxy* proxy = declaration->proxy();
790 Variable* variable = proxy->var();
791 switch (variable->location()) {
792 case VariableLocation::GLOBAL:
793 case VariableLocation::UNALLOCATED: {
794 globals_->Add(variable->name(), zone());
795 Handle<SharedFunctionInfo> function =
796 Compiler::GetSharedFunctionInfo(declaration->fun(), script(), info_);
797 // Check for stack-overflow exception.
798 if (function.is_null()) return SetStackOverflow();
799 globals_->Add(function, zone());
800 break;
801 }
802
803 case VariableLocation::PARAMETER:
804 case VariableLocation::LOCAL: {
805 Comment cmnt(masm_, "[ FunctionDeclaration");
806 VisitForAccumulatorValue(declaration->fun());
807 __ StoreP(result_register(), StackOperand(variable));
808 break;
809 }
810
811 case VariableLocation::CONTEXT: {
812 Comment cmnt(masm_, "[ FunctionDeclaration");
813 EmitDebugCheckDeclarationContext(variable);
814 VisitForAccumulatorValue(declaration->fun());
815 __ StoreP(result_register(), ContextMemOperand(cp, variable->index()),
816 r0);
817 int offset = Context::SlotOffset(variable->index());
818 // We know that we have written a function, which is not a smi.
819 __ RecordWriteContextSlot(cp, offset, result_register(), r5,
820 kLRHasBeenSaved, kDontSaveFPRegs,
821 EMIT_REMEMBERED_SET, OMIT_SMI_CHECK);
822 PrepareForBailoutForId(proxy->id(), NO_REGISTERS);
823 break;
824 }
825
826 case VariableLocation::LOOKUP: {
827 Comment cmnt(masm_, "[ FunctionDeclaration");
828 __ mov(r5, Operand(variable->name()));
      PushOperand(r5);
      // Push initial value for function declaration.
      VisitForStackValue(declaration->fun());
      PushOperand(Smi::FromInt(variable->DeclarationPropertyAttributes()));
      CallRuntimeWithOperands(Runtime::kDeclareLookupSlot);
      break;
835 }
836 }
837}
838
839
840void FullCodeGenerator::DeclareGlobals(Handle<FixedArray> pairs) {
841 // Call the runtime to declare the globals.
842 __ mov(r4, Operand(pairs));
843 __ LoadSmiLiteral(r3, Smi::FromInt(DeclareGlobalsFlags()));
844 __ Push(r4, r3);
845 __ CallRuntime(Runtime::kDeclareGlobals);
846 // Return value is ignored.
847}
848
849
850void FullCodeGenerator::DeclareModules(Handle<FixedArray> descriptions) {
851 // Call the runtime to declare the modules.
852 __ Push(descriptions);
853 __ CallRuntime(Runtime::kDeclareModules);
854 // Return value is ignored.
855}
856
857
858void FullCodeGenerator::VisitSwitchStatement(SwitchStatement* stmt) {
859 Comment cmnt(masm_, "[ SwitchStatement");
860 Breakable nested_statement(this, stmt);
861 SetStatementPosition(stmt);
862
863 // Keep the switch value on the stack until a case matches.
864 VisitForStackValue(stmt->tag());
865 PrepareForBailoutForId(stmt->EntryId(), NO_REGISTERS);
866
867 ZoneList<CaseClause*>* clauses = stmt->cases();
868 CaseClause* default_clause = NULL; // Can occur anywhere in the list.
869
870 Label next_test; // Recycled for each test.
871 // Compile all the tests with branches to their bodies.
872 for (int i = 0; i < clauses->length(); i++) {
873 CaseClause* clause = clauses->at(i);
874 clause->body_target()->Unuse();
875
876 // The default is not a test, but remember it as final fall through.
877 if (clause->is_default()) {
878 default_clause = clause;
879 continue;
880 }
881
882 Comment cmnt(masm_, "[ Case comparison");
883 __ bind(&next_test);
884 next_test.Unuse();
885
886 // Compile the label expression.
887 VisitForAccumulatorValue(clause->label());
888
889 // Perform the comparison as if via '==='.
890 __ LoadP(r4, MemOperand(sp, 0)); // Switch value.
891 bool inline_smi_code = ShouldInlineSmiCase(Token::EQ_STRICT);
892 JumpPatchSite patch_site(masm_);
893 if (inline_smi_code) {
894 Label slow_case;
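      // OR the operands' tag bits; the result is a smi only if both are smis.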
895 __ orx(r5, r4, r3);
896 patch_site.EmitJumpIfNotSmi(r5, &slow_case);
897
898 __ cmp(r4, r3);
899 __ bne(&next_test);
900 __ Drop(1); // Switch value is no longer needed.
901 __ b(clause->body_target());
902 __ bind(&slow_case);
903 }
904
905 // Record position before stub call for type feedback.
906 SetExpressionPosition(clause);
    Handle<Code> ic =
        CodeFactory::CompareIC(isolate(), Token::EQ_STRICT).code();
    CallIC(ic, clause->CompareId());
910 patch_site.EmitPatchInfo();
911
912 Label skip;
913 __ b(&skip);
914 PrepareForBailout(clause, TOS_REG);
915 __ LoadRoot(ip, Heap::kTrueValueRootIndex);
916 __ cmp(r3, ip);
917 __ bne(&next_test);
918 __ Drop(1);
919 __ b(clause->body_target());
920 __ bind(&skip);
921
922 __ cmpi(r3, Operand::Zero());
923 __ bne(&next_test);
924 __ Drop(1); // Switch value is no longer needed.
925 __ b(clause->body_target());
926 }
927
928 // Discard the test value and jump to the default if present, otherwise to
929 // the end of the statement.
930 __ bind(&next_test);
  DropOperands(1); // Switch value is no longer needed.
  if (default_clause == NULL) {
933 __ b(nested_statement.break_label());
934 } else {
935 __ b(default_clause->body_target());
936 }
937
938 // Compile all the case bodies.
939 for (int i = 0; i < clauses->length(); i++) {
940 Comment cmnt(masm_, "[ Case body");
941 CaseClause* clause = clauses->at(i);
942 __ bind(clause->body_target());
943 PrepareForBailoutForId(clause->EntryId(), NO_REGISTERS);
944 VisitStatements(clause->statements());
945 }
946
947 __ bind(nested_statement.break_label());
948 PrepareForBailoutForId(stmt->ExitId(), NO_REGISTERS);
949}
950
951
952void FullCodeGenerator::VisitForInStatement(ForInStatement* stmt) {
953 Comment cmnt(masm_, "[ ForInStatement");
954 SetStatementPosition(stmt, SKIP_BREAK);
955
956 FeedbackVectorSlot slot = stmt->ForInFeedbackSlot();
957
958 Label loop, exit;
959 ForIn loop_statement(this, stmt);
960 increment_loop_depth();
961
  // Get the object to enumerate over.
  SetExpressionAsStatementPosition(stmt->enumerable());
  VisitForAccumulatorValue(stmt->enumerable());
  OperandStackDepthIncrement(ForIn::kElementCount);

  // If the object is null or undefined, skip over the loop, otherwise convert
  // it to a JS receiver. See ECMA-262 version 5, section 12.6.4.
  Label convert, done_convert;
970 __ JumpIfSmi(r3, &convert);
971 __ CompareObjectType(r3, r4, r4, FIRST_JS_RECEIVER_TYPE);
972 __ bge(&done_convert);
  __ CompareRoot(r3, Heap::kNullValueRootIndex);
  __ beq(&exit);
  __ CompareRoot(r3, Heap::kUndefinedValueRootIndex);
  __ beq(&exit);
  __ bind(&convert);
978 ToObjectStub stub(isolate());
979 __ CallStub(&stub);
980 __ bind(&done_convert);
981 PrepareForBailoutForId(stmt->ToObjectId(), TOS_REG);
982 __ push(r3);
983
  // Check cache validity in generated code. This is a fast case for
  // the JSObject::IsSimpleEnum cache validity checks. If we cannot
  // guarantee cache validity, call the runtime system to check cache
  // validity or get the property names in a fixed array.
  // Note: Proxies never have an enum cache, so will always take the
  // slow path.
  Label call_runtime;
  __ CheckEnumCache(&call_runtime);

  // The enum cache is valid. Load the map of the object being
994 // iterated over and use the cache for the iteration.
995 Label use_cache;
996 __ LoadP(r3, FieldMemOperand(r3, HeapObject::kMapOffset));
997 __ b(&use_cache);
998
999 // Get the set of properties to enumerate.
1000 __ bind(&call_runtime);
1001 __ push(r3); // Duplicate the enumerable object on the stack.
  __ CallRuntime(Runtime::kForInEnumerate);
  PrepareForBailoutForId(stmt->EnumId(), TOS_REG);
1004
1005 // If we got a map from the runtime call, we can do a fast
1006 // modification check. Otherwise, we got a fixed array, and we have
1007 // to do a slow check.
1008 Label fixed_array;
1009 __ LoadP(r5, FieldMemOperand(r3, HeapObject::kMapOffset));
1010 __ LoadRoot(ip, Heap::kMetaMapRootIndex);
1011 __ cmp(r5, ip);
1012 __ bne(&fixed_array);
1013
1014 // We got a map in register r3. Get the enumeration cache from it.
1015 Label no_descriptors;
1016 __ bind(&use_cache);
1017
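  // EnumLength is the number of valid entries in the map's enum cache, as a
  // smi; zero means there is nothing to enumerate.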
1018 __ EnumLength(r4, r3);
1019 __ CmpSmiLiteral(r4, Smi::FromInt(0), r0);
1020 __ beq(&no_descriptors);
1021
1022 __ LoadInstanceDescriptors(r3, r5);
1023 __ LoadP(r5, FieldMemOperand(r5, DescriptorArray::kEnumCacheOffset));
1024 __ LoadP(r5,
1025 FieldMemOperand(r5, DescriptorArray::kEnumCacheBridgeCacheOffset));
1026
1027 // Set up the four remaining stack slots.
1028 __ push(r3); // Map.
1029 __ LoadSmiLiteral(r3, Smi::FromInt(0));
1030 // Push enumeration cache, enumeration cache length (as smi) and zero.
1031 __ Push(r5, r4, r3);
1032 __ b(&loop);
1033
1034 __ bind(&no_descriptors);
1035 __ Drop(1);
1036 __ b(&exit);
1037
1038 // We got a fixed array in register r3. Iterate through that.
1039 __ bind(&fixed_array);
1040
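  // We are on the generic FixedArray path: record megamorphic feedback for
  // this for-in slot, since the enum cache cannot be used for this object.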
  int const vector_index = SmiFromSlot(slot)->value();
  __ EmitLoadTypeFeedbackVector(r4);
1043 __ mov(r5, Operand(TypeFeedbackVector::MegamorphicSentinel(isolate())));
  __ StoreP(
      r5, FieldMemOperand(r4, FixedArray::OffsetOfElementAt(vector_index)), r0);
1046 __ LoadSmiLiteral(r4, Smi::FromInt(1)); // Smi(1) indicates slow check
1047 __ Push(r4, r3); // Smi and array
1048 __ LoadP(r4, FieldMemOperand(r3, FixedArray::kLengthOffset));
  __ Push(r4); // Fixed array length (as smi).
  PrepareForBailoutForId(stmt->PrepareId(), NO_REGISTERS);
  __ LoadSmiLiteral(r3, Smi::FromInt(0));
  __ Push(r3); // Initial index.

  // Generate code for doing the condition check.
1055 __ bind(&loop);
1056 SetExpressionAsStatementPosition(stmt->each());
1057
1058 // Load the current count to r3, load the length to r4.
1059 __ LoadP(r3, MemOperand(sp, 0 * kPointerSize));
1060 __ LoadP(r4, MemOperand(sp, 1 * kPointerSize));
1061 __ cmpl(r3, r4); // Compare to the array length.
1062 __ bge(loop_statement.break_label());
1063
1064 // Get the current entry of the array into register r6.
1065 __ LoadP(r5, MemOperand(sp, 2 * kPointerSize));
1066 __ addi(r5, r5, Operand(FixedArray::kHeaderSize - kHeapObjectTag));
1067 __ SmiToPtrArrayOffset(r6, r3);
1068 __ LoadPX(r6, MemOperand(r6, r5));
1069
1070 // Get the expected map from the stack or a smi in the
1071 // permanent slow case into register r5.
1072 __ LoadP(r5, MemOperand(sp, 3 * kPointerSize));
1073
1074 // Check if the expected map still matches that of the enumerable.
1075 // If not, we may have to filter the key.
1076 Label update_each;
1077 __ LoadP(r4, MemOperand(sp, 4 * kPointerSize));
1078 __ LoadP(r7, FieldMemOperand(r4, HeapObject::kMapOffset));
1079 __ cmp(r7, r5);
1080 __ beq(&update_each);
1081
  // We might get here from TurboFan or Crankshaft when something in the
  // for-in loop body deopts and only now notice in fullcodegen that we can
  // no longer use the enum cache, i.e. we left fast mode. So better record
  // this information here, in case we later OSR back into this loop or
  // reoptimize the whole function w/o rerunning the loop with the slow
  // mode object in fullcodegen (which would result in a deopt loop).
1088 __ EmitLoadTypeFeedbackVector(r3);
1089 __ mov(r5, Operand(TypeFeedbackVector::MegamorphicSentinel(isolate())));
1090 __ StoreP(
1091 r5, FieldMemOperand(r3, FixedArray::OffsetOfElementAt(vector_index)), r0);
1092
  // Convert the entry to a string or (smi) 0 if it isn't a property
1094 // any more. If the property has been removed while iterating, we
1095 // just skip it.
1096 __ Push(r4, r6); // Enumerable and current entry.
1097 __ CallRuntime(Runtime::kForInFilter);
1098 PrepareForBailoutForId(stmt->FilterId(), TOS_REG);
1099 __ mr(r6, r3);
1100 __ LoadRoot(r0, Heap::kUndefinedValueRootIndex);
1101 __ cmp(r3, r0);
1102 __ beq(loop_statement.continue_label());
1103
1104 // Update the 'each' property or variable from the possibly filtered
1105 // entry in register r6.
1106 __ bind(&update_each);
1107 __ mr(result_register(), r6);
1108 // Perform the assignment as if via '='.
1109 {
1110 EffectContext context(this);
1111 EmitAssignment(stmt->each(), stmt->EachFeedbackSlot());
1112 PrepareForBailoutForId(stmt->AssignmentId(), NO_REGISTERS);
1113 }
1114
1115 // Both Crankshaft and Turbofan expect BodyId to be right before stmt->body().
1116 PrepareForBailoutForId(stmt->BodyId(), NO_REGISTERS);
1117 // Generate code for the body of the loop.
1118 Visit(stmt->body());
1119
1120 // Generate code for the going to the next element by incrementing
1121 // the index (smi) stored on top of the stack.
1122 __ bind(loop_statement.continue_label());
1123 __ pop(r3);
1124 __ AddSmiLiteral(r3, r3, Smi::FromInt(1), r0);
1125 __ push(r3);
1126
1127 EmitBackEdgeBookkeeping(stmt, &loop);
1128 __ b(&loop);
1129
1130 // Remove the pointers stored on the stack.
1131 __ bind(loop_statement.break_label());
  DropOperands(5);

  // Exit and decrement the loop depth.
1135 PrepareForBailoutForId(stmt->ExitId(), NO_REGISTERS);
1136 __ bind(&exit);
1137 decrement_loop_depth();
1138}
1139
1140
1141void FullCodeGenerator::EmitNewClosure(Handle<SharedFunctionInfo> info,
1142 bool pretenure) {
1143 // Use the fast case closure allocation code that allocates in new
1144 // space for nested functions that don't need literals cloning. If
1145 // we're running with the --always-opt or the --prepare-always-opt
1146 // flag, we need to use the runtime function so that the new function
1147 // we are creating here gets a chance to have its code optimized and
1148 // doesn't just get a copy of the existing unoptimized code.
1149 if (!FLAG_always_opt && !FLAG_prepare_always_opt && !pretenure &&
1150 scope()->is_function_scope() && info->num_literals() == 0) {
1151 FastNewClosureStub stub(isolate(), info->language_mode(), info->kind());
1152 __ mov(r5, Operand(info));
1153 __ CallStub(&stub);
1154 } else {
1155 __ Push(info);
1156 __ CallRuntime(pretenure ? Runtime::kNewClosure_Tenured
1157 : Runtime::kNewClosure);
1158 }
1159 context()->Plug(r3);
1160}
1161
1162
1163void FullCodeGenerator::EmitSetHomeObject(Expression* initializer, int offset,
1164 FeedbackVectorSlot slot) {
1165 DCHECK(NeedsHomeObject(initializer));
1166 __ LoadP(StoreDescriptor::ReceiverRegister(), MemOperand(sp));
1167 __ mov(StoreDescriptor::NameRegister(),
1168 Operand(isolate()->factory()->home_object_symbol()));
1169 __ LoadP(StoreDescriptor::ValueRegister(),
1170 MemOperand(sp, offset * kPointerSize));
1171 EmitLoadStoreICSlot(slot);
1172 CallStoreIC();
1173}
1174
1175
1176void FullCodeGenerator::EmitSetHomeObjectAccumulator(Expression* initializer,
1177 int offset,
1178 FeedbackVectorSlot slot) {
1179 DCHECK(NeedsHomeObject(initializer));
1180 __ Move(StoreDescriptor::ReceiverRegister(), r3);
1181 __ mov(StoreDescriptor::NameRegister(),
1182 Operand(isolate()->factory()->home_object_symbol()));
1183 __ LoadP(StoreDescriptor::ValueRegister(),
1184 MemOperand(sp, offset * kPointerSize));
1185 EmitLoadStoreICSlot(slot);
1186 CallStoreIC();
1187}
1188
1189
1190void FullCodeGenerator::EmitLoadGlobalCheckExtensions(VariableProxy* proxy,
1191 TypeofMode typeof_mode,
1192 Label* slow) {
1193 Register current = cp;
1194 Register next = r4;
1195 Register temp = r5;
1196
1197 Scope* s = scope();
1198 while (s != NULL) {
1199 if (s->num_heap_slots() > 0) {
1200 if (s->calls_sloppy_eval()) {
1201 // Check that extension is "the hole".
1202 __ LoadP(temp, ContextMemOperand(current, Context::EXTENSION_INDEX));
1203 __ JumpIfNotRoot(temp, Heap::kTheHoleValueRootIndex, slow);
1204 }
1205 // Load next context in chain.
1206 __ LoadP(next, ContextMemOperand(current, Context::PREVIOUS_INDEX));
1207 // Walk the rest of the chain without clobbering cp.
1208 current = next;
1209 }
1210 // If no outer scope calls eval, we do not need to check more
1211 // context extensions.
1212 if (!s->outer_scope_calls_sloppy_eval() || s->is_eval_scope()) break;
1213 s = s->outer_scope();
1214 }
1215
1216 if (s->is_eval_scope()) {
1217 Label loop, fast;
1218 if (!current.is(next)) {
1219 __ Move(next, current);
1220 }
1221 __ bind(&loop);
1222 // Terminate at native context.
1223 __ LoadP(temp, FieldMemOperand(next, HeapObject::kMapOffset));
1224 __ LoadRoot(ip, Heap::kNativeContextMapRootIndex);
1225 __ cmp(temp, ip);
1226 __ beq(&fast);
1227 // Check that extension is "the hole".
1228 __ LoadP(temp, ContextMemOperand(next, Context::EXTENSION_INDEX));
1229 __ JumpIfNotRoot(temp, Heap::kTheHoleValueRootIndex, slow);
1230 // Load next context in chain.
1231 __ LoadP(next, ContextMemOperand(next, Context::PREVIOUS_INDEX));
1232 __ b(&loop);
1233 __ bind(&fast);
1234 }
1235
1236 // All extension objects were empty and it is safe to use a normal global
1237 // load machinery.
1238 EmitGlobalVariableLoad(proxy, typeof_mode);
1239}
1240
1241
1242MemOperand FullCodeGenerator::ContextSlotOperandCheckExtensions(Variable* var,
1243 Label* slow) {
1244 DCHECK(var->IsContextSlot());
1245 Register context = cp;
1246 Register next = r6;
1247 Register temp = r7;
1248
1249 for (Scope* s = scope(); s != var->scope(); s = s->outer_scope()) {
1250 if (s->num_heap_slots() > 0) {
1251 if (s->calls_sloppy_eval()) {
1252 // Check that extension is "the hole".
1253 __ LoadP(temp, ContextMemOperand(context, Context::EXTENSION_INDEX));
1254 __ JumpIfNotRoot(temp, Heap::kTheHoleValueRootIndex, slow);
1255 }
1256 __ LoadP(next, ContextMemOperand(context, Context::PREVIOUS_INDEX));
1257 // Walk the rest of the chain without clobbering cp.
1258 context = next;
1259 }
1260 }
1261 // Check that last extension is "the hole".
1262 __ LoadP(temp, ContextMemOperand(context, Context::EXTENSION_INDEX));
1263 __ JumpIfNotRoot(temp, Heap::kTheHoleValueRootIndex, slow);
1264
  // This function is used only for loads, not stores, so it's safe to
  // return a cp-based operand (the write barrier cannot be allowed to
  // destroy the cp register).
1268 return ContextMemOperand(context, var->index());
1269}
1270
1271
1272void FullCodeGenerator::EmitDynamicLookupFastCase(VariableProxy* proxy,
1273 TypeofMode typeof_mode,
1274 Label* slow, Label* done) {
1275 // Generate fast-case code for variables that might be shadowed by
1276 // eval-introduced variables. Eval is used a lot without
1277 // introducing variables. In those cases, we do not want to
1278 // perform a runtime call for all variables in the scope
1279 // containing the eval.
1280 Variable* var = proxy->var();
1281 if (var->mode() == DYNAMIC_GLOBAL) {
1282 EmitLoadGlobalCheckExtensions(proxy, typeof_mode, slow);
1283 __ b(done);
1284 } else if (var->mode() == DYNAMIC_LOCAL) {
1285 Variable* local = var->local_if_not_shadowed();
1286 __ LoadP(r3, ContextSlotOperandCheckExtensions(local, slow));
1287 if (local->mode() == LET || local->mode() == CONST ||
1288 local->mode() == CONST_LEGACY) {
1289 __ CompareRoot(r3, Heap::kTheHoleValueRootIndex);
1290 __ bne(done);
1291 if (local->mode() == CONST_LEGACY) {
1292 __ LoadRoot(r3, Heap::kUndefinedValueRootIndex);
1293 } else { // LET || CONST
1294 __ mov(r3, Operand(var->name()));
1295 __ push(r3);
1296 __ CallRuntime(Runtime::kThrowReferenceError);
1297 }
1298 }
1299 __ b(done);
1300 }
1301}
1302
1303
1304void FullCodeGenerator::EmitGlobalVariableLoad(VariableProxy* proxy,
1305 TypeofMode typeof_mode) {
1306 Variable* var = proxy->var();
1307 DCHECK(var->IsUnallocatedOrGlobalSlot() ||
1308 (var->IsLookupSlot() && var->mode() == DYNAMIC_GLOBAL));
1309 __ LoadGlobalObject(LoadDescriptor::ReceiverRegister());
1310 __ mov(LoadDescriptor::NameRegister(), Operand(var->name()));
1311 __ mov(LoadDescriptor::SlotRegister(),
1312 Operand(SmiFromSlot(proxy->VariableFeedbackSlot())));
1313 CallLoadIC(typeof_mode);
1314}
1315
1316
1317void FullCodeGenerator::EmitVariableLoad(VariableProxy* proxy,
1318 TypeofMode typeof_mode) {
1319 // Record position before possible IC call.
1320 SetExpressionPosition(proxy);
1321 PrepareForBailoutForId(proxy->BeforeId(), NO_REGISTERS);
1322 Variable* var = proxy->var();
1323
1324 // Three cases: global variables, lookup variables, and all other types of
1325 // variables.
1326 switch (var->location()) {
1327 case VariableLocation::GLOBAL:
1328 case VariableLocation::UNALLOCATED: {
1329 Comment cmnt(masm_, "[ Global variable");
1330 EmitGlobalVariableLoad(proxy, typeof_mode);
1331 context()->Plug(r3);
1332 break;
1333 }
1334
1335 case VariableLocation::PARAMETER:
1336 case VariableLocation::LOCAL:
1337 case VariableLocation::CONTEXT: {
1338 DCHECK_EQ(NOT_INSIDE_TYPEOF, typeof_mode);
1339 Comment cmnt(masm_, var->IsContextSlot() ? "[ Context variable"
1340 : "[ Stack variable");
1341 if (NeedsHoleCheckForLoad(proxy)) {
1342 Label done;
1343 // Let and const need a read barrier.
1344 GetVar(r3, var);
1345 __ CompareRoot(r3, Heap::kTheHoleValueRootIndex);
1346 __ bne(&done);
1347 if (var->mode() == LET || var->mode() == CONST) {
1348 // Throw a reference error when using an uninitialized let/const
1349 // binding in harmony mode.
1350 __ mov(r3, Operand(var->name()));
1351 __ push(r3);
1352 __ CallRuntime(Runtime::kThrowReferenceError);
1353 } else {
1354 // Uninitialized legacy const bindings are unholed.
1355 DCHECK(var->mode() == CONST_LEGACY);
1356 __ LoadRoot(r3, Heap::kUndefinedValueRootIndex);
1357 }
1358 __ bind(&done);
1359 context()->Plug(r3);
1360 break;
1361 }
1362 context()->Plug(var);
1363 break;
1364 }
1365
1366 case VariableLocation::LOOKUP: {
1367 Comment cmnt(masm_, "[ Lookup variable");
1368 Label done, slow;
1369 // Generate code for loading from variables potentially shadowed
1370 // by eval-introduced variables.
1371 EmitDynamicLookupFastCase(proxy, typeof_mode, &slow, &done);
1372 __ bind(&slow);
      __ Push(var->name());
      Runtime::FunctionId function_id =
          typeof_mode == NOT_INSIDE_TYPEOF
              ? Runtime::kLoadLookupSlot
              : Runtime::kLoadLookupSlotInsideTypeof;
      __ CallRuntime(function_id);
1379 __ bind(&done);
1380 context()->Plug(r3);
1381 }
1382 }
1383}
1384
1385
1386void FullCodeGenerator::VisitRegExpLiteral(RegExpLiteral* expr) {
1387 Comment cmnt(masm_, "[ RegExpLiteral");
1388 __ LoadP(r6, MemOperand(fp, JavaScriptFrameConstants::kFunctionOffset));
1389 __ LoadSmiLiteral(r5, Smi::FromInt(expr->literal_index()));
1390 __ mov(r4, Operand(expr->pattern()));
1391 __ LoadSmiLiteral(r3, Smi::FromInt(expr->flags()));
1392 FastCloneRegExpStub stub(isolate());
1393 __ CallStub(&stub);
1394 context()->Plug(r3);
1395}
1396
1397
1398void FullCodeGenerator::EmitAccessor(ObjectLiteralProperty* property) {
1399 Expression* expression = (property == NULL) ? NULL : property->value();
1400 if (expression == NULL) {
1401 __ LoadRoot(r4, Heap::kNullValueRootIndex);
    PushOperand(r4);
  } else {
1404 VisitForStackValue(expression);
1405 if (NeedsHomeObject(expression)) {
1406 DCHECK(property->kind() == ObjectLiteral::Property::GETTER ||
1407 property->kind() == ObjectLiteral::Property::SETTER);
1408 int offset = property->kind() == ObjectLiteral::Property::GETTER ? 2 : 3;
1409 EmitSetHomeObject(expression, offset, property->GetSlot());
1410 }
1411 }
1412}
1413
1414
1415void FullCodeGenerator::VisitObjectLiteral(ObjectLiteral* expr) {
1416 Comment cmnt(masm_, "[ ObjectLiteral");
1417
1418 Handle<FixedArray> constant_properties = expr->constant_properties();
1419 __ LoadP(r6, MemOperand(fp, JavaScriptFrameConstants::kFunctionOffset));
1420 __ LoadSmiLiteral(r5, Smi::FromInt(expr->literal_index()));
1421 __ mov(r4, Operand(constant_properties));
1422 int flags = expr->ComputeFlags();
1423 __ LoadSmiLiteral(r3, Smi::FromInt(flags));
1424 if (MustCreateObjectLiteralWithRuntime(expr)) {
1425 __ Push(r6, r5, r4, r3);
1426 __ CallRuntime(Runtime::kCreateObjectLiteral);
1427 } else {
1428 FastCloneShallowObjectStub stub(isolate(), expr->properties_count());
1429 __ CallStub(&stub);
1430 }
1431 PrepareForBailoutForId(expr->CreateLiteralId(), TOS_REG);
1432
1433 // If result_saved is true the result is on top of the stack. If
1434 // result_saved is false the result is in r3.
1435 bool result_saved = false;
1436
1437 AccessorTable accessor_table(zone());
1438 int property_index = 0;
1439 for (; property_index < expr->properties()->length(); property_index++) {
1440 ObjectLiteral::Property* property = expr->properties()->at(property_index);
1441 if (property->is_computed_name()) break;
1442 if (property->IsCompileTimeValue()) continue;
1443
1444 Literal* key = property->key()->AsLiteral();
1445 Expression* value = property->value();
1446 if (!result_saved) {
      PushOperand(r3); // Save result on stack
      result_saved = true;
1449 }
1450 switch (property->kind()) {
1451 case ObjectLiteral::Property::CONSTANT:
1452 UNREACHABLE();
1453 case ObjectLiteral::Property::MATERIALIZED_LITERAL:
1454 DCHECK(!CompileTimeValue::IsCompileTimeValue(property->value()));
1455 // Fall through.
1456 case ObjectLiteral::Property::COMPUTED:
1457 // It is safe to use [[Put]] here because the boilerplate already
1458 // contains computed properties with an uninitialized value.
1459 if (key->value()->IsInternalizedString()) {
1460 if (property->emit_store()) {
1461 VisitForAccumulatorValue(value);
1462 DCHECK(StoreDescriptor::ValueRegister().is(r3));
1463 __ mov(StoreDescriptor::NameRegister(), Operand(key->value()));
1464 __ LoadP(StoreDescriptor::ReceiverRegister(), MemOperand(sp));
1465 EmitLoadStoreICSlot(property->GetSlot(0));
1466 CallStoreIC();
1467 PrepareForBailoutForId(key->id(), NO_REGISTERS);
1468
1469 if (NeedsHomeObject(value)) {
1470 EmitSetHomeObjectAccumulator(value, 0, property->GetSlot(1));
1471 }
1472 } else {
1473 VisitForEffect(value);
1474 }
1475 break;
1476 }
1477 // Duplicate receiver on stack.
1478 __ LoadP(r3, MemOperand(sp));
        PushOperand(r3);
        VisitForStackValue(key);
1481 VisitForStackValue(value);
1482 if (property->emit_store()) {
1483 if (NeedsHomeObject(value)) {
1484 EmitSetHomeObject(value, 2, property->GetSlot());
1485 }
1486 __ LoadSmiLiteral(r3, Smi::FromInt(SLOPPY)); // PropertyAttributes
          PushOperand(r3);
          CallRuntimeWithOperands(Runtime::kSetProperty);
        } else {
          DropOperands(3);
        }
1492 break;
1493 case ObjectLiteral::Property::PROTOTYPE:
1494 // Duplicate receiver on stack.
1495 __ LoadP(r3, MemOperand(sp));
Ben Murdoch097c5b22016-05-18 11:27:45 +01001496 PushOperand(r3);
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00001497 VisitForStackValue(value);
1498 DCHECK(property->emit_store());
Ben Murdoch097c5b22016-05-18 11:27:45 +01001499 CallRuntimeWithOperands(Runtime::kInternalSetPrototype);
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00001500 PrepareForBailoutForId(expr->GetIdForPropertySet(property_index),
1501 NO_REGISTERS);
1502 break;
1503 case ObjectLiteral::Property::GETTER:
1504 if (property->emit_store()) {
1505 accessor_table.lookup(key)->second->getter = property;
1506 }
1507 break;
1508 case ObjectLiteral::Property::SETTER:
1509 if (property->emit_store()) {
1510 accessor_table.lookup(key)->second->setter = property;
1511 }
1512 break;
1513 }
1514 }
1515
1516 // Emit code to define accessors, using only a single call to the runtime for
1517 // each pair of corresponding getters and setters.
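  // For illustration: a pair like { get x() { return 1; }, set x(v) {} } in
  // the literal is collected in the accessor table above and defined with one
  // Runtime::kDefineAccessorPropertyUnchecked call in the loop below.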
1518 for (AccessorTable::Iterator it = accessor_table.begin();
1519 it != accessor_table.end(); ++it) {
1520 __ LoadP(r3, MemOperand(sp)); // Duplicate receiver.
Ben Murdoch097c5b22016-05-18 11:27:45 +01001521 PushOperand(r3);
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00001522 VisitForStackValue(it->first);
1523 EmitAccessor(it->second->getter);
1524 EmitAccessor(it->second->setter);
1525 __ LoadSmiLiteral(r3, Smi::FromInt(NONE));
Ben Murdoch097c5b22016-05-18 11:27:45 +01001526 PushOperand(r3);
1527 CallRuntimeWithOperands(Runtime::kDefineAccessorPropertyUnchecked);
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00001528 }
1529
1530 // Object literals have two parts. The "static" part on the left contains no
1531 // computed property names, and so we can compute its map ahead of time; see
1532 // runtime.cc::CreateObjectLiteralBoilerplate. The second "dynamic" part
1533 // starts with the first computed property name, and continues with all
1534 // properties to its right. All the code from above initializes the static
1535 // component of the object literal, and arranges for the map of the result to
1536 // reflect the static order in which the keys appear. For the dynamic
1537 // properties, we compile them into a series of "SetOwnProperty" runtime
1538 // calls. This will preserve insertion order.
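  // For illustration, in a literal like {a: 1, b: 2, [key]: 3, c: 4} the
  // properties 'a' and 'b' form the static part handled above, while '[key]'
  // and 'c' form the dynamic part defined by the runtime calls emitted below
  // ('key' here stands for any computed property name).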
1539 for (; property_index < expr->properties()->length(); property_index++) {
1540 ObjectLiteral::Property* property = expr->properties()->at(property_index);
1541
1542 Expression* value = property->value();
1543 if (!result_saved) {
Ben Murdoch097c5b22016-05-18 11:27:45 +01001544 PushOperand(r3); // Save result on the stack
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00001545 result_saved = true;
1546 }
1547
1548 __ LoadP(r3, MemOperand(sp)); // Duplicate receiver.
Ben Murdoch097c5b22016-05-18 11:27:45 +01001549 PushOperand(r3);
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00001550
1551 if (property->kind() == ObjectLiteral::Property::PROTOTYPE) {
1552 DCHECK(!property->is_computed_name());
1553 VisitForStackValue(value);
1554 DCHECK(property->emit_store());
Ben Murdoch097c5b22016-05-18 11:27:45 +01001555 CallRuntimeWithOperands(Runtime::kInternalSetPrototype);
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00001556 PrepareForBailoutForId(expr->GetIdForPropertySet(property_index),
1557 NO_REGISTERS);
1558 } else {
1559 EmitPropertyKey(property, expr->GetIdForPropertyName(property_index));
1560 VisitForStackValue(value);
1561 if (NeedsHomeObject(value)) {
1562 EmitSetHomeObject(value, 2, property->GetSlot());
1563 }
1564
1565 switch (property->kind()) {
1566 case ObjectLiteral::Property::CONSTANT:
1567 case ObjectLiteral::Property::MATERIALIZED_LITERAL:
1568 case ObjectLiteral::Property::COMPUTED:
1569 if (property->emit_store()) {
Ben Murdoch097c5b22016-05-18 11:27:45 +01001570 PushOperand(Smi::FromInt(NONE));
1571 PushOperand(Smi::FromInt(property->NeedsSetFunctionName()));
1572 CallRuntimeWithOperands(Runtime::kDefineDataPropertyInLiteral);
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00001573 } else {
Ben Murdoch097c5b22016-05-18 11:27:45 +01001574 DropOperands(3);
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00001575 }
1576 break;
1577
1578 case ObjectLiteral::Property::PROTOTYPE:
1579 UNREACHABLE();
1580 break;
1581
1582 case ObjectLiteral::Property::GETTER:
Ben Murdoch097c5b22016-05-18 11:27:45 +01001583 PushOperand(Smi::FromInt(NONE));
1584 CallRuntimeWithOperands(Runtime::kDefineGetterPropertyUnchecked);
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00001585 break;
1586
1587 case ObjectLiteral::Property::SETTER:
Ben Murdoch097c5b22016-05-18 11:27:45 +01001588 PushOperand(Smi::FromInt(NONE));
1589 CallRuntimeWithOperands(Runtime::kDefineSetterPropertyUnchecked);
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00001590 break;
1591 }
1592 }
1593 }
1594
1595 if (expr->has_function()) {
1596 DCHECK(result_saved);
1597 __ LoadP(r3, MemOperand(sp));
1598 __ push(r3);
1599 __ CallRuntime(Runtime::kToFastProperties);
1600 }
1601
1602 if (result_saved) {
1603 context()->PlugTOS();
1604 } else {
1605 context()->Plug(r3);
1606 }
1607}
1608
1609
1610void FullCodeGenerator::VisitArrayLiteral(ArrayLiteral* expr) {
1611 Comment cmnt(masm_, "[ ArrayLiteral");
1612
1613 Handle<FixedArray> constant_elements = expr->constant_elements();
1614 bool has_fast_elements =
1615 IsFastObjectElementsKind(expr->constant_elements_kind());
1616 Handle<FixedArrayBase> constant_elements_values(
1617 FixedArrayBase::cast(constant_elements->get(1)));
1618
1619 AllocationSiteMode allocation_site_mode = TRACK_ALLOCATION_SITE;
1620 if (has_fast_elements && !FLAG_allocation_site_pretenuring) {
1621 // If the only customer of allocation sites is transitioning, then we can
1622 // turn tracking off since there is nothing left to transition to.
1623 allocation_site_mode = DONT_TRACK_ALLOCATION_SITE;
1624 }
1625
1626 __ LoadP(r6, MemOperand(fp, JavaScriptFrameConstants::kFunctionOffset));
1627 __ LoadSmiLiteral(r5, Smi::FromInt(expr->literal_index()));
1628 __ mov(r4, Operand(constant_elements));
1629 if (MustCreateArrayLiteralWithRuntime(expr)) {
1630 __ LoadSmiLiteral(r3, Smi::FromInt(expr->ComputeFlags()));
1631 __ Push(r6, r5, r4, r3);
1632 __ CallRuntime(Runtime::kCreateArrayLiteral);
1633 } else {
1634 FastCloneShallowArrayStub stub(isolate(), allocation_site_mode);
1635 __ CallStub(&stub);
1636 }
1637 PrepareForBailoutForId(expr->CreateLiteralId(), TOS_REG);
1638
1639 bool result_saved = false; // Is the result saved to the stack?
1640 ZoneList<Expression*>* subexprs = expr->values();
1641 int length = subexprs->length();
1642
1643 // Emit code to evaluate all the non-constant subexpressions and to store
1644 // them into the newly cloned array.
1645 int array_index = 0;
1646 for (; array_index < length; array_index++) {
1647 Expression* subexpr = subexprs->at(array_index);
Ben Murdoch097c5b22016-05-18 11:27:45 +01001648 DCHECK(!subexpr->IsSpread());
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00001649 // If the subexpression is a literal or a simple materialized literal it
1650 // is already set in the cloned array.
1651 if (CompileTimeValue::IsCompileTimeValue(subexpr)) continue;
1652
1653 if (!result_saved) {
Ben Murdoch097c5b22016-05-18 11:27:45 +01001654 PushOperand(r3);
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00001655 result_saved = true;
1656 }
1657 VisitForAccumulatorValue(subexpr);
1658
1659 __ LoadSmiLiteral(StoreDescriptor::NameRegister(),
1660 Smi::FromInt(array_index));
1661 __ LoadP(StoreDescriptor::ReceiverRegister(), MemOperand(sp, 0));
1662 EmitLoadStoreICSlot(expr->LiteralFeedbackSlot());
1663 Handle<Code> ic =
1664 CodeFactory::KeyedStoreIC(isolate(), language_mode()).code();
1665 CallIC(ic);
1666
1667 PrepareForBailoutForId(expr->GetIdForElement(array_index), NO_REGISTERS);
1668 }
1669
1670 // In case the array literal contains spread expressions it has two parts. The
1671 // first part is the "static" array, whose elements are stored at known
1672 // literal indices and are handled above. The second part starts at the first
1673 // spread expression (inclusive); its elements get appended to the array.
1674 // Note that the number of elements an iterable produces is unknown ahead of time.
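  // For illustration: in [a, b, ...xs, c] the elements 'a' and 'b' are stored
  // through the keyed store IC above, while '...xs' and 'c' go through
  // Runtime::kAppendElement below, since the length of 'xs' is only known at
  // runtime.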
1675 if (array_index < length && result_saved) {
Ben Murdoch097c5b22016-05-18 11:27:45 +01001676 PopOperand(r3);
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00001677 result_saved = false;
1678 }
1679 for (; array_index < length; array_index++) {
1680 Expression* subexpr = subexprs->at(array_index);
1681
Ben Murdoch097c5b22016-05-18 11:27:45 +01001682 PushOperand(r3);
1683 DCHECK(!subexpr->IsSpread());
1684 VisitForStackValue(subexpr);
1685 CallRuntimeWithOperands(Runtime::kAppendElement);
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00001686
1687 PrepareForBailoutForId(expr->GetIdForElement(array_index), NO_REGISTERS);
1688 }
1689
1690 if (result_saved) {
1691 context()->PlugTOS();
1692 } else {
1693 context()->Plug(r3);
1694 }
1695}
1696
1697
1698void FullCodeGenerator::VisitAssignment(Assignment* expr) {
1699 DCHECK(expr->target()->IsValidReferenceExpressionOrThis());
1700
1701 Comment cmnt(masm_, "[ Assignment");
1702 SetExpressionPosition(expr, INSERT_BREAK);
1703
1704 Property* property = expr->target()->AsProperty();
1705 LhsKind assign_type = Property::GetAssignType(property);
1706
1707 // Evaluate LHS expression.
1708 switch (assign_type) {
1709 case VARIABLE:
1710 // Nothing to do here.
1711 break;
1712 case NAMED_PROPERTY:
1713 if (expr->is_compound()) {
1714 // We need the receiver both on the stack and in the register.
1715 VisitForStackValue(property->obj());
1716 __ LoadP(LoadDescriptor::ReceiverRegister(), MemOperand(sp, 0));
1717 } else {
1718 VisitForStackValue(property->obj());
1719 }
1720 break;
1721 case NAMED_SUPER_PROPERTY:
1722 VisitForStackValue(
1723 property->obj()->AsSuperPropertyReference()->this_var());
1724 VisitForAccumulatorValue(
1725 property->obj()->AsSuperPropertyReference()->home_object());
Ben Murdoch097c5b22016-05-18 11:27:45 +01001726 PushOperand(result_register());
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00001727 if (expr->is_compound()) {
1728 const Register scratch = r4;
1729 __ LoadP(scratch, MemOperand(sp, kPointerSize));
Ben Murdoch097c5b22016-05-18 11:27:45 +01001730 PushOperands(scratch, result_register());
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00001731 }
1732 break;
1733 case KEYED_SUPER_PROPERTY: {
1734 const Register scratch = r4;
1735 VisitForStackValue(
1736 property->obj()->AsSuperPropertyReference()->this_var());
1737 VisitForAccumulatorValue(
1738 property->obj()->AsSuperPropertyReference()->home_object());
1739 __ mr(scratch, result_register());
1740 VisitForAccumulatorValue(property->key());
Ben Murdoch097c5b22016-05-18 11:27:45 +01001741 PushOperands(scratch, result_register());
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00001742 if (expr->is_compound()) {
1743 const Register scratch1 = r5;
1744 __ LoadP(scratch1, MemOperand(sp, 2 * kPointerSize));
Ben Murdoch097c5b22016-05-18 11:27:45 +01001745 PushOperands(scratch1, scratch, result_register());
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00001746 }
1747 break;
1748 }
1749 case KEYED_PROPERTY:
1750 if (expr->is_compound()) {
1751 VisitForStackValue(property->obj());
1752 VisitForStackValue(property->key());
1753 __ LoadP(LoadDescriptor::ReceiverRegister(),
1754 MemOperand(sp, 1 * kPointerSize));
1755 __ LoadP(LoadDescriptor::NameRegister(), MemOperand(sp, 0));
1756 } else {
1757 VisitForStackValue(property->obj());
1758 VisitForStackValue(property->key());
1759 }
1760 break;
1761 }
1762
1763 // For compound assignments we need another deoptimization point after the
1764 // variable/property load.
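  // E.g. 'o.x += 1' first loads o.x (the code below), then performs the
  // addition and the store; the extra bailout point covers the state right
  // after the load.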
1765 if (expr->is_compound()) {
1766 {
1767 AccumulatorValueContext context(this);
1768 switch (assign_type) {
1769 case VARIABLE:
1770 EmitVariableLoad(expr->target()->AsVariableProxy());
1771 PrepareForBailout(expr->target(), TOS_REG);
1772 break;
1773 case NAMED_PROPERTY:
1774 EmitNamedPropertyLoad(property);
1775 PrepareForBailoutForId(property->LoadId(), TOS_REG);
1776 break;
1777 case NAMED_SUPER_PROPERTY:
1778 EmitNamedSuperPropertyLoad(property);
1779 PrepareForBailoutForId(property->LoadId(), TOS_REG);
1780 break;
1781 case KEYED_SUPER_PROPERTY:
1782 EmitKeyedSuperPropertyLoad(property);
1783 PrepareForBailoutForId(property->LoadId(), TOS_REG);
1784 break;
1785 case KEYED_PROPERTY:
1786 EmitKeyedPropertyLoad(property);
1787 PrepareForBailoutForId(property->LoadId(), TOS_REG);
1788 break;
1789 }
1790 }
1791
1792 Token::Value op = expr->binary_op();
Ben Murdoch097c5b22016-05-18 11:27:45 +01001793 PushOperand(r3); // Left operand goes on the stack.
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00001794 VisitForAccumulatorValue(expr->value());
1795
1796 AccumulatorValueContext context(this);
1797 if (ShouldInlineSmiCase(op)) {
1798 EmitInlineSmiBinaryOp(expr->binary_operation(), op, expr->target(),
1799 expr->value());
1800 } else {
1801 EmitBinaryOp(expr->binary_operation(), op);
1802 }
1803
1804 // Deoptimization point in case the binary operation may have side effects.
1805 PrepareForBailout(expr->binary_operation(), TOS_REG);
1806 } else {
1807 VisitForAccumulatorValue(expr->value());
1808 }
1809
1810 SetExpressionPosition(expr);
1811
1812 // Store the value.
1813 switch (assign_type) {
1814 case VARIABLE:
1815 EmitVariableAssignment(expr->target()->AsVariableProxy()->var(),
1816 expr->op(), expr->AssignmentSlot());
1817 PrepareForBailoutForId(expr->AssignmentId(), TOS_REG);
1818 context()->Plug(r3);
1819 break;
1820 case NAMED_PROPERTY:
1821 EmitNamedPropertyAssignment(expr);
1822 break;
1823 case NAMED_SUPER_PROPERTY:
1824 EmitNamedSuperPropertyStore(property);
1825 context()->Plug(r3);
1826 break;
1827 case KEYED_SUPER_PROPERTY:
1828 EmitKeyedSuperPropertyStore(property);
1829 context()->Plug(r3);
1830 break;
1831 case KEYED_PROPERTY:
1832 EmitKeyedPropertyAssignment(expr);
1833 break;
1834 }
1835}
1836
1837
1838void FullCodeGenerator::VisitYield(Yield* expr) {
1839 Comment cmnt(masm_, "[ Yield");
1840 SetExpressionPosition(expr);
1841
1842 // Evaluate yielded value first; the initial iterator definition depends on
1843 // this. It stays on the stack while we update the iterator.
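  // E.g. for 'yield f()' the call to f() is evaluated here, before the
  // generator suspends; only its result stays on the operand stack across the
  // suspension.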
1844 VisitForStackValue(expr->expression());
1845
1846 switch (expr->yield_kind()) {
1847 case Yield::kSuspend:
1848 // Pop value from top-of-stack slot; box result into result register.
1849 EmitCreateIteratorResult(false);
1850 __ push(result_register());
1851 // Fall through.
1852 case Yield::kInitial: {
1853 Label suspend, continuation, post_runtime, resume;
1854
1855 __ b(&suspend);
1856 __ bind(&continuation);
Ben Murdoch097c5b22016-05-18 11:27:45 +01001857 // When we arrive here, the stack top is the resume mode and
1858 // result_register() holds the input value (the argument given to the
1859 // respective resume operation).
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00001860 __ RecordGeneratorContinuation();
Ben Murdoch097c5b22016-05-18 11:27:45 +01001861 __ pop(r4);
1862 __ CmpSmiLiteral(r4, Smi::FromInt(JSGeneratorObject::RETURN), r0);
1863 __ bne(&resume);
1864 __ push(result_register());
1865 EmitCreateIteratorResult(true);
1866 EmitUnwindAndReturn();
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00001867
1868 __ bind(&suspend);
1869 VisitForAccumulatorValue(expr->generator_object());
1870 DCHECK(continuation.pos() > 0 && Smi::IsValid(continuation.pos()));
1871 __ LoadSmiLiteral(r4, Smi::FromInt(continuation.pos()));
1872 __ StoreP(r4, FieldMemOperand(r3, JSGeneratorObject::kContinuationOffset),
1873 r0);
1874 __ StoreP(cp, FieldMemOperand(r3, JSGeneratorObject::kContextOffset), r0);
1875 __ mr(r4, cp);
1876 __ RecordWriteField(r3, JSGeneratorObject::kContextOffset, r4, r5,
1877 kLRHasBeenSaved, kDontSaveFPRegs);
1878 __ addi(r4, fp, Operand(StandardFrameConstants::kExpressionsOffset));
1879 __ cmp(sp, r4);
1880 __ beq(&post_runtime);
1881 __ push(r3); // generator object
1882 __ CallRuntime(Runtime::kSuspendJSGeneratorObject, 1);
1883 __ LoadP(cp, MemOperand(fp, StandardFrameConstants::kContextOffset));
1884 __ bind(&post_runtime);
Ben Murdoch097c5b22016-05-18 11:27:45 +01001885 PopOperand(result_register());
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00001886 EmitReturnSequence();
1887
1888 __ bind(&resume);
1889 context()->Plug(result_register());
1890 break;
1891 }
1892
1893 case Yield::kFinal: {
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00001894 // Pop value from top-of-stack slot, box result into result register.
Ben Murdoch097c5b22016-05-18 11:27:45 +01001895 OperandStackDepthDecrement(1);
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00001896 EmitCreateIteratorResult(true);
Ben Murdoch097c5b22016-05-18 11:27:45 +01001897 EmitUnwindAndReturn();
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00001898 break;
1899 }
1900
Ben Murdoch097c5b22016-05-18 11:27:45 +01001901 case Yield::kDelegating:
1902 UNREACHABLE();
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00001903 }
1904}
1905
1906
1907void FullCodeGenerator::EmitGeneratorResume(
1908 Expression* generator, Expression* value,
1909 JSGeneratorObject::ResumeMode resume_mode) {
1910 // The value stays in r3, and is ultimately read by the resumed generator, as
1911 // if CallRuntime(Runtime::kSuspendJSGeneratorObject) returned it. Or it
1912 // is read to throw the value when the resumed generator is already closed.
1913 // r4 will hold the generator object until the activation has been resumed.
1914 VisitForStackValue(generator);
1915 VisitForAccumulatorValue(value);
Ben Murdoch097c5b22016-05-18 11:27:45 +01001916 PopOperand(r4);
1917
1918 // Store input value into generator object.
1919 __ StoreP(result_register(),
1920 FieldMemOperand(r4, JSGeneratorObject::kInputOffset), r0);
1921 __ mr(r5, result_register());
1922 __ RecordWriteField(r4, JSGeneratorObject::kInputOffset, r5, r6,
1923 kLRHasBeenSaved, kDontSaveFPRegs);
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00001924
1925 // Load suspended function and context.
1926 __ LoadP(cp, FieldMemOperand(r4, JSGeneratorObject::kContextOffset));
1927 __ LoadP(r7, FieldMemOperand(r4, JSGeneratorObject::kFunctionOffset));
1928
1929 // Load receiver and store as the first argument.
1930 __ LoadP(r5, FieldMemOperand(r4, JSGeneratorObject::kReceiverOffset));
1931 __ push(r5);
1932
1933 // Push holes for the rest of the arguments to the generator function.
1934 __ LoadP(r6, FieldMemOperand(r7, JSFunction::kSharedFunctionInfoOffset));
1935 __ LoadWordArith(
1936 r6, FieldMemOperand(r6, SharedFunctionInfo::kFormalParameterCountOffset));
1937 __ LoadRoot(r5, Heap::kTheHoleValueRootIndex);
1938 Label argument_loop, push_frame;
1939#if V8_TARGET_ARCH_PPC64
1940 __ cmpi(r6, Operand::Zero());
1941 __ beq(&push_frame);
1942#else
1943 __ SmiUntag(r6, SetRC);
1944 __ beq(&push_frame, cr0);
1945#endif
1946 __ mtctr(r6);
1947 __ bind(&argument_loop);
1948 __ push(r5);
1949 __ bdnz(&argument_loop);
1950
1951 // Enter a new JavaScript frame, and initialize its slots as they were when
1952 // the generator was suspended.
1953 Label resume_frame, done;
1954 __ bind(&push_frame);
1955 __ b(&resume_frame, SetLK);
1956 __ b(&done);
1957 __ bind(&resume_frame);
1958 // lr = return address.
1959 // fp = caller's frame pointer.
1960 // cp = callee's context,
1961 // r7 = callee's JS function.
1962 __ PushFixedFrame(r7);
1963 // Adjust FP to point to saved FP.
1964 __ addi(fp, sp, Operand(StandardFrameConstants::kFixedFrameSizeFromFp));
1965
1966 // Load the operand stack size.
1967 __ LoadP(r6, FieldMemOperand(r4, JSGeneratorObject::kOperandStackOffset));
1968 __ LoadP(r6, FieldMemOperand(r6, FixedArray::kLengthOffset));
1969 __ SmiUntag(r6, SetRC);
1970
1971 // If we are sending a value and there is no operand stack, we can jump back
1972 // in directly.
1973 Label call_resume;
1974 if (resume_mode == JSGeneratorObject::NEXT) {
1975 Label slow_resume;
1976 __ bne(&slow_resume, cr0);
1977 __ LoadP(ip, FieldMemOperand(r7, JSFunction::kCodeEntryOffset));
1978 {
1979 ConstantPoolUnavailableScope constant_pool_unavailable(masm_);
1980 if (FLAG_enable_embedded_constant_pool) {
1981 __ LoadConstantPoolPointerRegisterFromCodeTargetAddress(ip);
1982 }
1983 __ LoadP(r5, FieldMemOperand(r4, JSGeneratorObject::kContinuationOffset));
1984 __ SmiUntag(r5);
1985 __ add(ip, ip, r5);
1986 __ LoadSmiLiteral(r5,
1987 Smi::FromInt(JSGeneratorObject::kGeneratorExecuting));
1988 __ StoreP(r5, FieldMemOperand(r4, JSGeneratorObject::kContinuationOffset),
1989 r0);
Ben Murdoch097c5b22016-05-18 11:27:45 +01001990 __ Push(Smi::FromInt(resume_mode)); // Consumed in continuation.
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00001991 __ Jump(ip);
1992 __ bind(&slow_resume);
1993 }
1994 } else {
1995 __ beq(&call_resume, cr0);
1996 }
1997
1998 // Otherwise, we push holes for the operand stack and call the runtime to fix
1999 // up the stack and the handlers.
2000 Label operand_loop;
2001 __ mtctr(r6);
2002 __ bind(&operand_loop);
2003 __ push(r5);
2004 __ bdnz(&operand_loop);
2005
2006 __ bind(&call_resume);
Ben Murdoch097c5b22016-05-18 11:27:45 +01002007 __ Push(Smi::FromInt(resume_mode)); // Consumed in continuation.
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00002008 DCHECK(!result_register().is(r4));
2009 __ Push(r4, result_register());
2010 __ Push(Smi::FromInt(resume_mode));
2011 __ CallRuntime(Runtime::kResumeJSGeneratorObject);
2012 // Not reached: the runtime call returns elsewhere.
2013 __ stop("not-reached");
2014
2015 __ bind(&done);
2016 context()->Plug(result_register());
2017}
2018
Ben Murdoch097c5b22016-05-18 11:27:45 +01002019void FullCodeGenerator::PushOperands(Register reg1, Register reg2) {
2020 OperandStackDepthIncrement(2);
2021 __ Push(reg1, reg2);
2022}
2023
2024void FullCodeGenerator::PushOperands(Register reg1, Register reg2,
2025 Register reg3) {
2026 OperandStackDepthIncrement(3);
2027 __ Push(reg1, reg2, reg3);
2028}
2029
2030void FullCodeGenerator::PushOperands(Register reg1, Register reg2,
2031 Register reg3, Register reg4) {
2032 OperandStackDepthIncrement(4);
2033 __ Push(reg1, reg2, reg3, reg4);
2034}
2035
2036void FullCodeGenerator::PopOperands(Register reg1, Register reg2) {
2037 OperandStackDepthDecrement(2);
2038 __ Pop(reg1, reg2);
2039}
2040
2041void FullCodeGenerator::EmitOperandStackDepthCheck() {
2042 if (FLAG_debug_code) {
2043 int expected_diff = StandardFrameConstants::kFixedFrameSizeFromFp +
2044 operand_stack_depth_ * kPointerSize;
2045 __ sub(r3, fp, sp);
2046 __ cmpi(r3, Operand(expected_diff));
2047 __ Assert(eq, kUnexpectedStackDepth);
2048 }
2049}
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00002050
2051void FullCodeGenerator::EmitCreateIteratorResult(bool done) {
2052 Label allocate, done_allocate;
2053
2054 __ Allocate(JSIteratorResult::kSize, r3, r5, r6, &allocate, TAG_OBJECT);
2055 __ b(&done_allocate);
2056
2057 __ bind(&allocate);
2058 __ Push(Smi::FromInt(JSIteratorResult::kSize));
2059 __ CallRuntime(Runtime::kAllocateInNewSpace);
2060
2061 __ bind(&done_allocate);
2062 __ LoadNativeContextSlot(Context::ITERATOR_RESULT_MAP_INDEX, r4);
2063 __ pop(r5);
2064 __ LoadRoot(r6,
2065 done ? Heap::kTrueValueRootIndex : Heap::kFalseValueRootIndex);
2066 __ LoadRoot(r7, Heap::kEmptyFixedArrayRootIndex);
2067 __ StoreP(r4, FieldMemOperand(r3, HeapObject::kMapOffset), r0);
2068 __ StoreP(r7, FieldMemOperand(r3, JSObject::kPropertiesOffset), r0);
2069 __ StoreP(r7, FieldMemOperand(r3, JSObject::kElementsOffset), r0);
2070 __ StoreP(r5, FieldMemOperand(r3, JSIteratorResult::kValueOffset), r0);
2071 __ StoreP(r6, FieldMemOperand(r3, JSIteratorResult::kDoneOffset), r0);
2072}
2073
2074
2075void FullCodeGenerator::EmitNamedPropertyLoad(Property* prop) {
2076 SetExpressionPosition(prop);
2077 Literal* key = prop->key()->AsLiteral();
2078 DCHECK(!prop->IsSuperAccess());
2079
2080 __ mov(LoadDescriptor::NameRegister(), Operand(key->value()));
2081 __ mov(LoadDescriptor::SlotRegister(),
2082 Operand(SmiFromSlot(prop->PropertyFeedbackSlot())));
Ben Murdoch097c5b22016-05-18 11:27:45 +01002083 CallLoadIC(NOT_INSIDE_TYPEOF);
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00002084}
2085
2086
2087void FullCodeGenerator::EmitInlineSmiBinaryOp(BinaryOperation* expr,
2088 Token::Value op,
2089 Expression* left_expr,
2090 Expression* right_expr) {
2091 Label done, smi_case, stub_call;
2092
2093 Register scratch1 = r5;
2094 Register scratch2 = r6;
2095
2096 // Get the arguments.
2097 Register left = r4;
2098 Register right = r3;
Ben Murdoch097c5b22016-05-18 11:27:45 +01002099 PopOperand(left);
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00002100
2101 // Perform combined smi check on both operands.
2102 __ orx(scratch1, left, right);
2103 STATIC_ASSERT(kSmiTag == 0);
2104 JumpPatchSite patch_site(masm_);
2105 patch_site.EmitJumpIfSmi(scratch1, &smi_case);
2106
2107 __ bind(&stub_call);
Ben Murdoch097c5b22016-05-18 11:27:45 +01002108 Handle<Code> code = CodeFactory::BinaryOpIC(isolate(), op).code();
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00002109 CallIC(code, expr->BinaryOperationFeedbackId());
2110 patch_site.EmitPatchInfo();
2111 __ b(&done);
2112
2113 __ bind(&smi_case);
2114 // Smi case. This code works the same way as the smi-smi case in the type
2115 // recording binary operation stub.
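  // E.g. for 'a + b' with two smi operands, the Token::ADD case below computes
  // the sum with AddAndCheckForOverflow and only falls back to the
  // BinaryOpIC stub on overflow.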
2116 switch (op) {
2117 case Token::SAR:
2118 __ GetLeastBitsFromSmi(scratch1, right, 5);
2119 __ ShiftRightArith(right, left, scratch1);
2120 __ ClearRightImm(right, right, Operand(kSmiTagSize + kSmiShiftSize));
2121 break;
2122 case Token::SHL: {
2123 __ GetLeastBitsFromSmi(scratch2, right, 5);
2124#if V8_TARGET_ARCH_PPC64
2125 __ ShiftLeft_(right, left, scratch2);
2126#else
2127 __ SmiUntag(scratch1, left);
2128 __ ShiftLeft_(scratch1, scratch1, scratch2);
2129 // Check that the *signed* result fits in a smi
2130 __ JumpIfNotSmiCandidate(scratch1, scratch2, &stub_call);
2131 __ SmiTag(right, scratch1);
2132#endif
2133 break;
2134 }
2135 case Token::SHR: {
2136 __ SmiUntag(scratch1, left);
2137 __ GetLeastBitsFromSmi(scratch2, right, 5);
2138 __ srw(scratch1, scratch1, scratch2);
2139 // Unsigned shift is not allowed to produce a negative number.
2140 __ JumpIfNotUnsignedSmiCandidate(scratch1, r0, &stub_call);
2141 __ SmiTag(right, scratch1);
2142 break;
2143 }
2144 case Token::ADD: {
2145 __ AddAndCheckForOverflow(scratch1, left, right, scratch2, r0);
2146 __ BranchOnOverflow(&stub_call);
2147 __ mr(right, scratch1);
2148 break;
2149 }
2150 case Token::SUB: {
2151 __ SubAndCheckForOverflow(scratch1, left, right, scratch2, r0);
2152 __ BranchOnOverflow(&stub_call);
2153 __ mr(right, scratch1);
2154 break;
2155 }
2156 case Token::MUL: {
2157 Label mul_zero;
2158#if V8_TARGET_ARCH_PPC64
2159 // Remove tag from both operands.
2160 __ SmiUntag(ip, right);
2161 __ SmiUntag(r0, left);
2162 __ Mul(scratch1, r0, ip);
2163 // Check for overflowing the smi range - no overflow if higher 33 bits of
2164 // the result are identical.
2165 __ TestIfInt32(scratch1, r0);
2166 __ bne(&stub_call);
2167#else
2168 __ SmiUntag(ip, right);
2169 __ mullw(scratch1, left, ip);
2170 __ mulhw(scratch2, left, ip);
2171 // Check for overflowing the smi range - no overflow if higher 33 bits of
2172 // the result are identical.
2173 __ TestIfInt32(scratch2, scratch1, ip);
2174 __ bne(&stub_call);
2175#endif
2176 // Go slow on zero result to handle -0.
2177 __ cmpi(scratch1, Operand::Zero());
2178 __ beq(&mul_zero);
2179#if V8_TARGET_ARCH_PPC64
2180 __ SmiTag(right, scratch1);
2181#else
2182 __ mr(right, scratch1);
2183#endif
2184 __ b(&done);
2185 // We need -0 if we were multiplying a negative number with 0 to get 0.
2186 // We know one of them was zero.
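  // E.g. -3 * 0 must produce -0, which has no smi representation, so that case
  // goes to the stub call; 3 * 0 stays on the fast path and yields +0.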
2187 __ bind(&mul_zero);
2188 __ add(scratch2, right, left);
2189 __ cmpi(scratch2, Operand::Zero());
2190 __ blt(&stub_call);
2191 __ LoadSmiLiteral(right, Smi::FromInt(0));
2192 break;
2193 }
2194 case Token::BIT_OR:
2195 __ orx(right, left, right);
2196 break;
2197 case Token::BIT_AND:
2198 __ and_(right, left, right);
2199 break;
2200 case Token::BIT_XOR:
2201 __ xor_(right, left, right);
2202 break;
2203 default:
2204 UNREACHABLE();
2205 }
2206
2207 __ bind(&done);
2208 context()->Plug(r3);
2209}
2210
2211
2212void FullCodeGenerator::EmitClassDefineProperties(ClassLiteral* lit) {
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00002213 for (int i = 0; i < lit->properties()->length(); i++) {
2214 ObjectLiteral::Property* property = lit->properties()->at(i);
2215 Expression* value = property->value();
2216
Ben Murdoch097c5b22016-05-18 11:27:45 +01002217 Register scratch = r4;
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00002218 if (property->is_static()) {
2219 __ LoadP(scratch, MemOperand(sp, kPointerSize)); // constructor
2220 } else {
2221 __ LoadP(scratch, MemOperand(sp, 0)); // prototype
2222 }
Ben Murdoch097c5b22016-05-18 11:27:45 +01002223 PushOperand(scratch);
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00002224 EmitPropertyKey(property, lit->GetIdForProperty(i));
2225
2226 // The static prototype property is read-only. We handle the non-computed
2227 // property name case in the parser. Since this is the only case where we
2228 // need to check for an own read-only property, we special-case it here so
2229 // that we do not need to perform the check for every property.
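  // E.g. class C { static ['prototype']() {} } cannot be rejected by the
  // parser, so the runtime check below throws at class definition time.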
2230 if (property->is_static() && property->is_computed_name()) {
2231 __ CallRuntime(Runtime::kThrowIfStaticPrototype);
2232 __ push(r3);
2233 }
2234
2235 VisitForStackValue(value);
2236 if (NeedsHomeObject(value)) {
2237 EmitSetHomeObject(value, 2, property->GetSlot());
2238 }
2239
2240 switch (property->kind()) {
2241 case ObjectLiteral::Property::CONSTANT:
2242 case ObjectLiteral::Property::MATERIALIZED_LITERAL:
2243 case ObjectLiteral::Property::PROTOTYPE:
2244 UNREACHABLE();
2245 case ObjectLiteral::Property::COMPUTED:
Ben Murdoch097c5b22016-05-18 11:27:45 +01002246 PushOperand(Smi::FromInt(DONT_ENUM));
2247 PushOperand(Smi::FromInt(property->NeedsSetFunctionName()));
2248 CallRuntimeWithOperands(Runtime::kDefineDataPropertyInLiteral);
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00002249 break;
2250
2251 case ObjectLiteral::Property::GETTER:
Ben Murdoch097c5b22016-05-18 11:27:45 +01002252 PushOperand(Smi::FromInt(DONT_ENUM));
2253 CallRuntimeWithOperands(Runtime::kDefineGetterPropertyUnchecked);
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00002254 break;
2255
2256 case ObjectLiteral::Property::SETTER:
Ben Murdoch097c5b22016-05-18 11:27:45 +01002257 PushOperand(Smi::FromInt(DONT_ENUM));
2258 CallRuntimeWithOperands(Runtime::kDefineSetterPropertyUnchecked);
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00002259 break;
2260
2261 default:
2262 UNREACHABLE();
2263 }
2264 }
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00002265}
2266
2267
2268void FullCodeGenerator::EmitBinaryOp(BinaryOperation* expr, Token::Value op) {
Ben Murdoch097c5b22016-05-18 11:27:45 +01002269 PopOperand(r4);
2270 Handle<Code> code = CodeFactory::BinaryOpIC(isolate(), op).code();
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00002271 JumpPatchSite patch_site(masm_); // unbound, signals no inlined smi code.
2272 CallIC(code, expr->BinaryOperationFeedbackId());
2273 patch_site.EmitPatchInfo();
2274 context()->Plug(r3);
2275}
2276
2277
2278void FullCodeGenerator::EmitAssignment(Expression* expr,
2279 FeedbackVectorSlot slot) {
2280 DCHECK(expr->IsValidReferenceExpressionOrThis());
2281
2282 Property* prop = expr->AsProperty();
2283 LhsKind assign_type = Property::GetAssignType(prop);
2284
2285 switch (assign_type) {
2286 case VARIABLE: {
2287 Variable* var = expr->AsVariableProxy()->var();
2288 EffectContext context(this);
2289 EmitVariableAssignment(var, Token::ASSIGN, slot);
2290 break;
2291 }
2292 case NAMED_PROPERTY: {
Ben Murdoch097c5b22016-05-18 11:27:45 +01002293 PushOperand(r3); // Preserve value.
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00002294 VisitForAccumulatorValue(prop->obj());
2295 __ Move(StoreDescriptor::ReceiverRegister(), r3);
Ben Murdoch097c5b22016-05-18 11:27:45 +01002296 PopOperand(StoreDescriptor::ValueRegister()); // Restore value.
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00002297 __ mov(StoreDescriptor::NameRegister(),
2298 Operand(prop->key()->AsLiteral()->value()));
2299 EmitLoadStoreICSlot(slot);
2300 CallStoreIC();
2301 break;
2302 }
2303 case NAMED_SUPER_PROPERTY: {
Ben Murdoch097c5b22016-05-18 11:27:45 +01002304 PushOperand(r3);
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00002305 VisitForStackValue(prop->obj()->AsSuperPropertyReference()->this_var());
2306 VisitForAccumulatorValue(
2307 prop->obj()->AsSuperPropertyReference()->home_object());
2308 // stack: value, this; r3: home_object
2309 Register scratch = r5;
2310 Register scratch2 = r6;
2311 __ mr(scratch, result_register()); // home_object
2312 __ LoadP(r3, MemOperand(sp, kPointerSize)); // value
2313 __ LoadP(scratch2, MemOperand(sp, 0)); // this
2314 __ StoreP(scratch2, MemOperand(sp, kPointerSize)); // this
2315 __ StoreP(scratch, MemOperand(sp, 0)); // home_object
2316 // stack: this, home_object; r3: value
2317 EmitNamedSuperPropertyStore(prop);
2318 break;
2319 }
2320 case KEYED_SUPER_PROPERTY: {
Ben Murdoch097c5b22016-05-18 11:27:45 +01002321 PushOperand(r3);
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00002322 VisitForStackValue(prop->obj()->AsSuperPropertyReference()->this_var());
2323 VisitForStackValue(
2324 prop->obj()->AsSuperPropertyReference()->home_object());
2325 VisitForAccumulatorValue(prop->key());
2326 Register scratch = r5;
2327 Register scratch2 = r6;
2328 __ LoadP(scratch2, MemOperand(sp, 2 * kPointerSize)); // value
2329 // stack: value, this, home_object; r3: key, r6: value
2330 __ LoadP(scratch, MemOperand(sp, kPointerSize)); // this
2331 __ StoreP(scratch, MemOperand(sp, 2 * kPointerSize));
2332 __ LoadP(scratch, MemOperand(sp, 0)); // home_object
2333 __ StoreP(scratch, MemOperand(sp, kPointerSize));
2334 __ StoreP(r3, MemOperand(sp, 0));
2335 __ Move(r3, scratch2);
2336 // stack: this, home_object, key; r3: value.
2337 EmitKeyedSuperPropertyStore(prop);
2338 break;
2339 }
2340 case KEYED_PROPERTY: {
Ben Murdoch097c5b22016-05-18 11:27:45 +01002341 PushOperand(r3); // Preserve value.
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00002342 VisitForStackValue(prop->obj());
2343 VisitForAccumulatorValue(prop->key());
2344 __ Move(StoreDescriptor::NameRegister(), r3);
Ben Murdoch097c5b22016-05-18 11:27:45 +01002345 PopOperands(StoreDescriptor::ValueRegister(),
2346 StoreDescriptor::ReceiverRegister());
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00002347 EmitLoadStoreICSlot(slot);
2348 Handle<Code> ic =
2349 CodeFactory::KeyedStoreIC(isolate(), language_mode()).code();
2350 CallIC(ic);
2351 break;
2352 }
2353 }
2354 context()->Plug(r3);
2355}
2356
2357
2358void FullCodeGenerator::EmitStoreToStackLocalOrContextSlot(
2359 Variable* var, MemOperand location) {
2360 __ StoreP(result_register(), location, r0);
2361 if (var->IsContextSlot()) {
2362 // RecordWrite may destroy all its register arguments.
2363 __ mr(r6, result_register());
2364 int offset = Context::SlotOffset(var->index());
2365 __ RecordWriteContextSlot(r4, offset, r6, r5, kLRHasBeenSaved,
2366 kDontSaveFPRegs);
2367 }
2368}
2369
2370
2371void FullCodeGenerator::EmitVariableAssignment(Variable* var, Token::Value op,
2372 FeedbackVectorSlot slot) {
2373 if (var->IsUnallocated()) {
2374 // Global var, const, or let.
2375 __ mov(StoreDescriptor::NameRegister(), Operand(var->name()));
2376 __ LoadGlobalObject(StoreDescriptor::ReceiverRegister());
2377 EmitLoadStoreICSlot(slot);
2378 CallStoreIC();
2379
2380 } else if (var->mode() == LET && op != Token::INIT) {
2381 // Non-initializing assignment to let variable needs a write barrier.
2382 DCHECK(!var->IsLookupSlot());
2383 DCHECK(var->IsStackAllocated() || var->IsContextSlot());
2384 Label assign;
2385 MemOperand location = VarOperand(var, r4);
2386 __ LoadP(r6, location);
2387 __ CompareRoot(r6, Heap::kTheHoleValueRootIndex);
2388 __ bne(&assign);
2389 __ mov(r6, Operand(var->name()));
2390 __ push(r6);
2391 __ CallRuntime(Runtime::kThrowReferenceError);
2392 // Perform the assignment.
2393 __ bind(&assign);
2394 EmitStoreToStackLocalOrContextSlot(var, location);
2395
2396 } else if (var->mode() == CONST && op != Token::INIT) {
2397 // Assignment to const variable needs a write barrier.
2398 DCHECK(!var->IsLookupSlot());
2399 DCHECK(var->IsStackAllocated() || var->IsContextSlot());
2400 Label const_error;
2401 MemOperand location = VarOperand(var, r4);
2402 __ LoadP(r6, location);
2403 __ CompareRoot(r6, Heap::kTheHoleValueRootIndex);
2404 __ bne(&const_error);
2405 __ mov(r6, Operand(var->name()));
2406 __ push(r6);
2407 __ CallRuntime(Runtime::kThrowReferenceError);
2408 __ bind(&const_error);
2409 __ CallRuntime(Runtime::kThrowConstAssignError);
2410
2411 } else if (var->is_this() && var->mode() == CONST && op == Token::INIT) {
2412 // Initializing assignment to const {this} needs a write barrier.
2413 DCHECK(var->IsStackAllocated() || var->IsContextSlot());
2414 Label uninitialized_this;
2415 MemOperand location = VarOperand(var, r4);
2416 __ LoadP(r6, location);
2417 __ CompareRoot(r6, Heap::kTheHoleValueRootIndex);
2418 __ beq(&uninitialized_this);
2419 __ mov(r4, Operand(var->name()));
2420 __ push(r4);
2421 __ CallRuntime(Runtime::kThrowReferenceError);
2422 __ bind(&uninitialized_this);
2423 EmitStoreToStackLocalOrContextSlot(var, location);
2424
2425 } else if (!var->is_const_mode() ||
2426 (var->mode() == CONST && op == Token::INIT)) {
2427 if (var->IsLookupSlot()) {
2428 // Assignment to var.
Ben Murdoch097c5b22016-05-18 11:27:45 +01002429 __ Push(var->name());
2430 __ Push(r3);
2431 __ CallRuntime(is_strict(language_mode())
2432 ? Runtime::kStoreLookupSlot_Strict
2433 : Runtime::kStoreLookupSlot_Sloppy);
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00002434 } else {
2435 // Assignment to var or initializing assignment to let/const in harmony
2436 // mode.
2437 DCHECK((var->IsStackAllocated() || var->IsContextSlot()));
2438 MemOperand location = VarOperand(var, r4);
Ben Murdoch097c5b22016-05-18 11:27:45 +01002439 if (FLAG_debug_code && var->mode() == LET && op == Token::INIT) {
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00002440 // Check for an uninitialized let binding.
2441 __ LoadP(r5, location);
2442 __ CompareRoot(r5, Heap::kTheHoleValueRootIndex);
2443 __ Check(eq, kLetBindingReInitialization);
2444 }
2445 EmitStoreToStackLocalOrContextSlot(var, location);
2446 }
2447 } else if (var->mode() == CONST_LEGACY && op == Token::INIT) {
2448 // Const initializers need a write barrier.
2449 DCHECK(!var->IsParameter()); // No const parameters.
2450 if (var->IsLookupSlot()) {
2451 __ push(r3);
2452 __ mov(r3, Operand(var->name()));
2453 __ Push(cp, r3); // Context and name.
2454 __ CallRuntime(Runtime::kInitializeLegacyConstLookupSlot);
2455 } else {
2456 DCHECK(var->IsStackAllocated() || var->IsContextSlot());
2457 Label skip;
2458 MemOperand location = VarOperand(var, r4);
2459 __ LoadP(r5, location);
2460 __ CompareRoot(r5, Heap::kTheHoleValueRootIndex);
2461 __ bne(&skip);
2462 EmitStoreToStackLocalOrContextSlot(var, location);
2463 __ bind(&skip);
2464 }
2465
2466 } else {
2467 DCHECK(var->mode() == CONST_LEGACY && op != Token::INIT);
2468 if (is_strict(language_mode())) {
2469 __ CallRuntime(Runtime::kThrowConstAssignError);
2470 }
2471 // Silently ignore store in sloppy mode.
2472 }
2473}
2474
2475
2476void FullCodeGenerator::EmitNamedPropertyAssignment(Assignment* expr) {
2477 // Assignment to a property, using a named store IC.
2478 Property* prop = expr->target()->AsProperty();
2479 DCHECK(prop != NULL);
2480 DCHECK(prop->key()->IsLiteral());
2481
2482 __ mov(StoreDescriptor::NameRegister(),
2483 Operand(prop->key()->AsLiteral()->value()));
Ben Murdoch097c5b22016-05-18 11:27:45 +01002484 PopOperand(StoreDescriptor::ReceiverRegister());
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00002485 EmitLoadStoreICSlot(expr->AssignmentSlot());
2486 CallStoreIC();
2487
2488 PrepareForBailoutForId(expr->AssignmentId(), TOS_REG);
2489 context()->Plug(r3);
2490}
2491
2492
2493void FullCodeGenerator::EmitNamedSuperPropertyStore(Property* prop) {
2494 // Assignment to named property of super.
2495 // r3 : value
2496 // stack : receiver ('this'), home_object
2497 DCHECK(prop != NULL);
2498 Literal* key = prop->key()->AsLiteral();
2499 DCHECK(key != NULL);
2500
Ben Murdoch097c5b22016-05-18 11:27:45 +01002501 PushOperand(key->value());
2502 PushOperand(r3);
2503 CallRuntimeWithOperands((is_strict(language_mode())
2504 ? Runtime::kStoreToSuper_Strict
2505 : Runtime::kStoreToSuper_Sloppy));
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00002506}
2507
2508
2509void FullCodeGenerator::EmitKeyedSuperPropertyStore(Property* prop) {
 2510 // Assignment to keyed property of super.
2511 // r3 : value
2512 // stack : receiver ('this'), home_object, key
2513 DCHECK(prop != NULL);
2514
Ben Murdoch097c5b22016-05-18 11:27:45 +01002515 PushOperand(r3);
2516 CallRuntimeWithOperands((is_strict(language_mode())
2517 ? Runtime::kStoreKeyedToSuper_Strict
2518 : Runtime::kStoreKeyedToSuper_Sloppy));
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00002519}
2520
2521
2522void FullCodeGenerator::EmitKeyedPropertyAssignment(Assignment* expr) {
2523 // Assignment to a property, using a keyed store IC.
Ben Murdoch097c5b22016-05-18 11:27:45 +01002524 PopOperands(StoreDescriptor::ReceiverRegister(),
2525 StoreDescriptor::NameRegister());
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00002526 DCHECK(StoreDescriptor::ValueRegister().is(r3));
2527
2528 Handle<Code> ic =
2529 CodeFactory::KeyedStoreIC(isolate(), language_mode()).code();
2530 EmitLoadStoreICSlot(expr->AssignmentSlot());
2531 CallIC(ic);
2532
2533 PrepareForBailoutForId(expr->AssignmentId(), TOS_REG);
2534 context()->Plug(r3);
2535}
2536
2537
2538void FullCodeGenerator::VisitProperty(Property* expr) {
2539 Comment cmnt(masm_, "[ Property");
2540 SetExpressionPosition(expr);
2541
2542 Expression* key = expr->key();
2543
2544 if (key->IsPropertyName()) {
2545 if (!expr->IsSuperAccess()) {
2546 VisitForAccumulatorValue(expr->obj());
2547 __ Move(LoadDescriptor::ReceiverRegister(), r3);
2548 EmitNamedPropertyLoad(expr);
2549 } else {
2550 VisitForStackValue(expr->obj()->AsSuperPropertyReference()->this_var());
2551 VisitForStackValue(
2552 expr->obj()->AsSuperPropertyReference()->home_object());
2553 EmitNamedSuperPropertyLoad(expr);
2554 }
2555 } else {
2556 if (!expr->IsSuperAccess()) {
2557 VisitForStackValue(expr->obj());
2558 VisitForAccumulatorValue(expr->key());
2559 __ Move(LoadDescriptor::NameRegister(), r3);
Ben Murdoch097c5b22016-05-18 11:27:45 +01002560 PopOperand(LoadDescriptor::ReceiverRegister());
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00002561 EmitKeyedPropertyLoad(expr);
2562 } else {
2563 VisitForStackValue(expr->obj()->AsSuperPropertyReference()->this_var());
2564 VisitForStackValue(
2565 expr->obj()->AsSuperPropertyReference()->home_object());
2566 VisitForStackValue(expr->key());
2567 EmitKeyedSuperPropertyLoad(expr);
2568 }
2569 }
2570 PrepareForBailoutForId(expr->LoadId(), TOS_REG);
2571 context()->Plug(r3);
2572}
2573
2574
2575void FullCodeGenerator::CallIC(Handle<Code> code, TypeFeedbackId ast_id) {
2576 ic_total_count_++;
2577 __ Call(code, RelocInfo::CODE_TARGET, ast_id);
2578}
2579
2580
2581// Code common for calls using the IC.
2582void FullCodeGenerator::EmitCallWithLoadIC(Call* expr) {
2583 Expression* callee = expr->expression();
2584
2585 // Get the target function.
2586 ConvertReceiverMode convert_mode;
2587 if (callee->IsVariableProxy()) {
2588 {
2589 StackValueContext context(this);
2590 EmitVariableLoad(callee->AsVariableProxy());
2591 PrepareForBailout(callee, NO_REGISTERS);
2592 }
2593 // Push undefined as receiver. This is patched in the method prologue if it
2594 // is a sloppy mode method.
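  // E.g. a plain call 'f(1)' has no explicit receiver; undefined is pushed
  // here, and a sloppy-mode callee will see the global proxy as 'this'
  // instead.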
2595 __ LoadRoot(r0, Heap::kUndefinedValueRootIndex);
Ben Murdoch097c5b22016-05-18 11:27:45 +01002596 PushOperand(r0);
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00002597 convert_mode = ConvertReceiverMode::kNullOrUndefined;
2598 } else {
2599 // Load the function from the receiver.
2600 DCHECK(callee->IsProperty());
2601 DCHECK(!callee->AsProperty()->IsSuperAccess());
2602 __ LoadP(LoadDescriptor::ReceiverRegister(), MemOperand(sp, 0));
2603 EmitNamedPropertyLoad(callee->AsProperty());
2604 PrepareForBailoutForId(callee->AsProperty()->LoadId(), TOS_REG);
2605 // Push the target function under the receiver.
2606 __ LoadP(r0, MemOperand(sp, 0));
Ben Murdoch097c5b22016-05-18 11:27:45 +01002607 PushOperand(r0);
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00002608 __ StoreP(r3, MemOperand(sp, kPointerSize));
2609 convert_mode = ConvertReceiverMode::kNotNullOrUndefined;
2610 }
2611
2612 EmitCall(expr, convert_mode);
2613}
2614
2615
2616void FullCodeGenerator::EmitSuperCallWithLoadIC(Call* expr) {
2617 Expression* callee = expr->expression();
2618 DCHECK(callee->IsProperty());
2619 Property* prop = callee->AsProperty();
2620 DCHECK(prop->IsSuperAccess());
2621 SetExpressionPosition(prop);
2622
2623 Literal* key = prop->key()->AsLiteral();
2624 DCHECK(!key->value()->IsSmi());
2625 // Load the function from the receiver.
2626 const Register scratch = r4;
2627 SuperPropertyReference* super_ref = prop->obj()->AsSuperPropertyReference();
2628 VisitForAccumulatorValue(super_ref->home_object());
2629 __ mr(scratch, r3);
2630 VisitForAccumulatorValue(super_ref->this_var());
Ben Murdoch097c5b22016-05-18 11:27:45 +01002631 PushOperands(scratch, r3, r3, scratch);
2632 PushOperand(key->value());
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00002633
2634 // Stack here:
2635 // - home_object
2636 // - this (receiver)
2637 // - this (receiver) <-- LoadFromSuper will pop here and below.
2638 // - home_object
2639 // - key
Ben Murdoch097c5b22016-05-18 11:27:45 +01002640 CallRuntimeWithOperands(Runtime::kLoadFromSuper);
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00002641
2642 // Replace home_object with target function.
2643 __ StoreP(r3, MemOperand(sp, kPointerSize));
2644
2645 // Stack here:
2646 // - target function
2647 // - this (receiver)
2648 EmitCall(expr);
2649}
2650
2651
2652// Code common for calls using the IC.
2653void FullCodeGenerator::EmitKeyedCallWithLoadIC(Call* expr, Expression* key) {
2654 // Load the key.
2655 VisitForAccumulatorValue(key);
2656
2657 Expression* callee = expr->expression();
2658
2659 // Load the function from the receiver.
2660 DCHECK(callee->IsProperty());
2661 __ LoadP(LoadDescriptor::ReceiverRegister(), MemOperand(sp, 0));
2662 __ Move(LoadDescriptor::NameRegister(), r3);
2663 EmitKeyedPropertyLoad(callee->AsProperty());
2664 PrepareForBailoutForId(callee->AsProperty()->LoadId(), TOS_REG);
2665
2666 // Push the target function under the receiver.
2667 __ LoadP(ip, MemOperand(sp, 0));
Ben Murdoch097c5b22016-05-18 11:27:45 +01002668 PushOperand(ip);
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00002669 __ StoreP(r3, MemOperand(sp, kPointerSize));
2670
2671 EmitCall(expr, ConvertReceiverMode::kNotNullOrUndefined);
2672}
2673
2674
2675void FullCodeGenerator::EmitKeyedSuperCallWithLoadIC(Call* expr) {
2676 Expression* callee = expr->expression();
2677 DCHECK(callee->IsProperty());
2678 Property* prop = callee->AsProperty();
2679 DCHECK(prop->IsSuperAccess());
2680
2681 SetExpressionPosition(prop);
2682 // Load the function from the receiver.
2683 const Register scratch = r4;
2684 SuperPropertyReference* super_ref = prop->obj()->AsSuperPropertyReference();
2685 VisitForAccumulatorValue(super_ref->home_object());
2686 __ mr(scratch, r3);
2687 VisitForAccumulatorValue(super_ref->this_var());
Ben Murdoch097c5b22016-05-18 11:27:45 +01002688 PushOperands(scratch, r3, r3, scratch);
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00002689 VisitForStackValue(prop->key());
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00002690
2691 // Stack here:
2692 // - home_object
2693 // - this (receiver)
2694 // - this (receiver) <-- LoadKeyedFromSuper will pop here and below.
2695 // - home_object
2696 // - key
Ben Murdoch097c5b22016-05-18 11:27:45 +01002697 CallRuntimeWithOperands(Runtime::kLoadKeyedFromSuper);
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00002698
2699 // Replace home_object with target function.
2700 __ StoreP(r3, MemOperand(sp, kPointerSize));
2701
2702 // Stack here:
2703 // - target function
2704 // - this (receiver)
2705 EmitCall(expr);
2706}
2707
2708
2709void FullCodeGenerator::EmitCall(Call* expr, ConvertReceiverMode mode) {
2710 // Load the arguments.
2711 ZoneList<Expression*>* args = expr->arguments();
2712 int arg_count = args->length();
2713 for (int i = 0; i < arg_count; i++) {
2714 VisitForStackValue(args->at(i));
2715 }
2716
2717 PrepareForBailoutForId(expr->CallId(), NO_REGISTERS);
2718 SetCallPosition(expr);
Ben Murdoch097c5b22016-05-18 11:27:45 +01002719 if (expr->tail_call_mode() == TailCallMode::kAllow) {
2720 if (FLAG_trace) {
2721 __ CallRuntime(Runtime::kTraceTailCall);
2722 }
2723 // Update profiling counters before the tail call since we will
2724 // not return to this function.
2725 EmitProfilingCounterHandlingForReturnSequence(true);
2726 }
2727 Handle<Code> ic =
2728 CodeFactory::CallIC(isolate(), arg_count, mode, expr->tail_call_mode())
2729 .code();
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00002730 __ LoadSmiLiteral(r6, SmiFromSlot(expr->CallFeedbackICSlot()));
2731 __ LoadP(r4, MemOperand(sp, (arg_count + 1) * kPointerSize), r0);
2732 // Don't assign a type feedback id to the IC, since type feedback is provided
2733 // by the vector above.
2734 CallIC(ic);
Ben Murdoch097c5b22016-05-18 11:27:45 +01002735 OperandStackDepthDecrement(arg_count + 1);
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00002736
2737 RecordJSReturnSite(expr);
2738 // Restore context register.
2739 __ LoadP(cp, MemOperand(fp, StandardFrameConstants::kContextOffset));
2740 context()->DropAndPlug(1, r3);
2741}
2742
2743
2744void FullCodeGenerator::EmitResolvePossiblyDirectEval(int arg_count) {
2745 // r7: copy of the first argument or undefined if it doesn't exist.
2746 if (arg_count > 0) {
2747 __ LoadP(r7, MemOperand(sp, arg_count * kPointerSize), r0);
2748 } else {
2749 __ LoadRoot(r7, Heap::kUndefinedValueRootIndex);
2750 }
2751
2752 // r6: the receiver of the enclosing function.
2753 __ LoadP(r6, MemOperand(fp, JavaScriptFrameConstants::kFunctionOffset));
2754
2755 // r5: language mode.
2756 __ LoadSmiLiteral(r5, Smi::FromInt(language_mode()));
2757
 2758 // r4: the start position of the scope the call resides in.
2759 __ LoadSmiLiteral(r4, Smi::FromInt(scope()->start_position()));
2760
2761 // Do the runtime call.
2762 __ Push(r7, r6, r5, r4);
2763 __ CallRuntime(Runtime::kResolvePossiblyDirectEval);
2764}
2765
2766
2767// See http://www.ecma-international.org/ecma-262/6.0/#sec-function-calls.
2768void FullCodeGenerator::PushCalleeAndWithBaseObject(Call* expr) {
2769 VariableProxy* callee = expr->expression()->AsVariableProxy();
2770 if (callee->var()->IsLookupSlot()) {
2771 Label slow, done;
2772 SetExpressionPosition(callee);
2773 // Generate code for loading from variables potentially shadowed by
2774 // eval-introduced variables.
2775 EmitDynamicLookupFastCase(callee, NOT_INSIDE_TYPEOF, &slow, &done);
2776
2777 __ bind(&slow);
2778 // Call the runtime to find the function to call (returned in r3) and
2779 // the object holding it (returned in r4).
Ben Murdoch097c5b22016-05-18 11:27:45 +01002780 __ Push(callee->name());
2781 __ CallRuntime(Runtime::kLoadLookupSlotForCall);
2782 PushOperands(r3, r4); // Function, receiver.
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00002783 PrepareForBailoutForId(expr->LookupId(), NO_REGISTERS);
2784
2785 // If fast case code has been generated, emit code to push the function
2786 // and receiver and have the slow path jump around this code.
2787 if (done.is_linked()) {
2788 Label call;
2789 __ b(&call);
2790 __ bind(&done);
2791 // Push function.
2792 __ push(r3);
2793 // Pass undefined as the receiver, which is the WithBaseObject of a
2794 // non-object environment record. If the callee is sloppy, it will patch
2795 // it up to be the global receiver.
2796 __ LoadRoot(r4, Heap::kUndefinedValueRootIndex);
2797 __ push(r4);
2798 __ bind(&call);
2799 }
2800 } else {
2801 VisitForStackValue(callee);
2802 // refEnv.WithBaseObject()
2803 __ LoadRoot(r5, Heap::kUndefinedValueRootIndex);
Ben Murdoch097c5b22016-05-18 11:27:45 +01002804 PushOperand(r5); // Reserved receiver slot.
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00002805 }
2806}
2807
2808
2809void FullCodeGenerator::EmitPossiblyEvalCall(Call* expr) {
2810 // In a call to eval, we first call RuntimeHidden_ResolvePossiblyDirectEval
2811 // to resolve the function we need to call. Then we call the resolved
2812 // function using the given arguments.
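  // E.g. 'eval(str)' may be a direct eval operating in the caller's scope, or
  // a call to whatever 'eval' currently refers to; the resolution below makes
  // that decision at runtime.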
2813 ZoneList<Expression*>* args = expr->arguments();
2814 int arg_count = args->length();
2815
2816 PushCalleeAndWithBaseObject(expr);
2817
2818 // Push the arguments.
2819 for (int i = 0; i < arg_count; i++) {
2820 VisitForStackValue(args->at(i));
2821 }
2822
2823 // Push a copy of the function (found below the arguments) and
2824 // resolve eval.
2825 __ LoadP(r4, MemOperand(sp, (arg_count + 1) * kPointerSize), r0);
2826 __ push(r4);
2827 EmitResolvePossiblyDirectEval(arg_count);
2828
2829 // Touch up the stack with the resolved function.
2830 __ StoreP(r3, MemOperand(sp, (arg_count + 1) * kPointerSize), r0);
2831
2832 PrepareForBailoutForId(expr->EvalId(), NO_REGISTERS);
2833
2834 // Record source position for debugger.
2835 SetCallPosition(expr);
2836 __ LoadP(r4, MemOperand(sp, (arg_count + 1) * kPointerSize), r0);
2837 __ mov(r3, Operand(arg_count));
Ben Murdoch097c5b22016-05-18 11:27:45 +01002838 __ Call(isolate()->builtins()->Call(ConvertReceiverMode::kAny,
2839 expr->tail_call_mode()),
2840 RelocInfo::CODE_TARGET);
2841 OperandStackDepthDecrement(arg_count + 1);
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00002842 RecordJSReturnSite(expr);
2843 // Restore context register.
2844 __ LoadP(cp, MemOperand(fp, StandardFrameConstants::kContextOffset));
2845 context()->DropAndPlug(1, r3);
2846}
2847
2848
2849void FullCodeGenerator::VisitCallNew(CallNew* expr) {
2850 Comment cmnt(masm_, "[ CallNew");
2851 // According to ECMA-262, section 11.2.2, page 44, the function
2852 // expression in new calls must be evaluated before the
2853 // arguments.
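  // E.g. in 'new f(g())' the expression 'f' is evaluated before 'g()' is.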
2854
2855 // Push constructor on the stack. If it's not a function it's used as
2856 // receiver for CALL_NON_FUNCTION, otherwise the value on the stack is
2857 // ignored.
2858 DCHECK(!expr->expression()->IsSuperPropertyReference());
2859 VisitForStackValue(expr->expression());
2860
2861 // Push the arguments ("left-to-right") on the stack.
2862 ZoneList<Expression*>* args = expr->arguments();
2863 int arg_count = args->length();
2864 for (int i = 0; i < arg_count; i++) {
2865 VisitForStackValue(args->at(i));
2866 }
2867
2868 // Call the construct call builtin that handles allocation and
2869 // constructor invocation.
2870 SetConstructCallPosition(expr);
2871
2872 // Load function and argument count into r4 and r3.
2873 __ mov(r3, Operand(arg_count));
2874 __ LoadP(r4, MemOperand(sp, arg_count * kPointerSize), r0);
2875
2876 // Record call targets in unoptimized code.
2877 __ EmitLoadTypeFeedbackVector(r5);
2878 __ LoadSmiLiteral(r6, SmiFromSlot(expr->CallNewFeedbackSlot()));
2879
2880 CallConstructStub stub(isolate());
2881 __ Call(stub.GetCode(), RelocInfo::CODE_TARGET);
Ben Murdoch097c5b22016-05-18 11:27:45 +01002882 OperandStackDepthDecrement(arg_count + 1);
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00002883 PrepareForBailoutForId(expr->ReturnId(), TOS_REG);
2884 // Restore context register.
2885 __ LoadP(cp, MemOperand(fp, StandardFrameConstants::kContextOffset));
2886 context()->Plug(r3);
2887}
2888
2889
2890void FullCodeGenerator::EmitSuperConstructorCall(Call* expr) {
2891 SuperCallReference* super_call_ref =
2892 expr->expression()->AsSuperCallReference();
2893 DCHECK_NOT_NULL(super_call_ref);
2894
2895 // Push the super constructor target on the stack (may be null,
2896 // but the Construct builtin can deal with that properly).
2897 VisitForAccumulatorValue(super_call_ref->this_function_var());
2898 __ AssertFunction(result_register());
2899 __ LoadP(result_register(),
2900 FieldMemOperand(result_register(), HeapObject::kMapOffset));
2901 __ LoadP(result_register(),
2902 FieldMemOperand(result_register(), Map::kPrototypeOffset));
Ben Murdoch097c5b22016-05-18 11:27:45 +01002903 PushOperand(result_register());
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00002904
2905 // Push the arguments ("left-to-right") on the stack.
2906 ZoneList<Expression*>* args = expr->arguments();
2907 int arg_count = args->length();
2908 for (int i = 0; i < arg_count; i++) {
2909 VisitForStackValue(args->at(i));
2910 }
2911
2912 // Call the construct call builtin that handles allocation and
2913 // constructor invocation.
2914 SetConstructCallPosition(expr);
2915
2916 // Load new target into r6.
2917 VisitForAccumulatorValue(super_call_ref->new_target_var());
2918 __ mr(r6, result_register());
2919
 2920 // Load function and argument count into r4 and r3.
2921 __ mov(r3, Operand(arg_count));
2922 __ LoadP(r4, MemOperand(sp, arg_count * kPointerSize));
2923
2924 __ Call(isolate()->builtins()->Construct(), RelocInfo::CODE_TARGET);
Ben Murdoch097c5b22016-05-18 11:27:45 +01002925 OperandStackDepthDecrement(arg_count + 1);
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00002926
2927 RecordJSReturnSite(expr);
2928
2929 // Restore context register.
2930 __ LoadP(cp, MemOperand(fp, StandardFrameConstants::kContextOffset));
2931 context()->Plug(r3);
2932}
2933
2934
2935void FullCodeGenerator::EmitIsSmi(CallRuntime* expr) {
2936 ZoneList<Expression*>* args = expr->arguments();
2937 DCHECK(args->length() == 1);
2938
2939 VisitForAccumulatorValue(args->at(0));
2940
2941 Label materialize_true, materialize_false;
2942 Label* if_true = NULL;
2943 Label* if_false = NULL;
2944 Label* fall_through = NULL;
2945 context()->PrepareTest(&materialize_true, &materialize_false, &if_true,
2946 &if_false, &fall_through);
2947
2948 PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
2949 __ TestIfSmi(r3, r0);
2950 Split(eq, if_true, if_false, fall_through, cr0);
2951
2952 context()->Plug(if_true, if_false);
2953}
2954
2955
2956void FullCodeGenerator::EmitIsJSReceiver(CallRuntime* expr) {
2957 ZoneList<Expression*>* args = expr->arguments();
2958 DCHECK(args->length() == 1);
2959
2960 VisitForAccumulatorValue(args->at(0));
2961
2962 Label materialize_true, materialize_false;
2963 Label* if_true = NULL;
2964 Label* if_false = NULL;
2965 Label* fall_through = NULL;
2966 context()->PrepareTest(&materialize_true, &materialize_false, &if_true,
2967 &if_false, &fall_through);
2968
2969 __ JumpIfSmi(r3, if_false);
2970 __ CompareObjectType(r3, r4, r4, FIRST_JS_RECEIVER_TYPE);
2971 PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
2972 Split(ge, if_true, if_false, fall_through);
2973
2974 context()->Plug(if_true, if_false);
2975}
2976
2977
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00002978void FullCodeGenerator::EmitIsArray(CallRuntime* expr) {
2979 ZoneList<Expression*>* args = expr->arguments();
2980 DCHECK(args->length() == 1);
2981
2982 VisitForAccumulatorValue(args->at(0));
2983
2984 Label materialize_true, materialize_false;
2985 Label* if_true = NULL;
2986 Label* if_false = NULL;
2987 Label* fall_through = NULL;
2988 context()->PrepareTest(&materialize_true, &materialize_false, &if_true,
2989 &if_false, &fall_through);
2990
2991 __ JumpIfSmi(r3, if_false);
2992 __ CompareObjectType(r3, r4, r4, JS_ARRAY_TYPE);
2993 PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
2994 Split(eq, if_true, if_false, fall_through);
2995
2996 context()->Plug(if_true, if_false);
2997}
2998
2999
3000void FullCodeGenerator::EmitIsTypedArray(CallRuntime* expr) {
3001 ZoneList<Expression*>* args = expr->arguments();
3002 DCHECK(args->length() == 1);
3003
3004 VisitForAccumulatorValue(args->at(0));
3005
3006 Label materialize_true, materialize_false;
3007 Label* if_true = NULL;
3008 Label* if_false = NULL;
3009 Label* fall_through = NULL;
3010 context()->PrepareTest(&materialize_true, &materialize_false, &if_true,
3011 &if_false, &fall_through);
3012
3013 __ JumpIfSmi(r3, if_false);
3014 __ CompareObjectType(r3, r4, r4, JS_TYPED_ARRAY_TYPE);
3015 PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
3016 Split(eq, if_true, if_false, fall_through);
3017
3018 context()->Plug(if_true, if_false);
3019}
3020
3021
3022void FullCodeGenerator::EmitIsRegExp(CallRuntime* expr) {
3023 ZoneList<Expression*>* args = expr->arguments();
3024 DCHECK(args->length() == 1);
3025
3026 VisitForAccumulatorValue(args->at(0));
3027
3028 Label materialize_true, materialize_false;
3029 Label* if_true = NULL;
3030 Label* if_false = NULL;
3031 Label* fall_through = NULL;
3032 context()->PrepareTest(&materialize_true, &materialize_false, &if_true,
3033 &if_false, &fall_through);
3034
3035 __ JumpIfSmi(r3, if_false);
3036 __ CompareObjectType(r3, r4, r4, JS_REGEXP_TYPE);
3037 PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
3038 Split(eq, if_true, if_false, fall_through);
3039
3040 context()->Plug(if_true, if_false);
3041}
3042
3043
3044void FullCodeGenerator::EmitIsJSProxy(CallRuntime* expr) {
3045 ZoneList<Expression*>* args = expr->arguments();
3046 DCHECK(args->length() == 1);
3047
3048 VisitForAccumulatorValue(args->at(0));
3049
3050 Label materialize_true, materialize_false;
3051 Label* if_true = NULL;
3052 Label* if_false = NULL;
3053 Label* fall_through = NULL;
3054 context()->PrepareTest(&materialize_true, &materialize_false, &if_true,
3055 &if_false, &fall_through);
3056
3057 __ JumpIfSmi(r3, if_false);
3058 __ CompareObjectType(r3, r4, r4, JS_PROXY_TYPE);
3059 PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
3060 Split(eq, if_true, if_false, fall_through);
3061
3062 context()->Plug(if_true, if_false);
3063}
3064
3065
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00003066void FullCodeGenerator::EmitClassOf(CallRuntime* expr) {
3067 ZoneList<Expression*>* args = expr->arguments();
3068 DCHECK(args->length() == 1);
3069 Label done, null, function, non_function_constructor;
3070
3071 VisitForAccumulatorValue(args->at(0));
3072
3073 // If the object is not a JSReceiver, we return null.
3074 __ JumpIfSmi(r3, &null);
3075 STATIC_ASSERT(LAST_JS_RECEIVER_TYPE == LAST_TYPE);
3076 __ CompareObjectType(r3, r3, r4, FIRST_JS_RECEIVER_TYPE);
3077 // Map is now in r3.
3078 __ blt(&null);
3079
3080 // Return 'Function' for JSFunction objects.
3081 __ cmpi(r4, Operand(JS_FUNCTION_TYPE));
3082 __ beq(&function);
3083
3084 // Check if the constructor in the map is a JS function.
3085 Register instance_type = r5;
3086 __ GetMapConstructor(r3, r3, r4, instance_type);
3087 __ cmpi(instance_type, Operand(JS_FUNCTION_TYPE));
3088 __ bne(&non_function_constructor);
3089
3090 // r3 now contains the constructor function. Grab the
3091 // instance class name from there.
3092 __ LoadP(r3, FieldMemOperand(r3, JSFunction::kSharedFunctionInfoOffset));
3093 __ LoadP(r3,
3094 FieldMemOperand(r3, SharedFunctionInfo::kInstanceClassNameOffset));
3095 __ b(&done);
3096
3097 // Functions have class 'Function'.
3098 __ bind(&function);
3099 __ LoadRoot(r3, Heap::kFunction_stringRootIndex);
3100 __ b(&done);
3101
3102 // Objects with a non-function constructor have class 'Object'.
3103 __ bind(&non_function_constructor);
3104 __ LoadRoot(r3, Heap::kObject_stringRootIndex);
3105 __ b(&done);
3106
3107 // Non-JS objects have class null.
3108 __ bind(&null);
3109 __ LoadRoot(r3, Heap::kNullValueRootIndex);
3110
3111 // All done.
3112 __ bind(&done);
3113
3114 context()->Plug(r3);
3115}
3116
3117
3118void FullCodeGenerator::EmitValueOf(CallRuntime* expr) {
3119 ZoneList<Expression*>* args = expr->arguments();
3120 DCHECK(args->length() == 1);
3121 VisitForAccumulatorValue(args->at(0)); // Load the object.
3122
3123 Label done;
3124 // If the object is a smi return the object.
3125 __ JumpIfSmi(r3, &done);
3126 // If the object is not a value type, return the object.
3127 __ CompareObjectType(r3, r4, r4, JS_VALUE_TYPE);
3128 __ bne(&done);
3129 __ LoadP(r3, FieldMemOperand(r3, JSValue::kValueOffset));
3130
3131 __ bind(&done);
3132 context()->Plug(r3);
3133}
3134
3135
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00003136void FullCodeGenerator::EmitOneByteSeqStringSetChar(CallRuntime* expr) {
3137 ZoneList<Expression*>* args = expr->arguments();
3138 DCHECK_EQ(3, args->length());
3139
3140 Register string = r3;
3141 Register index = r4;
3142 Register value = r5;
3143
3144 VisitForStackValue(args->at(0)); // index
3145 VisitForStackValue(args->at(1)); // value
3146 VisitForAccumulatorValue(args->at(2)); // string
Ben Murdoch097c5b22016-05-18 11:27:45 +01003147 PopOperands(index, value);
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00003148
3149 if (FLAG_debug_code) {
3150 __ TestIfSmi(value, r0);
3151 __ Check(eq, kNonSmiValue, cr0);
3152 __ TestIfSmi(index, r0);
3153 __ Check(eq, kNonSmiIndex, cr0);
3154 __ SmiUntag(index, index);
3155 static const uint32_t one_byte_seq_type = kSeqStringTag | kOneByteStringTag;
3156 __ EmitSeqStringSetCharCheck(string, index, value, one_byte_seq_type);
3157 __ SmiTag(index, index);
3158 }
3159
3160 __ SmiUntag(value);
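  // Compute the character's address: ip points at the first character of
  // the (untagged) sequential string and r0 is the byte offset derived from
  // the smi index, so the store goes to ip + r0.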
3161 __ addi(ip, string, Operand(SeqOneByteString::kHeaderSize - kHeapObjectTag));
3162 __ SmiToByteArrayOffset(r0, index);
3163 __ stbx(value, MemOperand(ip, r0));
3164 context()->Plug(string);
3165}
3166
3167
3168void FullCodeGenerator::EmitTwoByteSeqStringSetChar(CallRuntime* expr) {
3169 ZoneList<Expression*>* args = expr->arguments();
3170 DCHECK_EQ(3, args->length());
3171
3172 Register string = r3;
3173 Register index = r4;
3174 Register value = r5;
3175
3176 VisitForStackValue(args->at(0)); // index
3177 VisitForStackValue(args->at(1)); // value
3178 VisitForAccumulatorValue(args->at(2)); // string
Ben Murdoch097c5b22016-05-18 11:27:45 +01003179 PopOperands(index, value);
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00003180
3181 if (FLAG_debug_code) {
3182 __ TestIfSmi(value, r0);
3183 __ Check(eq, kNonSmiValue, cr0);
3184 __ TestIfSmi(index, r0);
3185 __ Check(eq, kNonSmiIndex, cr0);
3186 __ SmiUntag(index, index);
3187 static const uint32_t two_byte_seq_type = kSeqStringTag | kTwoByteStringTag;
3188 __ EmitSeqStringSetCharCheck(string, index, value, two_byte_seq_type);
3189 __ SmiTag(index, index);
3190 }
3191
3192 __ SmiUntag(value);
3193 __ addi(ip, string, Operand(SeqTwoByteString::kHeaderSize - kHeapObjectTag));
3194 __ SmiToShortArrayOffset(r0, index);
3195 __ sthx(value, MemOperand(ip, r0));
3196 context()->Plug(string);
3197}
3198
3199
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00003200void FullCodeGenerator::EmitToInteger(CallRuntime* expr) {
3201 ZoneList<Expression*>* args = expr->arguments();
3202 DCHECK_EQ(1, args->length());
3203
3204 // Load the argument into r3 and convert it.
3205 VisitForAccumulatorValue(args->at(0));
3206
3207 // Convert the object to an integer.
3208 Label done_convert;
3209 __ JumpIfSmi(r3, &done_convert);
3210 __ Push(r3);
3211 __ CallRuntime(Runtime::kToInteger);
3212 __ bind(&done_convert);
3213 context()->Plug(r3);
3214}
3215
3216
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00003217void FullCodeGenerator::EmitStringCharFromCode(CallRuntime* expr) {
3218 ZoneList<Expression*>* args = expr->arguments();
3219 DCHECK(args->length() == 1);
3220 VisitForAccumulatorValue(args->at(0));
3221
3222 Label done;
3223 StringCharFromCodeGenerator generator(r3, r4);
3224 generator.GenerateFast(masm_);
3225 __ b(&done);
3226
3227 NopRuntimeCallHelper call_helper;
3228 generator.GenerateSlow(masm_, call_helper);
3229
3230 __ bind(&done);
3231 context()->Plug(r4);
3232}
3233
3234
3235void FullCodeGenerator::EmitStringCharCodeAt(CallRuntime* expr) {
3236 ZoneList<Expression*>* args = expr->arguments();
3237 DCHECK(args->length() == 2);
3238 VisitForStackValue(args->at(0));
3239 VisitForAccumulatorValue(args->at(1));
3240
3241 Register object = r4;
3242 Register index = r3;
3243 Register result = r6;
3244
Ben Murdoch097c5b22016-05-18 11:27:45 +01003245 PopOperand(object);
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00003246
3247 Label need_conversion;
3248 Label index_out_of_range;
3249 Label done;
3250 StringCharCodeAtGenerator generator(object, index, result, &need_conversion,
3251 &need_conversion, &index_out_of_range,
3252 STRING_INDEX_IS_NUMBER);
3253 generator.GenerateFast(masm_);
3254 __ b(&done);
3255
3256 __ bind(&index_out_of_range);
3257 // When the index is out of range, the spec requires us to return
3258 // NaN.
3259 __ LoadRoot(result, Heap::kNanValueRootIndex);
3260 __ b(&done);
3261
3262 __ bind(&need_conversion);
3263 // Load the undefined value into the result register, which will
3264 // trigger conversion.
3265 __ LoadRoot(result, Heap::kUndefinedValueRootIndex);
3266 __ b(&done);
3267
3268 NopRuntimeCallHelper call_helper;
3269 generator.GenerateSlow(masm_, NOT_PART_OF_IC_HANDLER, call_helper);
3270
3271 __ bind(&done);
3272 context()->Plug(result);
3273}
3274
3275
3276void FullCodeGenerator::EmitStringCharAt(CallRuntime* expr) {
3277 ZoneList<Expression*>* args = expr->arguments();
3278 DCHECK(args->length() == 2);
3279 VisitForStackValue(args->at(0));
3280 VisitForAccumulatorValue(args->at(1));
3281
3282 Register object = r4;
3283 Register index = r3;
3284 Register scratch = r6;
3285 Register result = r3;
3286
Ben Murdoch097c5b22016-05-18 11:27:45 +01003287 PopOperand(object);
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00003288
3289 Label need_conversion;
3290 Label index_out_of_range;
3291 Label done;
3292 StringCharAtGenerator generator(object, index, scratch, result,
3293 &need_conversion, &need_conversion,
3294 &index_out_of_range, STRING_INDEX_IS_NUMBER);
3295 generator.GenerateFast(masm_);
3296 __ b(&done);
3297
3298 __ bind(&index_out_of_range);
3299 // When the index is out of range, the spec requires us to return
3300 // the empty string.
3301 __ LoadRoot(result, Heap::kempty_stringRootIndex);
3302 __ b(&done);
3303
3304 __ bind(&need_conversion);
3305 // Move smi zero into the result register, which will trigger
3306 // conversion.
3307 __ LoadSmiLiteral(result, Smi::FromInt(0));
3308 __ b(&done);
3309
3310 NopRuntimeCallHelper call_helper;
3311 generator.GenerateSlow(masm_, NOT_PART_OF_IC_HANDLER, call_helper);
3312
3313 __ bind(&done);
3314 context()->Plug(result);
3315}
3316
3317
3318void FullCodeGenerator::EmitCall(CallRuntime* expr) {
3319 ZoneList<Expression*>* args = expr->arguments();
3320 DCHECK_LE(2, args->length());
3321 // Push target, receiver and arguments onto the stack.
3322 for (Expression* const arg : *args) {
3323 VisitForStackValue(arg);
3324 }
3325 PrepareForBailoutForId(expr->CallId(), NO_REGISTERS);
3326 // Move target to r4.
3327 int const argc = args->length() - 2;
3328 __ LoadP(r4, MemOperand(sp, (argc + 1) * kPointerSize));
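  // The target was pushed first, so it sits below the receiver and the argc
  // arguments, i.e. at slot (argc + 1) from the stack pointer.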
3329 // Call the target.
3330 __ mov(r3, Operand(argc));
3331 __ Call(isolate()->builtins()->Call(), RelocInfo::CODE_TARGET);
Ben Murdoch097c5b22016-05-18 11:27:45 +01003332 OperandStackDepthDecrement(argc + 1);
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00003333 // Restore context register.
3334 __ LoadP(cp, MemOperand(fp, StandardFrameConstants::kContextOffset));
3335 // Discard the function left on TOS.
3336 context()->DropAndPlug(1, r3);
3337}
3338
3339
3340void FullCodeGenerator::EmitHasCachedArrayIndex(CallRuntime* expr) {
3341 ZoneList<Expression*>* args = expr->arguments();
3342 VisitForAccumulatorValue(args->at(0));
3343
3344 Label materialize_true, materialize_false;
3345 Label* if_true = NULL;
3346 Label* if_false = NULL;
3347 Label* fall_through = NULL;
3348 context()->PrepareTest(&materialize_true, &materialize_false, &if_true,
3349 &if_false, &fall_through);
3350
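  // Test the hash field against the "contains cached array index" mask; the
  // string caches an array index iff the masked bits are all zero.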
3351 __ lwz(r3, FieldMemOperand(r3, String::kHashFieldOffset));
3352 // PPC - assume ip is free
3353 __ mov(ip, Operand(String::kContainsCachedArrayIndexMask));
3354 __ and_(r0, r3, ip, SetRC);
3355 PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
3356 Split(eq, if_true, if_false, fall_through, cr0);
3357
3358 context()->Plug(if_true, if_false);
3359}
3360
3361
3362void FullCodeGenerator::EmitGetCachedArrayIndex(CallRuntime* expr) {
3363 ZoneList<Expression*>* args = expr->arguments();
3364 DCHECK(args->length() == 1);
3365 VisitForAccumulatorValue(args->at(0));
3366
3367 __ AssertString(r3);
3368
3369 __ lwz(r3, FieldMemOperand(r3, String::kHashFieldOffset));
3370 __ IndexFromHash(r3, r3);
3371
3372 context()->Plug(r3);
3373}
3374
3375
3376void FullCodeGenerator::EmitGetSuperConstructor(CallRuntime* expr) {
3377 ZoneList<Expression*>* args = expr->arguments();
3378 DCHECK_EQ(1, args->length());
3379 VisitForAccumulatorValue(args->at(0));
3380 __ AssertFunction(r3);
3381 __ LoadP(r3, FieldMemOperand(r3, HeapObject::kMapOffset));
3382 __ LoadP(r3, FieldMemOperand(r3, Map::kPrototypeOffset));
3383 context()->Plug(r3);
3384}
3385
3386
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00003387void FullCodeGenerator::EmitDebugIsActive(CallRuntime* expr) {
3388 DCHECK(expr->arguments()->length() == 0);
3389 ExternalReference debug_is_active =
3390 ExternalReference::debug_is_active_address(isolate());
3391 __ mov(ip, Operand(debug_is_active));
3392 __ lbz(r3, MemOperand(ip));
3393 __ SmiTag(r3);
3394 context()->Plug(r3);
3395}
3396
3397
3398void FullCodeGenerator::EmitCreateIterResultObject(CallRuntime* expr) {
3399 ZoneList<Expression*>* args = expr->arguments();
3400 DCHECK_EQ(2, args->length());
3401 VisitForStackValue(args->at(0));
3402 VisitForStackValue(args->at(1));
3403
3404 Label runtime, done;
3405
3406 __ Allocate(JSIteratorResult::kSize, r3, r5, r6, &runtime, TAG_OBJECT);
3407 __ LoadNativeContextSlot(Context::ITERATOR_RESULT_MAP_INDEX, r4);
3408 __ Pop(r5, r6);
3409 __ LoadRoot(r7, Heap::kEmptyFixedArrayRootIndex);
3410 __ StoreP(r4, FieldMemOperand(r3, HeapObject::kMapOffset), r0);
3411 __ StoreP(r7, FieldMemOperand(r3, JSObject::kPropertiesOffset), r0);
3412 __ StoreP(r7, FieldMemOperand(r3, JSObject::kElementsOffset), r0);
3413 __ StoreP(r5, FieldMemOperand(r3, JSIteratorResult::kValueOffset), r0);
3414 __ StoreP(r6, FieldMemOperand(r3, JSIteratorResult::kDoneOffset), r0);
3415 STATIC_ASSERT(JSIteratorResult::kSize == 5 * kPointerSize);
3416 __ b(&done);
3417
3418 __ bind(&runtime);
Ben Murdoch097c5b22016-05-18 11:27:45 +01003419 CallRuntimeWithOperands(Runtime::kCreateIterResultObject);
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00003420
3421 __ bind(&done);
3422 context()->Plug(r3);
3423}
3424
3425
3426void FullCodeGenerator::EmitLoadJSRuntimeFunction(CallRuntime* expr) {
3427 // Push undefined as the receiver.
3428 __ LoadRoot(r3, Heap::kUndefinedValueRootIndex);
Ben Murdoch097c5b22016-05-18 11:27:45 +01003429 PushOperand(r3);
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00003430
3431 __ LoadNativeContextSlot(expr->context_index(), r3);
3432}
3433
3434
3435void FullCodeGenerator::EmitCallJSRuntimeFunction(CallRuntime* expr) {
3436 ZoneList<Expression*>* args = expr->arguments();
3437 int arg_count = args->length();
3438
3439 SetCallPosition(expr);
3440 __ LoadP(r4, MemOperand(sp, (arg_count + 1) * kPointerSize), r0);
3441 __ mov(r3, Operand(arg_count));
3442 __ Call(isolate()->builtins()->Call(ConvertReceiverMode::kNullOrUndefined),
3443 RelocInfo::CODE_TARGET);
Ben Murdoch097c5b22016-05-18 11:27:45 +01003444 OperandStackDepthDecrement(arg_count + 1);
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00003445}
3446
3447
3448void FullCodeGenerator::VisitCallRuntime(CallRuntime* expr) {
3449 ZoneList<Expression*>* args = expr->arguments();
3450 int arg_count = args->length();
3451
3452 if (expr->is_jsruntime()) {
3453 Comment cmnt(masm_, "[ CallRuntime");
3454 EmitLoadJSRuntimeFunction(expr);
3455
3456 // Push the target function under the receiver.
3457 __ LoadP(ip, MemOperand(sp, 0));
Ben Murdoch097c5b22016-05-18 11:27:45 +01003458 PushOperand(ip);
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00003459 __ StoreP(r3, MemOperand(sp, kPointerSize));
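  // The stack now holds the target function with the undefined receiver on
  // top of it; the call arguments are pushed after the receiver below.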
3460
3461 // Push the arguments ("left-to-right").
3462 for (int i = 0; i < arg_count; i++) {
3463 VisitForStackValue(args->at(i));
3464 }
3465
3466 PrepareForBailoutForId(expr->CallId(), NO_REGISTERS);
3467 EmitCallJSRuntimeFunction(expr);
3468
3469 // Restore context register.
3470 __ LoadP(cp, MemOperand(fp, StandardFrameConstants::kContextOffset));
3471
3472 context()->DropAndPlug(1, r3);
3473
3474 } else {
3475 const Runtime::Function* function = expr->function();
3476 switch (function->function_id) {
3477#define CALL_INTRINSIC_GENERATOR(Name) \
3478 case Runtime::kInline##Name: { \
3479 Comment cmnt(masm_, "[ Inline" #Name); \
3480 return Emit##Name(expr); \
3481 }
3482 FOR_EACH_FULL_CODE_INTRINSIC(CALL_INTRINSIC_GENERATOR)
3483#undef CALL_INTRINSIC_GENERATOR
3484 default: {
3485 Comment cmnt(masm_, "[ CallRuntime for unhandled intrinsic");
3486 // Push the arguments ("left-to-right").
3487 for (int i = 0; i < arg_count; i++) {
3488 VisitForStackValue(args->at(i));
3489 }
3490
3491 // Call the C runtime function.
3492 PrepareForBailoutForId(expr->CallId(), NO_REGISTERS);
3493 __ CallRuntime(expr->function(), arg_count);
Ben Murdoch097c5b22016-05-18 11:27:45 +01003494 OperandStackDepthDecrement(arg_count);
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00003495 context()->Plug(r3);
3496 }
3497 }
3498 }
3499}
3500
3501
3502void FullCodeGenerator::VisitUnaryOperation(UnaryOperation* expr) {
3503 switch (expr->op()) {
3504 case Token::DELETE: {
3505 Comment cmnt(masm_, "[ UnaryOperation (DELETE)");
3506 Property* property = expr->expression()->AsProperty();
3507 VariableProxy* proxy = expr->expression()->AsVariableProxy();
3508
3509 if (property != NULL) {
3510 VisitForStackValue(property->obj());
3511 VisitForStackValue(property->key());
Ben Murdoch097c5b22016-05-18 11:27:45 +01003512 CallRuntimeWithOperands(is_strict(language_mode())
3513 ? Runtime::kDeleteProperty_Strict
3514 : Runtime::kDeleteProperty_Sloppy);
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00003515 context()->Plug(r3);
3516 } else if (proxy != NULL) {
3517 Variable* var = proxy->var();
3518 // Delete of an unqualified identifier is disallowed in strict mode but
3519 // "delete this" is allowed.
3520 bool is_this = var->HasThisName(isolate());
3521 DCHECK(is_sloppy(language_mode()) || is_this);
3522 if (var->IsUnallocatedOrGlobalSlot()) {
3523 __ LoadGlobalObject(r5);
3524 __ mov(r4, Operand(var->name()));
3525 __ Push(r5, r4);
3526 __ CallRuntime(Runtime::kDeleteProperty_Sloppy);
3527 context()->Plug(r3);
3528 } else if (var->IsStackAllocated() || var->IsContextSlot()) {
3529 // Result of deleting non-global, non-dynamic variables is false.
3530 // The subexpression does not have side effects.
3531 context()->Plug(is_this);
3532 } else {
3533 // Non-global variable. Call the runtime to try to delete from the
3534 // context where the variable was introduced.
Ben Murdoch097c5b22016-05-18 11:27:45 +01003535 __ Push(var->name());
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00003536 __ CallRuntime(Runtime::kDeleteLookupSlot);
3537 context()->Plug(r3);
3538 }
3539 } else {
3540 // Result of deleting non-property, non-variable reference is true.
3541 // The subexpression may have side effects.
3542 VisitForEffect(expr->expression());
3543 context()->Plug(true);
3544 }
3545 break;
3546 }
3547
3548 case Token::VOID: {
3549 Comment cmnt(masm_, "[ UnaryOperation (VOID)");
3550 VisitForEffect(expr->expression());
3551 context()->Plug(Heap::kUndefinedValueRootIndex);
3552 break;
3553 }
3554
3555 case Token::NOT: {
3556 Comment cmnt(masm_, "[ UnaryOperation (NOT)");
3557 if (context()->IsEffect()) {
3558 // Unary NOT has no side effects so it's only necessary to visit the
3559 // subexpression. Match the optimizing compiler by not branching.
3560 VisitForEffect(expr->expression());
3561 } else if (context()->IsTest()) {
3562 const TestContext* test = TestContext::cast(context());
3563 // The labels are swapped for the recursive call.
3564 VisitForControl(expr->expression(), test->false_label(),
3565 test->true_label(), test->fall_through());
3566 context()->Plug(test->true_label(), test->false_label());
3567 } else {
3568 // We handle value contexts explicitly rather than simply visiting
3569 // for control and plugging the control flow into the context,
3570 // because we need to prepare a pair of extra administrative AST ids
3571 // for the optimizing compiler.
3572 DCHECK(context()->IsAccumulatorValue() || context()->IsStackValue());
3573 Label materialize_true, materialize_false, done;
3574 VisitForControl(expr->expression(), &materialize_false,
3575 &materialize_true, &materialize_true);
Ben Murdoch097c5b22016-05-18 11:27:45 +01003576 if (!context()->IsAccumulatorValue()) OperandStackDepthIncrement(1);
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00003577 __ bind(&materialize_true);
3578 PrepareForBailoutForId(expr->MaterializeTrueId(), NO_REGISTERS);
3579 __ LoadRoot(r3, Heap::kTrueValueRootIndex);
3580 if (context()->IsStackValue()) __ push(r3);
3581 __ b(&done);
3582 __ bind(&materialize_false);
3583 PrepareForBailoutForId(expr->MaterializeFalseId(), NO_REGISTERS);
3584 __ LoadRoot(r3, Heap::kFalseValueRootIndex);
3585 if (context()->IsStackValue()) __ push(r3);
3586 __ bind(&done);
3587 }
3588 break;
3589 }
3590
3591 case Token::TYPEOF: {
3592 Comment cmnt(masm_, "[ UnaryOperation (TYPEOF)");
3593 {
3594 AccumulatorValueContext context(this);
3595 VisitForTypeofValue(expr->expression());
3596 }
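      // The TypeofStub takes its operand in r6, hence the move below.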
3597 __ mr(r6, r3);
3598 TypeofStub typeof_stub(isolate());
3599 __ CallStub(&typeof_stub);
3600 context()->Plug(r3);
3601 break;
3602 }
3603
3604 default:
3605 UNREACHABLE();
3606 }
3607}
3608
3609
3610void FullCodeGenerator::VisitCountOperation(CountOperation* expr) {
3611 DCHECK(expr->expression()->IsValidReferenceExpressionOrThis());
3612
3613 Comment cmnt(masm_, "[ CountOperation");
3614
3615 Property* prop = expr->expression()->AsProperty();
3616 LhsKind assign_type = Property::GetAssignType(prop);
3617
3618 // Evaluate expression and get value.
3619 if (assign_type == VARIABLE) {
3620 DCHECK(expr->expression()->AsVariableProxy()->var() != NULL);
3621 AccumulatorValueContext context(this);
3622 EmitVariableLoad(expr->expression()->AsVariableProxy());
3623 } else {
3624 // Reserve space for result of postfix operation.
3625 if (expr->is_postfix() && !context()->IsEffect()) {
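      // Push a smi zero as a placeholder; it is overwritten with the
      // original value of the operand further down.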
3626 __ LoadSmiLiteral(ip, Smi::FromInt(0));
Ben Murdoch097c5b22016-05-18 11:27:45 +01003627 PushOperand(ip);
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00003628 }
3629 switch (assign_type) {
3630 case NAMED_PROPERTY: {
3631 // Put the object both on the stack and in the register.
3632 VisitForStackValue(prop->obj());
3633 __ LoadP(LoadDescriptor::ReceiverRegister(), MemOperand(sp, 0));
3634 EmitNamedPropertyLoad(prop);
3635 break;
3636 }
3637
3638 case NAMED_SUPER_PROPERTY: {
3639 VisitForStackValue(prop->obj()->AsSuperPropertyReference()->this_var());
3640 VisitForAccumulatorValue(
3641 prop->obj()->AsSuperPropertyReference()->home_object());
Ben Murdoch097c5b22016-05-18 11:27:45 +01003642 PushOperand(result_register());
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00003643 const Register scratch = r4;
3644 __ LoadP(scratch, MemOperand(sp, kPointerSize));
Ben Murdoch097c5b22016-05-18 11:27:45 +01003645 PushOperands(scratch, result_register());
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00003646 EmitNamedSuperPropertyLoad(prop);
3647 break;
3648 }
3649
3650 case KEYED_SUPER_PROPERTY: {
3651 VisitForStackValue(prop->obj()->AsSuperPropertyReference()->this_var());
3652 VisitForAccumulatorValue(
3653 prop->obj()->AsSuperPropertyReference()->home_object());
3654 const Register scratch = r4;
3655 const Register scratch1 = r5;
3656 __ mr(scratch, result_register());
3657 VisitForAccumulatorValue(prop->key());
Ben Murdoch097c5b22016-05-18 11:27:45 +01003658 PushOperands(scratch, result_register());
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00003659 __ LoadP(scratch1, MemOperand(sp, 2 * kPointerSize));
Ben Murdoch097c5b22016-05-18 11:27:45 +01003660 PushOperands(scratch1, scratch, result_register());
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00003661 EmitKeyedSuperPropertyLoad(prop);
3662 break;
3663 }
3664
3665 case KEYED_PROPERTY: {
3666 VisitForStackValue(prop->obj());
3667 VisitForStackValue(prop->key());
3668 __ LoadP(LoadDescriptor::ReceiverRegister(),
3669 MemOperand(sp, 1 * kPointerSize));
3670 __ LoadP(LoadDescriptor::NameRegister(), MemOperand(sp, 0));
3671 EmitKeyedPropertyLoad(prop);
3672 break;
3673 }
3674
3675 case VARIABLE:
3676 UNREACHABLE();
3677 }
3678 }
3679
3680 // We need a second deoptimization point after loading the value
3681 // in case evaluating the property load may have a side effect.
3682 if (assign_type == VARIABLE) {
3683 PrepareForBailout(expr->expression(), TOS_REG);
3684 } else {
3685 PrepareForBailoutForId(prop->LoadId(), TOS_REG);
3686 }
3687
3688 // Inline smi case if we are in a loop.
3689 Label stub_call, done;
3690 JumpPatchSite patch_site(masm_);
3691
3692 int count_value = expr->op() == Token::INC ? 1 : -1;
3693 if (ShouldInlineSmiCase(expr->op())) {
3694 Label slow;
3695 patch_site.EmitJumpIfNotSmi(r3, &slow);
3696
3697 // Save result for postfix expressions.
3698 if (expr->is_postfix()) {
3699 if (!context()->IsEffect()) {
3700 // Save the result on the stack. If we have a named or keyed property
3701 // we store the result under the receiver that is currently on top
3702 // of the stack.
3703 switch (assign_type) {
3704 case VARIABLE:
3705 __ push(r3);
3706 break;
3707 case NAMED_PROPERTY:
3708 __ StoreP(r3, MemOperand(sp, kPointerSize));
3709 break;
3710 case NAMED_SUPER_PROPERTY:
3711 __ StoreP(r3, MemOperand(sp, 2 * kPointerSize));
3712 break;
3713 case KEYED_PROPERTY:
3714 __ StoreP(r3, MemOperand(sp, 2 * kPointerSize));
3715 break;
3716 case KEYED_SUPER_PROPERTY:
3717 __ StoreP(r3, MemOperand(sp, 3 * kPointerSize));
3718 break;
3719 }
3720 }
3721 }
3722
3723 Register scratch1 = r4;
3724 Register scratch2 = r5;
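  // Try the increment/decrement inline; if the addition overflows the smi
  // range, undo it and fall back to the BinaryOpIC stub call below.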
3725 __ LoadSmiLiteral(scratch1, Smi::FromInt(count_value));
3726 __ AddAndCheckForOverflow(r3, r3, scratch1, scratch2, r0);
3727 __ BranchOnNoOverflow(&done);
3728 // Call stub. Undo operation first.
3729 __ sub(r3, r3, scratch1);
3730 __ b(&stub_call);
3731 __ bind(&slow);
3732 }
3733 if (!is_strong(language_mode())) {
3734 ToNumberStub convert_stub(isolate());
3735 __ CallStub(&convert_stub);
3736 PrepareForBailoutForId(expr->ToNumberId(), TOS_REG);
3737 }
3738
3739 // Save result for postfix expressions.
3740 if (expr->is_postfix()) {
3741 if (!context()->IsEffect()) {
3742 // Save the result on the stack. If we have a named or keyed property
3743 // we store the result under the receiver that is currently on top
3744 // of the stack.
3745 switch (assign_type) {
3746 case VARIABLE:
Ben Murdoch097c5b22016-05-18 11:27:45 +01003747 PushOperand(r3);
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00003748 break;
3749 case NAMED_PROPERTY:
3750 __ StoreP(r3, MemOperand(sp, kPointerSize));
3751 break;
3752 case NAMED_SUPER_PROPERTY:
3753 __ StoreP(r3, MemOperand(sp, 2 * kPointerSize));
3754 break;
3755 case KEYED_PROPERTY:
3756 __ StoreP(r3, MemOperand(sp, 2 * kPointerSize));
3757 break;
3758 case KEYED_SUPER_PROPERTY:
3759 __ StoreP(r3, MemOperand(sp, 3 * kPointerSize));
3760 break;
3761 }
3762 }
3763 }
3764
3765 __ bind(&stub_call);
3766 __ mr(r4, r3);
3767 __ LoadSmiLiteral(r3, Smi::FromInt(count_value));
3768
3769 SetExpressionPosition(expr);
3770
Ben Murdoch097c5b22016-05-18 11:27:45 +01003771 Handle<Code> code = CodeFactory::BinaryOpIC(isolate(), Token::ADD).code();
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00003772 CallIC(code, expr->CountBinOpFeedbackId());
3773 patch_site.EmitPatchInfo();
3774 __ bind(&done);
3775
3776 if (is_strong(language_mode())) {
3777 PrepareForBailoutForId(expr->ToNumberId(), TOS_REG);
3778 }
3779 // Store the value returned in r3.
3780 switch (assign_type) {
3781 case VARIABLE:
3782 if (expr->is_postfix()) {
3783 {
3784 EffectContext context(this);
3785 EmitVariableAssignment(expr->expression()->AsVariableProxy()->var(),
3786 Token::ASSIGN, expr->CountSlot());
3787 PrepareForBailoutForId(expr->AssignmentId(), TOS_REG);
3788 context.Plug(r3);
3789 }
3790 // For all contexts except the effect context we have the result on
3791 // top of the stack.
3792 if (!context()->IsEffect()) {
3793 context()->PlugTOS();
3794 }
3795 } else {
3796 EmitVariableAssignment(expr->expression()->AsVariableProxy()->var(),
3797 Token::ASSIGN, expr->CountSlot());
3798 PrepareForBailoutForId(expr->AssignmentId(), TOS_REG);
3799 context()->Plug(r3);
3800 }
3801 break;
3802 case NAMED_PROPERTY: {
3803 __ mov(StoreDescriptor::NameRegister(),
3804 Operand(prop->key()->AsLiteral()->value()));
Ben Murdoch097c5b22016-05-18 11:27:45 +01003805 PopOperand(StoreDescriptor::ReceiverRegister());
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00003806 EmitLoadStoreICSlot(expr->CountSlot());
3807 CallStoreIC();
3808 PrepareForBailoutForId(expr->AssignmentId(), TOS_REG);
3809 if (expr->is_postfix()) {
3810 if (!context()->IsEffect()) {
3811 context()->PlugTOS();
3812 }
3813 } else {
3814 context()->Plug(r3);
3815 }
3816 break;
3817 }
3818 case NAMED_SUPER_PROPERTY: {
3819 EmitNamedSuperPropertyStore(prop);
3820 if (expr->is_postfix()) {
3821 if (!context()->IsEffect()) {
3822 context()->PlugTOS();
3823 }
3824 } else {
3825 context()->Plug(r3);
3826 }
3827 break;
3828 }
3829 case KEYED_SUPER_PROPERTY: {
3830 EmitKeyedSuperPropertyStore(prop);
3831 if (expr->is_postfix()) {
3832 if (!context()->IsEffect()) {
3833 context()->PlugTOS();
3834 }
3835 } else {
3836 context()->Plug(r3);
3837 }
3838 break;
3839 }
3840 case KEYED_PROPERTY: {
Ben Murdoch097c5b22016-05-18 11:27:45 +01003841 PopOperands(StoreDescriptor::ReceiverRegister(),
3842 StoreDescriptor::NameRegister());
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00003843 Handle<Code> ic =
3844 CodeFactory::KeyedStoreIC(isolate(), language_mode()).code();
3845 EmitLoadStoreICSlot(expr->CountSlot());
3846 CallIC(ic);
3847 PrepareForBailoutForId(expr->AssignmentId(), TOS_REG);
3848 if (expr->is_postfix()) {
3849 if (!context()->IsEffect()) {
3850 context()->PlugTOS();
3851 }
3852 } else {
3853 context()->Plug(r3);
3854 }
3855 break;
3856 }
3857 }
3858}
3859
3860
3861void FullCodeGenerator::EmitLiteralCompareTypeof(Expression* expr,
3862 Expression* sub_expr,
3863 Handle<String> check) {
3864 Label materialize_true, materialize_false;
3865 Label* if_true = NULL;
3866 Label* if_false = NULL;
3867 Label* fall_through = NULL;
3868 context()->PrepareTest(&materialize_true, &materialize_false, &if_true,
3869 &if_false, &fall_through);
3870
3871 {
3872 AccumulatorValueContext context(this);
3873 VisitForTypeofValue(sub_expr);
3874 }
3875 PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
3876
3877 Factory* factory = isolate()->factory();
3878 if (String::Equals(check, factory->number_string())) {
3879 __ JumpIfSmi(r3, if_true);
3880 __ LoadP(r3, FieldMemOperand(r3, HeapObject::kMapOffset));
3881 __ LoadRoot(ip, Heap::kHeapNumberMapRootIndex);
3882 __ cmp(r3, ip);
3883 Split(eq, if_true, if_false, fall_through);
3884 } else if (String::Equals(check, factory->string_string())) {
3885 __ JumpIfSmi(r3, if_false);
3886 __ CompareObjectType(r3, r3, r4, FIRST_NONSTRING_TYPE);
3887 Split(lt, if_true, if_false, fall_through);
3888 } else if (String::Equals(check, factory->symbol_string())) {
3889 __ JumpIfSmi(r3, if_false);
3890 __ CompareObjectType(r3, r3, r4, SYMBOL_TYPE);
3891 Split(eq, if_true, if_false, fall_through);
3892 } else if (String::Equals(check, factory->boolean_string())) {
3893 __ CompareRoot(r3, Heap::kTrueValueRootIndex);
3894 __ beq(if_true);
3895 __ CompareRoot(r3, Heap::kFalseValueRootIndex);
3896 Split(eq, if_true, if_false, fall_through);
3897 } else if (String::Equals(check, factory->undefined_string())) {
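    // Exclude null explicitly before the generic undetectable-object check
    // below.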
Ben Murdoch097c5b22016-05-18 11:27:45 +01003898 __ CompareRoot(r3, Heap::kNullValueRootIndex);
3899 __ beq(if_false);
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00003900 __ JumpIfSmi(r3, if_false);
3901 // Check for undetectable objects => true.
3902 __ LoadP(r3, FieldMemOperand(r3, HeapObject::kMapOffset));
3903 __ lbz(r4, FieldMemOperand(r3, Map::kBitFieldOffset));
3904 __ andi(r0, r4, Operand(1 << Map::kIsUndetectable));
3905 Split(ne, if_true, if_false, fall_through, cr0);
3906
3907 } else if (String::Equals(check, factory->function_string())) {
3908 __ JumpIfSmi(r3, if_false);
3909 __ LoadP(r3, FieldMemOperand(r3, HeapObject::kMapOffset));
3910 __ lbz(r4, FieldMemOperand(r3, Map::kBitFieldOffset));
3911 __ andi(r4, r4,
3912 Operand((1 << Map::kIsCallable) | (1 << Map::kIsUndetectable)));
3913 __ cmpi(r4, Operand(1 << Map::kIsCallable));
3914 Split(eq, if_true, if_false, fall_through);
3915 } else if (String::Equals(check, factory->object_string())) {
3916 __ JumpIfSmi(r3, if_false);
3917 __ CompareRoot(r3, Heap::kNullValueRootIndex);
3918 __ beq(if_true);
3919 STATIC_ASSERT(LAST_JS_RECEIVER_TYPE == LAST_TYPE);
3920 __ CompareObjectType(r3, r3, r4, FIRST_JS_RECEIVER_TYPE);
3921 __ blt(if_false);
3922 // Check for callable or undetectable objects => false.
3923 __ lbz(r4, FieldMemOperand(r3, Map::kBitFieldOffset));
3924 __ andi(r0, r4,
3925 Operand((1 << Map::kIsCallable) | (1 << Map::kIsUndetectable)));
3926 Split(eq, if_true, if_false, fall_through, cr0);
3927// clang-format off
3928#define SIMD128_TYPE(TYPE, Type, type, lane_count, lane_type) \
3929 } else if (String::Equals(check, factory->type##_string())) { \
3930 __ JumpIfSmi(r3, if_false); \
3931 __ LoadP(r3, FieldMemOperand(r3, HeapObject::kMapOffset)); \
3932 __ CompareRoot(r3, Heap::k##Type##MapRootIndex); \
3933 Split(eq, if_true, if_false, fall_through);
3934 SIMD128_TYPES(SIMD128_TYPE)
3935#undef SIMD128_TYPE
3936 // clang-format on
3937 } else {
3938 if (if_false != fall_through) __ b(if_false);
3939 }
3940 context()->Plug(if_true, if_false);
3941}
3942
3943
3944void FullCodeGenerator::VisitCompareOperation(CompareOperation* expr) {
3945 Comment cmnt(masm_, "[ CompareOperation");
3946 SetExpressionPosition(expr);
3947
3948 // First we try a fast inlined version of the compare when one of
3949 // the operands is a literal.
3950 if (TryLiteralCompare(expr)) return;
3951
3952 // Always perform the comparison for its control flow. Pack the result
3953 // into the expression's context after the comparison is performed.
3954 Label materialize_true, materialize_false;
3955 Label* if_true = NULL;
3956 Label* if_false = NULL;
3957 Label* fall_through = NULL;
3958 context()->PrepareTest(&materialize_true, &materialize_false, &if_true,
3959 &if_false, &fall_through);
3960
3961 Token::Value op = expr->op();
3962 VisitForStackValue(expr->left());
3963 switch (op) {
3964 case Token::IN:
3965 VisitForStackValue(expr->right());
Ben Murdoch097c5b22016-05-18 11:27:45 +01003966 CallRuntimeWithOperands(Runtime::kHasProperty);
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00003967 PrepareForBailoutBeforeSplit(expr, false, NULL, NULL);
3968 __ CompareRoot(r3, Heap::kTrueValueRootIndex);
3969 Split(eq, if_true, if_false, fall_through);
3970 break;
3971
3972 case Token::INSTANCEOF: {
3973 VisitForAccumulatorValue(expr->right());
Ben Murdoch097c5b22016-05-18 11:27:45 +01003974 PopOperand(r4);
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00003975 InstanceOfStub stub(isolate());
3976 __ CallStub(&stub);
3977 PrepareForBailoutBeforeSplit(expr, false, NULL, NULL);
3978 __ CompareRoot(r3, Heap::kTrueValueRootIndex);
3979 Split(eq, if_true, if_false, fall_through);
3980 break;
3981 }
3982
3983 default: {
3984 VisitForAccumulatorValue(expr->right());
3985 Condition cond = CompareIC::ComputeCondition(op);
Ben Murdoch097c5b22016-05-18 11:27:45 +01003986 PopOperand(r4);
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00003987
3988 bool inline_smi_code = ShouldInlineSmiCase(op);
3989 JumpPatchSite patch_site(masm_);
3990 if (inline_smi_code) {
3991 Label slow_case;
3992 __ orx(r5, r3, r4);
3993 patch_site.EmitJumpIfNotSmi(r5, &slow_case);
3994 __ cmp(r4, r3);
3995 Split(cond, if_true, if_false, NULL);
3996 __ bind(&slow_case);
3997 }
3998
Ben Murdoch097c5b22016-05-18 11:27:45 +01003999 Handle<Code> ic = CodeFactory::CompareIC(isolate(), op).code();
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00004000 CallIC(ic, expr->CompareOperationFeedbackId());
4001 patch_site.EmitPatchInfo();
4002 PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
4003 __ cmpi(r3, Operand::Zero());
4004 Split(cond, if_true, if_false, fall_through);
4005 }
4006 }
4007
4008 // Convert the result of the comparison into one expected for this
4009 // expression's context.
4010 context()->Plug(if_true, if_false);
4011}
4012
4013
4014void FullCodeGenerator::EmitLiteralCompareNil(CompareOperation* expr,
4015 Expression* sub_expr,
4016 NilValue nil) {
4017 Label materialize_true, materialize_false;
4018 Label* if_true = NULL;
4019 Label* if_false = NULL;
4020 Label* fall_through = NULL;
4021 context()->PrepareTest(&materialize_true, &materialize_false, &if_true,
4022 &if_false, &fall_through);
4023
4024 VisitForAccumulatorValue(sub_expr);
4025 PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
4026 if (expr->op() == Token::EQ_STRICT) {
4027 Heap::RootListIndex nil_value = nil == kNullValue
4028 ? Heap::kNullValueRootIndex
4029 : Heap::kUndefinedValueRootIndex;
4030 __ LoadRoot(r4, nil_value);
4031 __ cmp(r3, r4);
4032 Split(eq, if_true, if_false, fall_through);
4033 } else {
4034 Handle<Code> ic = CompareNilICStub::GetUninitialized(isolate(), nil);
4035 CallIC(ic, expr->CompareOperationFeedbackId());
4036 __ CompareRoot(r3, Heap::kTrueValueRootIndex);
4037 Split(eq, if_true, if_false, fall_through);
4038 }
4039 context()->Plug(if_true, if_false);
4040}
4041
4042
4043void FullCodeGenerator::VisitThisFunction(ThisFunction* expr) {
4044 __ LoadP(r3, MemOperand(fp, JavaScriptFrameConstants::kFunctionOffset));
4045 context()->Plug(r3);
4046}
4047
4048
4049Register FullCodeGenerator::result_register() { return r3; }
4050
4051
4052Register FullCodeGenerator::context_register() { return cp; }
4053
4054
4055void FullCodeGenerator::StoreToFrameField(int frame_offset, Register value) {
4056 DCHECK_EQ(static_cast<int>(POINTER_SIZE_ALIGN(frame_offset)), frame_offset);
4057 __ StoreP(value, MemOperand(fp, frame_offset), r0);
4058}
4059
4060
4061void FullCodeGenerator::LoadContextField(Register dst, int context_index) {
4062 __ LoadP(dst, ContextMemOperand(cp, context_index), r0);
4063}
4064
4065
4066void FullCodeGenerator::PushFunctionArgumentForContextAllocation() {
4067 Scope* closure_scope = scope()->ClosureScope();
4068 if (closure_scope->is_script_scope() ||
4069 closure_scope->is_module_scope()) {
4070 // Contexts nested in the native context have a canonical empty function
4071 // as their closure, not the anonymous closure containing the global
4072 // code.
4073 __ LoadNativeContextSlot(Context::CLOSURE_INDEX, ip);
4074 } else if (closure_scope->is_eval_scope()) {
4075 // Contexts created by a call to eval have the same closure as the
4076 // context calling eval, not the anonymous closure containing the eval
4077 // code. Fetch it from the context.
4078 __ LoadP(ip, ContextMemOperand(cp, Context::CLOSURE_INDEX));
4079 } else {
4080 DCHECK(closure_scope->is_function_scope());
4081 __ LoadP(ip, MemOperand(fp, JavaScriptFrameConstants::kFunctionOffset));
4082 }
Ben Murdoch097c5b22016-05-18 11:27:45 +01004083 PushOperand(ip);
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00004084}
4085
4086
4087// ----------------------------------------------------------------------------
4088// Non-local control flow support.
4089
4090void FullCodeGenerator::EnterFinallyBlock() {
4091 DCHECK(!result_register().is(r4));
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00004092 // Store pending message while executing finally block.
4093 ExternalReference pending_message_obj =
4094 ExternalReference::address_of_pending_message_obj(isolate());
4095 __ mov(ip, Operand(pending_message_obj));
4096 __ LoadP(r4, MemOperand(ip));
Ben Murdoch097c5b22016-05-18 11:27:45 +01004097 PushOperand(r4);
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00004098
4099 ClearPendingMessage();
4100}
4101
4102
4103void FullCodeGenerator::ExitFinallyBlock() {
4104 DCHECK(!result_register().is(r4));
4105 // Restore pending message from stack.
Ben Murdoch097c5b22016-05-18 11:27:45 +01004106 PopOperand(r4);
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00004107 ExternalReference pending_message_obj =
4108 ExternalReference::address_of_pending_message_obj(isolate());
4109 __ mov(ip, Operand(pending_message_obj));
4110 __ StoreP(r4, MemOperand(ip));
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00004111}
4112
4113
4114void FullCodeGenerator::ClearPendingMessage() {
4115 DCHECK(!result_register().is(r4));
4116 ExternalReference pending_message_obj =
4117 ExternalReference::address_of_pending_message_obj(isolate());
4118 __ LoadRoot(r4, Heap::kTheHoleValueRootIndex);
4119 __ mov(ip, Operand(pending_message_obj));
4120 __ StoreP(r4, MemOperand(ip));
4121}
4122
4123
4124void FullCodeGenerator::EmitLoadStoreICSlot(FeedbackVectorSlot slot) {
4125 DCHECK(!slot.IsInvalid());
4126 __ mov(VectorStoreICTrampolineDescriptor::SlotRegister(),
4127 Operand(SmiFromSlot(slot)));
4128}
4129
Ben Murdoch097c5b22016-05-18 11:27:45 +01004130void FullCodeGenerator::DeferredCommands::EmitCommands() {
4131 DCHECK(!result_register().is(r4));
4132 // Restore the accumulator (r3) and token (r4).
4133 __ Pop(r4, result_register());
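  // Each deferred command is guarded by its token; commands whose token does
  // not match the value just popped into r4 are skipped.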
4134 for (DeferredCommand cmd : commands_) {
4135 Label skip;
4136 __ CmpSmiLiteral(r4, Smi::FromInt(cmd.token), r0);
4137 __ bne(&skip);
4138 switch (cmd.command) {
4139 case kReturn:
4140 codegen_->EmitUnwindAndReturn();
4141 break;
4142 case kThrow:
4143 __ Push(result_register());
4144 __ CallRuntime(Runtime::kReThrow);
4145 break;
4146 case kContinue:
4147 codegen_->EmitContinue(cmd.target);
4148 break;
4149 case kBreak:
4150 codegen_->EmitBreak(cmd.target);
4151 break;
4152 }
4153 __ bind(&skip);
4154 }
4155}
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00004156
4157#undef __
4158
4159
4160void BackEdgeTable::PatchAt(Code* unoptimized_code, Address pc,
4161 BackEdgeState target_state,
4162 Code* replacement_code) {
4163 Address mov_address = Assembler::target_address_from_return_address(pc);
4164 Address cmp_address = mov_address - 2 * Assembler::kInstrSize;
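  // mov_address points at the load of the call target inside the back edge
  // sequence; the cmpi/crset sits two instructions earlier (see the
  // sequences sketched in the switch below).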
4165 Isolate* isolate = unoptimized_code->GetIsolate();
4166 CodePatcher patcher(isolate, cmp_address, 1);
4167
4168 switch (target_state) {
4169 case INTERRUPT: {
4170 // <decrement profiling counter>
4171 // cmpi r6, 0
4172 // bge <ok> ;; not changed
4173 // mov r12, <interrupt stub address>
4174 // mtlr r12
4175 // blrl
4176 // <reset profiling counter>
4177 // ok-label
4178 patcher.masm()->cmpi(r6, Operand::Zero());
4179 break;
4180 }
4181 case ON_STACK_REPLACEMENT:
4182 case OSR_AFTER_STACK_CHECK:
4183 // <decrement profiling counter>
4184 // crset
4185 // bge <ok> ;; not changed
4186 // mov r12, <on-stack replacement address>
4187 // mtlr r12
4188 // blrl
4189 // <reset profiling counter>
4190 // ok-label ----- pc_after points here
4191
4192 // Set the LT bit such that bge is a NOP
4193 patcher.masm()->crset(Assembler::encode_crbit(cr7, CR_LT));
4194 break;
4195 }
4196
4197 // Replace the stack check address in the mov sequence with the
4198 // entry address of the replacement code.
4199 Assembler::set_target_address_at(isolate, mov_address, unoptimized_code,
4200 replacement_code->entry());
4201
4202 unoptimized_code->GetHeap()->incremental_marking()->RecordCodeTargetPatch(
4203 unoptimized_code, mov_address, replacement_code);
4204}
4205
4206
4207BackEdgeTable::BackEdgeState BackEdgeTable::GetBackEdgeState(
4208 Isolate* isolate, Code* unoptimized_code, Address pc) {
4209 Address mov_address = Assembler::target_address_from_return_address(pc);
4210 Address cmp_address = mov_address - 2 * Assembler::kInstrSize;
4211 Address interrupt_address =
4212 Assembler::target_address_at(mov_address, unoptimized_code);
4213
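  // An unpatched back edge still contains the cmpi of the interrupt check;
  // once patched for on-stack replacement it contains a crset instead (see
  // PatchAt above).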
4214 if (Assembler::IsCmpImmediate(Assembler::instr_at(cmp_address))) {
4215 DCHECK(interrupt_address == isolate->builtins()->InterruptCheck()->entry());
4216 return INTERRUPT;
4217 }
4218
4219 DCHECK(Assembler::IsCrSet(Assembler::instr_at(cmp_address)));
4220
4221 if (interrupt_address == isolate->builtins()->OnStackReplacement()->entry()) {
4222 return ON_STACK_REPLACEMENT;
4223 }
4224
4225 DCHECK(interrupt_address ==
4226 isolate->builtins()->OsrAfterStackCheck()->entry());
4227 return OSR_AFTER_STACK_CHECK;
4228}
4229} // namespace internal
4230} // namespace v8
4231#endif // V8_TARGET_ARCH_PPC