// Copyright 2014 the V8 project authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.

#if V8_TARGET_ARCH_PPC

#include "src/ast/scopes.h"
#include "src/code-factory.h"
#include "src/code-stubs.h"
#include "src/codegen.h"
#include "src/debug/debug.h"
#include "src/full-codegen/full-codegen.h"
#include "src/ic/ic.h"
#include "src/parsing/parser.h"

#include "src/ppc/code-stubs-ppc.h"
#include "src/ppc/macro-assembler-ppc.h"

namespace v8 {
namespace internal {

#define __ ACCESS_MASM(masm())

// A patch site is a location in the code that can be patched. This class has
// a number of methods to emit the patchable code and the method EmitPatchInfo
// to record a marker back to the patchable code. The marker is a
// cmpi rx, #yyy instruction, and x * 0x0000ffff + yyy (the raw 16-bit
// immediate value) is the delta from the pc to the first instruction of the
// patchable code.
// See PatchInlinedSmiCode in ic-ppc.cc for the code that patches it.
class JumpPatchSite BASE_EMBEDDED {
 public:
  explicit JumpPatchSite(MacroAssembler* masm) : masm_(masm) {
#ifdef DEBUG
    info_emitted_ = false;
#endif
  }

  ~JumpPatchSite() { DCHECK(patch_site_.is_bound() == info_emitted_); }

  // When initially emitting this code, ensure that a jump is always generated
  // to skip the inlined smi code.
  void EmitJumpIfNotSmi(Register reg, Label* target) {
    DCHECK(!patch_site_.is_bound() && !info_emitted_);
    Assembler::BlockTrampolinePoolScope block_trampoline_pool(masm_);
    __ bind(&patch_site_);
    __ cmp(reg, reg, cr0);
    __ beq(target, cr0);  // Always taken before patched.
  }

  // When initially emitting this code, ensure that a jump is never generated
  // to skip the inlined smi code.
  void EmitJumpIfSmi(Register reg, Label* target) {
    Assembler::BlockTrampolinePoolScope block_trampoline_pool(masm_);
    DCHECK(!patch_site_.is_bound() && !info_emitted_);
    __ bind(&patch_site_);
    __ cmp(reg, reg, cr0);
    __ bne(target, cr0);  // Never taken before patched.
  }

  void EmitPatchInfo() {
    if (patch_site_.is_bound()) {
      int delta_to_patch_site = masm_->InstructionsGeneratedSince(&patch_site_);
      Register reg;
      // I believe this is using reg as the high bits of the offset.
      reg.set_code(delta_to_patch_site / kOff16Mask);
      __ cmpi(reg, Operand(delta_to_patch_site % kOff16Mask));
#ifdef DEBUG
      info_emitted_ = true;
#endif
    } else {
      __ nop();  // Signals no inlined code.
    }
  }

 private:
  MacroAssembler* masm() { return masm_; }
  MacroAssembler* masm_;
  Label patch_site_;
#ifdef DEBUG
  bool info_emitted_;
#endif
};


// Generate code for a JS function. On entry to the function the receiver
// and arguments have been pushed on the stack left to right. The actual
// argument count matches the formal parameter count expected by the
// function.
//
// The live registers are:
//   o r4: the JS function object being called (i.e., ourselves)
//   o r6: the new target value
//   o cp: our context
//   o fp: our caller's frame pointer (aka r31)
//   o sp: stack pointer
//   o lr: return address
//   o ip: our own function entry (required by the prologue)
//
// The function builds a JS frame. Please see JavaScriptFrameConstants in
// frames-ppc.h for its layout.
void FullCodeGenerator::Generate() {
  CompilationInfo* info = info_;
  profiling_counter_ = isolate()->factory()->NewCell(
      Handle<Smi>(Smi::FromInt(FLAG_interrupt_budget), isolate()));
  SetFunctionPosition(literal());
  Comment cmnt(masm_, "[ function compiled by full code generator");

  ProfileEntryHookStub::MaybeCallEntryHook(masm_);

  if (FLAG_debug_code && info->ExpectsJSReceiverAsReceiver()) {
    int receiver_offset = info->scope()->num_parameters() * kPointerSize;
    __ LoadP(r5, MemOperand(sp, receiver_offset), r0);
    __ AssertNotSmi(r5);
    __ CompareObjectType(r5, r5, no_reg, FIRST_JS_RECEIVER_TYPE);
    __ Assert(ge, kSloppyFunctionExpectsJSReceiverReceiver);
  }

  // Open a frame scope to indicate that there is a frame on the stack. The
  // MANUAL indicates that the scope shouldn't actually generate code to set up
  // the frame (that is done below).
  FrameScope frame_scope(masm_, StackFrame::MANUAL);
  int prologue_offset = masm_->pc_offset();

  if (prologue_offset) {
    // Prologue logic requires its starting address in ip and the
    // corresponding offset from the function entry.
    prologue_offset += Instruction::kInstrSize;
    __ addi(ip, ip, Operand(prologue_offset));
  }
  info->set_prologue_offset(prologue_offset);
  __ Prologue(info->GeneratePreagedPrologue(), ip, prologue_offset);

  {
    Comment cmnt(masm_, "[ Allocate locals");
    int locals_count = info->scope()->num_stack_slots();
    // Generators allocate locals, if any, in context slots.
    DCHECK(!IsGeneratorFunction(info->literal()->kind()) || locals_count == 0);
    OperandStackDepthIncrement(locals_count);
    if (locals_count > 0) {
      if (locals_count >= 128) {
        Label ok;
        __ Add(ip, sp, -(locals_count * kPointerSize), r0);
        __ LoadRoot(r5, Heap::kRealStackLimitRootIndex);
        __ cmpl(ip, r5);
        __ bc_short(ge, &ok);
        __ CallRuntime(Runtime::kThrowStackOverflow);
        __ bind(&ok);
      }
      __ LoadRoot(ip, Heap::kUndefinedValueRootIndex);
      int kMaxPushes = FLAG_optimize_for_size ? 4 : 32;
      if (locals_count >= kMaxPushes) {
        int loop_iterations = locals_count / kMaxPushes;
        __ mov(r5, Operand(loop_iterations));
        __ mtctr(r5);
        Label loop_header;
        __ bind(&loop_header);
        // Do pushes.
        for (int i = 0; i < kMaxPushes; i++) {
          __ push(ip);
        }
        // Continue loop if not done.
        __ bdnz(&loop_header);
      }
      int remaining = locals_count % kMaxPushes;
      // Emit the remaining pushes.
      for (int i = 0; i < remaining; i++) {
        __ push(ip);
      }
    }
  }

  bool function_in_register_r4 = true;

  // Possibly allocate a local context.
  if (info->scope()->num_heap_slots() > 0) {
    // Argument to NewContext is the function, which is still in r4.
    Comment cmnt(masm_, "[ Allocate context");
    bool need_write_barrier = true;
    int slots = info->scope()->num_heap_slots() - Context::MIN_CONTEXT_SLOTS;
    if (info->scope()->is_script_scope()) {
      __ push(r4);
      __ Push(info->scope()->GetScopeInfo(info->isolate()));
      __ CallRuntime(Runtime::kNewScriptContext);
      PrepareForBailoutForId(BailoutId::ScriptContext(), TOS_REG);
      // The new target value is not used, clobbering is safe.
      DCHECK_NULL(info->scope()->new_target_var());
    } else {
      if (info->scope()->new_target_var() != nullptr) {
        __ push(r6);  // Preserve new target.
      }
      if (slots <= FastNewContextStub::kMaximumSlots) {
        FastNewContextStub stub(isolate(), slots);
        __ CallStub(&stub);
        // Result of FastNewContextStub is always in new space.
        need_write_barrier = false;
      } else {
        __ push(r4);
        __ CallRuntime(Runtime::kNewFunctionContext);
      }
      if (info->scope()->new_target_var() != nullptr) {
        __ pop(r6);  // Preserve new target.
      }
    }
    function_in_register_r4 = false;
    // Context is returned in r3. It replaces the context passed to us.
    // It's saved in the stack and kept live in cp.
    __ mr(cp, r3);
    __ StoreP(r3, MemOperand(fp, StandardFrameConstants::kContextOffset));
    // Copy any necessary parameters into the context.
    int num_parameters = info->scope()->num_parameters();
    int first_parameter = info->scope()->has_this_declaration() ? -1 : 0;
    for (int i = first_parameter; i < num_parameters; i++) {
      Variable* var = (i == -1) ? scope()->receiver() : scope()->parameter(i);
      if (var->IsContextSlot()) {
        int parameter_offset = StandardFrameConstants::kCallerSPOffset +
                               (num_parameters - 1 - i) * kPointerSize;
        // Load parameter from stack.
        __ LoadP(r3, MemOperand(fp, parameter_offset), r0);
        // Store it in the context.
        MemOperand target = ContextMemOperand(cp, var->index());
        __ StoreP(r3, target, r0);

        // Update the write barrier.
        if (need_write_barrier) {
          __ RecordWriteContextSlot(cp, target.offset(), r3, r5,
                                    kLRHasBeenSaved, kDontSaveFPRegs);
        } else if (FLAG_debug_code) {
          Label done;
          __ JumpIfInNewSpace(cp, r3, &done);
          __ Abort(kExpectedNewSpaceObject);
          __ bind(&done);
        }
      }
    }
  }

  // The registers holding the function and the new target are both trashed if
  // we bail out here. But since that can happen only when the new target is
  // not used and we allocate a context, the value of |function_in_register_r4|
  // is correct.
  PrepareForBailoutForId(BailoutId::FunctionContext(), NO_REGISTERS);

  // Possibly set up a local binding to the this function which is used in
  // derived constructors with super calls.
  Variable* this_function_var = scope()->this_function_var();
  if (this_function_var != nullptr) {
    Comment cmnt(masm_, "[ This function");
    if (!function_in_register_r4) {
      __ LoadP(r4, MemOperand(fp, JavaScriptFrameConstants::kFunctionOffset));
      // The write barrier clobbers the register again, keep it marked as such.
    }
    SetVar(this_function_var, r4, r3, r5);
  }

  // Possibly set up a local binding to the new target value.
  Variable* new_target_var = scope()->new_target_var();
  if (new_target_var != nullptr) {
    Comment cmnt(masm_, "[ new.target");
    SetVar(new_target_var, r6, r3, r5);
  }

  // Possibly allocate a rest parameter array.
  int rest_index;
  Variable* rest_param = scope()->rest_parameter(&rest_index);
  if (rest_param) {
    Comment cmnt(masm_, "[ Allocate rest parameter array");
    if (!function_in_register_r4) {
      __ LoadP(r4, MemOperand(fp, JavaScriptFrameConstants::kFunctionOffset));
    }
    FastNewRestParameterStub stub(isolate());
    __ CallStub(&stub);
    function_in_register_r4 = false;
    SetVar(rest_param, r3, r4, r5);
  }

  Variable* arguments = scope()->arguments();
  if (arguments != NULL) {
    // Function uses arguments object.
    Comment cmnt(masm_, "[ Allocate arguments object");
    if (!function_in_register_r4) {
      // Load this again, if it's used by the local context below.
      __ LoadP(r4, MemOperand(fp, JavaScriptFrameConstants::kFunctionOffset));
    }
    if (is_strict(language_mode()) || !has_simple_parameters()) {
      FastNewStrictArgumentsStub stub(isolate());
      __ CallStub(&stub);
    } else if (literal()->has_duplicate_parameters()) {
      __ Push(r4);
      __ CallRuntime(Runtime::kNewSloppyArguments_Generic);
    } else {
      FastNewSloppyArgumentsStub stub(isolate());
      __ CallStub(&stub);
    }

    SetVar(arguments, r3, r4, r5);
  }

  if (FLAG_trace) {
    __ CallRuntime(Runtime::kTraceEnter);
  }

  // Visit the declarations and body.
  PrepareForBailoutForId(BailoutId::FunctionEntry(), NO_REGISTERS);
  {
    Comment cmnt(masm_, "[ Declarations");
    VisitDeclarations(scope()->declarations());
  }

  // Assert that the declarations do not use ICs. Otherwise the debugger
  // won't be able to redirect a PC at an IC to the correct IC in newly
  // recompiled code.
  DCHECK_EQ(0, ic_total_count_);

  {
    Comment cmnt(masm_, "[ Stack check");
    PrepareForBailoutForId(BailoutId::Declarations(), NO_REGISTERS);
    Label ok;
    __ LoadRoot(ip, Heap::kStackLimitRootIndex);
    __ cmpl(sp, ip);
    __ bc_short(ge, &ok);
    __ Call(isolate()->builtins()->StackCheck(), RelocInfo::CODE_TARGET);
    __ bind(&ok);
  }

  {
    Comment cmnt(masm_, "[ Body");
    DCHECK(loop_depth() == 0);
    VisitStatements(literal()->body());
    DCHECK(loop_depth() == 0);
  }

  // Always emit a 'return undefined' in case control fell off the end of
  // the body.
  {
    Comment cmnt(masm_, "[ return <undefined>;");
    __ LoadRoot(r3, Heap::kUndefinedValueRootIndex);
  }
  EmitReturnSequence();

  if (HasStackOverflow()) {
    masm_->AbortConstantPoolBuilding();
  }
}


void FullCodeGenerator::ClearAccumulator() {
  __ LoadSmiLiteral(r3, Smi::FromInt(0));
}


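// Decrement the profiling counter cell (which holds a Smi) by |delta|,
// clobbering r5 and r6.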
void FullCodeGenerator::EmitProfilingCounterDecrement(int delta) {
  __ mov(r5, Operand(profiling_counter_));
  __ LoadP(r6, FieldMemOperand(r5, Cell::kValueOffset));
  __ SubSmiLiteral(r6, r6, Smi::FromInt(delta), r0);
  __ StoreP(r6, FieldMemOperand(r5, Cell::kValueOffset), r0);
}


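// Reset the profiling counter cell to the full interrupt budget.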
void FullCodeGenerator::EmitProfilingCounterReset() {
  int reset_value = FLAG_interrupt_budget;
  __ mov(r5, Operand(profiling_counter_));
  __ LoadSmiLiteral(r6, Smi::FromInt(reset_value));
  __ StoreP(r6, FieldMemOperand(r5, Cell::kValueOffset), r0);
}


void FullCodeGenerator::EmitBackEdgeBookkeeping(IterationStatement* stmt,
                                                Label* back_edge_target) {
  Comment cmnt(masm_, "[ Back edge bookkeeping");
  Label ok;

  DCHECK(back_edge_target->is_bound());
  int distance = masm_->SizeOfCodeGeneratedSince(back_edge_target) +
                 kCodeSizeMultiplier / 2;
  int weight = Min(kMaxBackEdgeWeight, Max(1, distance / kCodeSizeMultiplier));
  EmitProfilingCounterDecrement(weight);
  {
    Assembler::BlockTrampolinePoolScope block_trampoline_pool(masm_);
    Assembler::BlockConstantPoolEntrySharingScope prevent_entry_sharing(masm_);
    // BackEdgeTable::PatchAt manipulates this sequence.
    __ cmpi(r6, Operand::Zero());
    __ bc_short(ge, &ok);
    __ Call(isolate()->builtins()->InterruptCheck(), RelocInfo::CODE_TARGET);

    // Record a mapping of this PC offset to the OSR id. This is used to find
    // the AST id from the unoptimized code in order to use it as a key into
    // the deoptimization input data found in the optimized code.
    RecordBackEdge(stmt->OsrEntryId());
  }
  EmitProfilingCounterReset();

  __ bind(&ok);
  PrepareForBailoutForId(stmt->EntryId(), NO_REGISTERS);
  // Record a mapping of the OSR id to this PC. This is used if the OSR
  // entry becomes the target of a bailout. We don't expect it to be, but
  // we want it to work if it is.
  PrepareForBailoutForId(stmt->OsrEntryId(), NO_REGISTERS);
}

void FullCodeGenerator::EmitProfilingCounterHandlingForReturnSequence(
    bool is_tail_call) {
  // Pretend that the exit is a backwards jump to the entry.
  int weight = 1;
  if (info_->ShouldSelfOptimize()) {
    weight = FLAG_interrupt_budget / FLAG_self_opt_count;
  } else {
    int distance = masm_->pc_offset() + kCodeSizeMultiplier / 2;
    weight = Min(kMaxBackEdgeWeight, Max(1, distance / kCodeSizeMultiplier));
  }
  EmitProfilingCounterDecrement(weight);
  Label ok;
  __ cmpi(r6, Operand::Zero());
  __ bge(&ok);
  // Don't need to save result register if we are going to do a tail call.
  if (!is_tail_call) {
    __ push(r3);
  }
  __ Call(isolate()->builtins()->InterruptCheck(), RelocInfo::CODE_TARGET);
  if (!is_tail_call) {
    __ pop(r3);
  }
  EmitProfilingCounterReset();
  __ bind(&ok);
}

void FullCodeGenerator::EmitReturnSequence() {
  Comment cmnt(masm_, "[ Return sequence");
  if (return_label_.is_bound()) {
    __ b(&return_label_);
  } else {
    __ bind(&return_label_);
    if (FLAG_trace) {
      // Push the return value on the stack as the parameter.
      // Runtime::TraceExit returns its parameter in r3.
      __ push(r3);
      __ CallRuntime(Runtime::kTraceExit);
    }
    EmitProfilingCounterHandlingForReturnSequence(false);

    // Make sure that the constant pool is not emitted inside of the return
    // sequence.
    {
      Assembler::BlockTrampolinePoolScope block_trampoline_pool(masm_);
      int32_t arg_count = info_->scope()->num_parameters() + 1;
      int32_t sp_delta = arg_count * kPointerSize;
      SetReturnPosition(literal());
      __ LeaveFrame(StackFrame::JAVA_SCRIPT, sp_delta);
      __ blr();
    }
  }
}


void FullCodeGenerator::StackValueContext::Plug(Variable* var) const {
  DCHECK(var->IsStackAllocated() || var->IsContextSlot());
  codegen()->GetVar(result_register(), var);
  codegen()->PushOperand(result_register());
}


void FullCodeGenerator::EffectContext::Plug(Heap::RootListIndex index) const {}


void FullCodeGenerator::AccumulatorValueContext::Plug(
    Heap::RootListIndex index) const {
  __ LoadRoot(result_register(), index);
}


void FullCodeGenerator::StackValueContext::Plug(
    Heap::RootListIndex index) const {
  __ LoadRoot(result_register(), index);
  codegen()->PushOperand(result_register());
}


void FullCodeGenerator::TestContext::Plug(Heap::RootListIndex index) const {
  codegen()->PrepareForBailoutBeforeSplit(condition(), true, true_label_,
                                          false_label_);
  if (index == Heap::kUndefinedValueRootIndex ||
      index == Heap::kNullValueRootIndex ||
      index == Heap::kFalseValueRootIndex) {
    if (false_label_ != fall_through_) __ b(false_label_);
  } else if (index == Heap::kTrueValueRootIndex) {
    if (true_label_ != fall_through_) __ b(true_label_);
  } else {
    __ LoadRoot(result_register(), index);
    codegen()->DoTest(this);
  }
}


void FullCodeGenerator::EffectContext::Plug(Handle<Object> lit) const {}


void FullCodeGenerator::AccumulatorValueContext::Plug(
    Handle<Object> lit) const {
  __ mov(result_register(), Operand(lit));
}


void FullCodeGenerator::StackValueContext::Plug(Handle<Object> lit) const {
  // Immediates cannot be pushed directly.
  __ mov(result_register(), Operand(lit));
  codegen()->PushOperand(result_register());
}


void FullCodeGenerator::TestContext::Plug(Handle<Object> lit) const {
  codegen()->PrepareForBailoutBeforeSplit(condition(), true, true_label_,
                                          false_label_);
  DCHECK(lit->IsNull() || lit->IsUndefined() || !lit->IsUndetectable());
  if (lit->IsUndefined() || lit->IsNull() || lit->IsFalse()) {
    if (false_label_ != fall_through_) __ b(false_label_);
  } else if (lit->IsTrue() || lit->IsJSObject()) {
    if (true_label_ != fall_through_) __ b(true_label_);
  } else if (lit->IsString()) {
    if (String::cast(*lit)->length() == 0) {
      if (false_label_ != fall_through_) __ b(false_label_);
    } else {
      if (true_label_ != fall_through_) __ b(true_label_);
    }
  } else if (lit->IsSmi()) {
    if (Smi::cast(*lit)->value() == 0) {
      if (false_label_ != fall_through_) __ b(false_label_);
    } else {
      if (true_label_ != fall_through_) __ b(true_label_);
    }
  } else {
    // For simplicity we always test the accumulator register.
    __ mov(result_register(), Operand(lit));
    codegen()->DoTest(this);
  }
}


void FullCodeGenerator::StackValueContext::DropAndPlug(int count,
                                                       Register reg) const {
  DCHECK(count > 0);
  if (count > 1) codegen()->DropOperands(count - 1);
  __ StoreP(reg, MemOperand(sp, 0));
}


void FullCodeGenerator::EffectContext::Plug(Label* materialize_true,
                                            Label* materialize_false) const {
  DCHECK(materialize_true == materialize_false);
  __ bind(materialize_true);
}


void FullCodeGenerator::AccumulatorValueContext::Plug(
    Label* materialize_true, Label* materialize_false) const {
  Label done;
  __ bind(materialize_true);
  __ LoadRoot(result_register(), Heap::kTrueValueRootIndex);
  __ b(&done);
  __ bind(materialize_false);
  __ LoadRoot(result_register(), Heap::kFalseValueRootIndex);
  __ bind(&done);
}


void FullCodeGenerator::StackValueContext::Plug(
    Label* materialize_true, Label* materialize_false) const {
  Label done;
  __ bind(materialize_true);
  __ LoadRoot(ip, Heap::kTrueValueRootIndex);
  __ b(&done);
  __ bind(materialize_false);
  __ LoadRoot(ip, Heap::kFalseValueRootIndex);
  __ bind(&done);
  codegen()->PushOperand(ip);
}


void FullCodeGenerator::TestContext::Plug(Label* materialize_true,
                                          Label* materialize_false) const {
  DCHECK(materialize_true == true_label_);
  DCHECK(materialize_false == false_label_);
}


void FullCodeGenerator::AccumulatorValueContext::Plug(bool flag) const {
  Heap::RootListIndex value_root_index =
      flag ? Heap::kTrueValueRootIndex : Heap::kFalseValueRootIndex;
  __ LoadRoot(result_register(), value_root_index);
}


void FullCodeGenerator::StackValueContext::Plug(bool flag) const {
  Heap::RootListIndex value_root_index =
      flag ? Heap::kTrueValueRootIndex : Heap::kFalseValueRootIndex;
  __ LoadRoot(ip, value_root_index);
  codegen()->PushOperand(ip);
}


void FullCodeGenerator::TestContext::Plug(bool flag) const {
  codegen()->PrepareForBailoutBeforeSplit(condition(), true, true_label_,
                                          false_label_);
  if (flag) {
    if (true_label_ != fall_through_) __ b(true_label_);
  } else {
    if (false_label_ != fall_through_) __ b(false_label_);
  }
}


void FullCodeGenerator::DoTest(Expression* condition, Label* if_true,
                               Label* if_false, Label* fall_through) {
  Handle<Code> ic = ToBooleanICStub::GetUninitialized(isolate());
  CallIC(ic, condition->test_id());
  __ CompareRoot(result_register(), Heap::kTrueValueRootIndex);
  Split(eq, if_true, if_false, fall_through);
}


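// Emit a branch on |cond| to |if_true| or |if_false|, omitting whichever
// branch would target the fall-through label.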
void FullCodeGenerator::Split(Condition cond, Label* if_true, Label* if_false,
                              Label* fall_through, CRegister cr) {
  if (if_false == fall_through) {
    __ b(cond, if_true, cr);
  } else if (if_true == fall_through) {
    __ b(NegateCondition(cond), if_false, cr);
  } else {
    __ b(cond, if_true, cr);
    __ b(if_false);
  }
}


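// Return the fp-relative operand for a stack-allocated parameter or local.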
MemOperand FullCodeGenerator::StackOperand(Variable* var) {
  DCHECK(var->IsStackAllocated());
  // Offset is negative because higher indexes are at lower addresses.
  int offset = -var->index() * kPointerSize;
  // Adjust by a (parameter or local) base offset.
  if (var->IsParameter()) {
    offset += (info_->scope()->num_parameters() + 1) * kPointerSize;
  } else {
    offset += JavaScriptFrameConstants::kLocal0Offset;
  }
  return MemOperand(fp, offset);
}


MemOperand FullCodeGenerator::VarOperand(Variable* var, Register scratch) {
  DCHECK(var->IsContextSlot() || var->IsStackAllocated());
  if (var->IsContextSlot()) {
    int context_chain_length = scope()->ContextChainLength(var->scope());
    __ LoadContext(scratch, context_chain_length);
    return ContextMemOperand(scratch, var->index());
  } else {
    return StackOperand(var);
  }
}


void FullCodeGenerator::GetVar(Register dest, Variable* var) {
  // Use destination as scratch.
  MemOperand location = VarOperand(var, dest);
  __ LoadP(dest, location, r0);
}


void FullCodeGenerator::SetVar(Variable* var, Register src, Register scratch0,
                               Register scratch1) {
  DCHECK(var->IsContextSlot() || var->IsStackAllocated());
  DCHECK(!scratch0.is(src));
  DCHECK(!scratch0.is(scratch1));
  DCHECK(!scratch1.is(src));
  MemOperand location = VarOperand(var, scratch0);
  __ StoreP(src, location, r0);

  // Emit the write barrier code if the location is in the heap.
  if (var->IsContextSlot()) {
    __ RecordWriteContextSlot(scratch0, location.offset(), src, scratch1,
                              kLRHasBeenSaved, kDontSaveFPRegs);
  }
}


void FullCodeGenerator::PrepareForBailoutBeforeSplit(Expression* expr,
                                                     bool should_normalize,
                                                     Label* if_true,
                                                     Label* if_false) {
  // Only prepare for bailouts before splits if we're in a test
  // context. Otherwise, we let the Visit function deal with the
  // preparation to avoid preparing with the same AST id twice.
  if (!context()->IsTest()) return;

  Label skip;
  if (should_normalize) __ b(&skip);
  PrepareForBailout(expr, TOS_REG);
  if (should_normalize) {
    __ LoadRoot(ip, Heap::kTrueValueRootIndex);
    __ cmp(r3, ip);
    Split(eq, if_true, if_false, NULL);
    __ bind(&skip);
  }
}


void FullCodeGenerator::EmitDebugCheckDeclarationContext(Variable* variable) {
  // The variable in the declaration always resides in the current function
  // context.
  DCHECK_EQ(0, scope()->ContextChainLength(variable->scope()));
  if (FLAG_debug_code) {
    // Check that we're not inside a with or catch context.
    __ LoadP(r4, FieldMemOperand(cp, HeapObject::kMapOffset));
    __ CompareRoot(r4, Heap::kWithContextMapRootIndex);
    __ Check(ne, kDeclarationInWithContext);
    __ CompareRoot(r4, Heap::kCatchContextMapRootIndex);
    __ Check(ne, kDeclarationInCatchContext);
  }
}


void FullCodeGenerator::VisitVariableDeclaration(
    VariableDeclaration* declaration) {
  // If it was not possible to allocate the variable at compile time, we
  // need to "declare" it at runtime to make sure it actually exists in the
  // local context.
  VariableProxy* proxy = declaration->proxy();
  VariableMode mode = declaration->mode();
  Variable* variable = proxy->var();
  bool hole_init = mode == LET || mode == CONST || mode == CONST_LEGACY;
  switch (variable->location()) {
    case VariableLocation::GLOBAL:
    case VariableLocation::UNALLOCATED:
      globals_->Add(variable->name(), zone());
      globals_->Add(variable->binding_needs_init()
                        ? isolate()->factory()->the_hole_value()
                        : isolate()->factory()->undefined_value(),
                    zone());
      break;

    case VariableLocation::PARAMETER:
    case VariableLocation::LOCAL:
      if (hole_init) {
        Comment cmnt(masm_, "[ VariableDeclaration");
        __ LoadRoot(ip, Heap::kTheHoleValueRootIndex);
        __ StoreP(ip, StackOperand(variable));
      }
      break;

    case VariableLocation::CONTEXT:
      if (hole_init) {
        Comment cmnt(masm_, "[ VariableDeclaration");
        EmitDebugCheckDeclarationContext(variable);
        __ LoadRoot(ip, Heap::kTheHoleValueRootIndex);
        __ StoreP(ip, ContextMemOperand(cp, variable->index()), r0);
        // No write barrier since the_hole_value is in old space.
        PrepareForBailoutForId(proxy->id(), NO_REGISTERS);
      }
      break;

    case VariableLocation::LOOKUP: {
      Comment cmnt(masm_, "[ VariableDeclaration");
      __ mov(r5, Operand(variable->name()));
      // Declaration nodes are always introduced in one of four modes.
      DCHECK(IsDeclaredVariableMode(mode));
      // Push initial value, if any.
      // Note: For variables we must not push an initial value (such as
      // 'undefined') because we may have a (legal) redeclaration and we
      // must not destroy the current value.
      if (hole_init) {
        __ LoadRoot(r3, Heap::kTheHoleValueRootIndex);
      } else {
        __ LoadSmiLiteral(r3, Smi::FromInt(0));  // Indicates no initial value.
      }
      __ Push(r5, r3);
      __ Push(Smi::FromInt(variable->DeclarationPropertyAttributes()));
      __ CallRuntime(Runtime::kDeclareLookupSlot);
      break;
    }
  }
}


void FullCodeGenerator::VisitFunctionDeclaration(
    FunctionDeclaration* declaration) {
  VariableProxy* proxy = declaration->proxy();
  Variable* variable = proxy->var();
  switch (variable->location()) {
    case VariableLocation::GLOBAL:
    case VariableLocation::UNALLOCATED: {
      globals_->Add(variable->name(), zone());
      Handle<SharedFunctionInfo> function =
          Compiler::GetSharedFunctionInfo(declaration->fun(), script(), info_);
      // Check for stack-overflow exception.
      if (function.is_null()) return SetStackOverflow();
      globals_->Add(function, zone());
      break;
    }

    case VariableLocation::PARAMETER:
    case VariableLocation::LOCAL: {
      Comment cmnt(masm_, "[ FunctionDeclaration");
      VisitForAccumulatorValue(declaration->fun());
      __ StoreP(result_register(), StackOperand(variable));
      break;
    }

    case VariableLocation::CONTEXT: {
      Comment cmnt(masm_, "[ FunctionDeclaration");
      EmitDebugCheckDeclarationContext(variable);
      VisitForAccumulatorValue(declaration->fun());
      __ StoreP(result_register(), ContextMemOperand(cp, variable->index()),
                r0);
      int offset = Context::SlotOffset(variable->index());
      // We know that we have written a function, which is not a smi.
      __ RecordWriteContextSlot(cp, offset, result_register(), r5,
                                kLRHasBeenSaved, kDontSaveFPRegs,
                                EMIT_REMEMBERED_SET, OMIT_SMI_CHECK);
      PrepareForBailoutForId(proxy->id(), NO_REGISTERS);
      break;
    }

    case VariableLocation::LOOKUP: {
      Comment cmnt(masm_, "[ FunctionDeclaration");
      __ mov(r5, Operand(variable->name()));
      PushOperand(r5);
      // Push initial value for function declaration.
      VisitForStackValue(declaration->fun());
      PushOperand(Smi::FromInt(variable->DeclarationPropertyAttributes()));
      CallRuntimeWithOperands(Runtime::kDeclareLookupSlot);
      break;
    }
  }
}


void FullCodeGenerator::DeclareGlobals(Handle<FixedArray> pairs) {
  // Call the runtime to declare the globals.
  __ mov(r4, Operand(pairs));
  __ LoadSmiLiteral(r3, Smi::FromInt(DeclareGlobalsFlags()));
  __ Push(r4, r3);
  __ CallRuntime(Runtime::kDeclareGlobals);
  // Return value is ignored.
}


void FullCodeGenerator::DeclareModules(Handle<FixedArray> descriptions) {
  // Call the runtime to declare the modules.
  __ Push(descriptions);
  __ CallRuntime(Runtime::kDeclareModules);
  // Return value is ignored.
}


void FullCodeGenerator::VisitSwitchStatement(SwitchStatement* stmt) {
  Comment cmnt(masm_, "[ SwitchStatement");
  Breakable nested_statement(this, stmt);
  SetStatementPosition(stmt);

  // Keep the switch value on the stack until a case matches.
  VisitForStackValue(stmt->tag());
  PrepareForBailoutForId(stmt->EntryId(), NO_REGISTERS);

  ZoneList<CaseClause*>* clauses = stmt->cases();
  CaseClause* default_clause = NULL;  // Can occur anywhere in the list.

  Label next_test;  // Recycled for each test.
  // Compile all the tests with branches to their bodies.
  for (int i = 0; i < clauses->length(); i++) {
    CaseClause* clause = clauses->at(i);
    clause->body_target()->Unuse();

    // The default is not a test, but remember it as final fall through.
    if (clause->is_default()) {
      default_clause = clause;
      continue;
    }

    Comment cmnt(masm_, "[ Case comparison");
    __ bind(&next_test);
    next_test.Unuse();

    // Compile the label expression.
    VisitForAccumulatorValue(clause->label());

    // Perform the comparison as if via '==='.
    __ LoadP(r4, MemOperand(sp, 0));  // Switch value.
    bool inline_smi_code = ShouldInlineSmiCase(Token::EQ_STRICT);
    JumpPatchSite patch_site(masm_);
    if (inline_smi_code) {
      Label slow_case;
      __ orx(r5, r4, r3);
      patch_site.EmitJumpIfNotSmi(r5, &slow_case);

      __ cmp(r4, r3);
      __ bne(&next_test);
      __ Drop(1);  // Switch value is no longer needed.
      __ b(clause->body_target());
      __ bind(&slow_case);
    }

    // Record position before stub call for type feedback.
    SetExpressionPosition(clause);
    Handle<Code> ic =
        CodeFactory::CompareIC(isolate(), Token::EQ_STRICT).code();
    CallIC(ic, clause->CompareId());
    patch_site.EmitPatchInfo();

    Label skip;
    __ b(&skip);
    PrepareForBailout(clause, TOS_REG);
    __ LoadRoot(ip, Heap::kTrueValueRootIndex);
    __ cmp(r3, ip);
    __ bne(&next_test);
    __ Drop(1);
    __ b(clause->body_target());
    __ bind(&skip);

    __ cmpi(r3, Operand::Zero());
    __ bne(&next_test);
    __ Drop(1);  // Switch value is no longer needed.
    __ b(clause->body_target());
  }

  // Discard the test value and jump to the default if present, otherwise to
  // the end of the statement.
  __ bind(&next_test);
  DropOperands(1);  // Switch value is no longer needed.
  if (default_clause == NULL) {
    __ b(nested_statement.break_label());
  } else {
    __ b(default_clause->body_target());
  }

  // Compile all the case bodies.
  for (int i = 0; i < clauses->length(); i++) {
    Comment cmnt(masm_, "[ Case body");
    CaseClause* clause = clauses->at(i);
    __ bind(clause->body_target());
    PrepareForBailoutForId(clause->EntryId(), NO_REGISTERS);
    VisitStatements(clause->statements());
  }

  __ bind(nested_statement.break_label());
  PrepareForBailoutForId(stmt->ExitId(), NO_REGISTERS);
}


void FullCodeGenerator::VisitForInStatement(ForInStatement* stmt) {
  Comment cmnt(masm_, "[ ForInStatement");
  SetStatementPosition(stmt, SKIP_BREAK);

  FeedbackVectorSlot slot = stmt->ForInFeedbackSlot();

  // Get the object to enumerate over.
  SetExpressionAsStatementPosition(stmt->enumerable());
  VisitForAccumulatorValue(stmt->enumerable());
  OperandStackDepthIncrement(5);

  Label loop, exit;
  Iteration loop_statement(this, stmt);
  increment_loop_depth();

  // If the object is null or undefined, skip over the loop, otherwise convert
  // it to a JS receiver. See ECMA-262 version 5, section 12.6.4.
  Label convert, done_convert;
  __ JumpIfSmi(r3, &convert);
  __ CompareObjectType(r3, r4, r4, FIRST_JS_RECEIVER_TYPE);
  __ bge(&done_convert);
  __ CompareRoot(r3, Heap::kNullValueRootIndex);
  __ beq(&exit);
  __ CompareRoot(r3, Heap::kUndefinedValueRootIndex);
  __ beq(&exit);
  __ bind(&convert);
  ToObjectStub stub(isolate());
  __ CallStub(&stub);
  __ bind(&done_convert);
  PrepareForBailoutForId(stmt->ToObjectId(), TOS_REG);
  __ push(r3);

  // Check cache validity in generated code. This is a fast case for
  // the JSObject::IsSimpleEnum cache validity checks. If we cannot
  // guarantee cache validity, call the runtime system to check cache
  // validity or get the property names in a fixed array.
  // Note: Proxies never have an enum cache, so will always take the
  // slow path.
  Label call_runtime;
  __ CheckEnumCache(&call_runtime);

  // The enum cache is valid. Load the map of the object being
  // iterated over and use the cache for the iteration.
  Label use_cache;
  __ LoadP(r3, FieldMemOperand(r3, HeapObject::kMapOffset));
  __ b(&use_cache);

  // Get the set of properties to enumerate.
  __ bind(&call_runtime);
  __ push(r3);  // Duplicate the enumerable object on the stack.
  __ CallRuntime(Runtime::kForInEnumerate);
  PrepareForBailoutForId(stmt->EnumId(), TOS_REG);

  // If we got a map from the runtime call, we can do a fast
  // modification check. Otherwise, we got a fixed array, and we have
  // to do a slow check.
  Label fixed_array;
  __ LoadP(r5, FieldMemOperand(r3, HeapObject::kMapOffset));
  __ LoadRoot(ip, Heap::kMetaMapRootIndex);
  __ cmp(r5, ip);
  __ bne(&fixed_array);

  // We got a map in register r3. Get the enumeration cache from it.
  Label no_descriptors;
  __ bind(&use_cache);

  __ EnumLength(r4, r3);
  __ CmpSmiLiteral(r4, Smi::FromInt(0), r0);
  __ beq(&no_descriptors);

  __ LoadInstanceDescriptors(r3, r5);
  __ LoadP(r5, FieldMemOperand(r5, DescriptorArray::kEnumCacheOffset));
  __ LoadP(r5,
           FieldMemOperand(r5, DescriptorArray::kEnumCacheBridgeCacheOffset));

  // Set up the four remaining stack slots.
  __ push(r3);  // Map.
  __ LoadSmiLiteral(r3, Smi::FromInt(0));
  // Push enumeration cache, enumeration cache length (as smi) and zero.
  __ Push(r5, r4, r3);
  __ b(&loop);

  __ bind(&no_descriptors);
  __ Drop(1);
  __ b(&exit);

  // We got a fixed array in register r3. Iterate through that.
  __ bind(&fixed_array);

  __ LoadSmiLiteral(r4, Smi::FromInt(1));  // Smi(1) indicates slow check
  __ Push(r4, r3);  // Smi and array
  __ LoadP(r4, FieldMemOperand(r3, FixedArray::kLengthOffset));
  __ Push(r4);  // Fixed array length (as smi).
  PrepareForBailoutForId(stmt->PrepareId(), NO_REGISTERS);
  __ LoadSmiLiteral(r3, Smi::FromInt(0));
  __ Push(r3);  // Initial index.
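  // The operand stack now holds, from top to bottom: the current index, the
  // length, the enum cache (or the fixed array of keys), the map (or Smi(1)
  // for the slow path), and the enumerable object itself.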
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00001041
1042 // Generate code for doing the condition check.
1043 __ bind(&loop);
1044 SetExpressionAsStatementPosition(stmt->each());
1045
1046 // Load the current count to r3, load the length to r4.
1047 __ LoadP(r3, MemOperand(sp, 0 * kPointerSize));
1048 __ LoadP(r4, MemOperand(sp, 1 * kPointerSize));
1049 __ cmpl(r3, r4); // Compare to the array length.
1050 __ bge(loop_statement.break_label());
1051
1052 // Get the current entry of the array into register r6.
1053 __ LoadP(r5, MemOperand(sp, 2 * kPointerSize));
1054 __ addi(r5, r5, Operand(FixedArray::kHeaderSize - kHeapObjectTag));
1055 __ SmiToPtrArrayOffset(r6, r3);
1056 __ LoadPX(r6, MemOperand(r6, r5));
1057
1058 // Get the expected map from the stack or a smi in the
1059 // permanent slow case into register r5.
1060 __ LoadP(r5, MemOperand(sp, 3 * kPointerSize));
1061
1062 // Check if the expected map still matches that of the enumerable.
1063 // If not, we may have to filter the key.
1064 Label update_each;
1065 __ LoadP(r4, MemOperand(sp, 4 * kPointerSize));
1066 __ LoadP(r7, FieldMemOperand(r4, HeapObject::kMapOffset));
1067 __ cmp(r7, r5);
1068 __ beq(&update_each);
1069
Ben Murdochda12d292016-06-02 14:46:10 +01001070 // We need to filter the key, record slow-path here.
1071 int const vector_index = SmiFromSlot(slot)->value();
Ben Murdoch097c5b22016-05-18 11:27:45 +01001072 __ EmitLoadTypeFeedbackVector(r3);
1073 __ mov(r5, Operand(TypeFeedbackVector::MegamorphicSentinel(isolate())));
1074 __ StoreP(
1075 r5, FieldMemOperand(r3, FixedArray::OffsetOfElementAt(vector_index)), r0);
1076
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00001077 // Convert the entry to a string or (smi) 0 if it isn't a property
1078 // any more. If the property has been removed while iterating, we
1079 // just skip it.
1080 __ Push(r4, r6); // Enumerable and current entry.
1081 __ CallRuntime(Runtime::kForInFilter);
1082 PrepareForBailoutForId(stmt->FilterId(), TOS_REG);
1083 __ mr(r6, r3);
1084 __ LoadRoot(r0, Heap::kUndefinedValueRootIndex);
1085 __ cmp(r3, r0);
1086 __ beq(loop_statement.continue_label());
1087
1088 // Update the 'each' property or variable from the possibly filtered
1089 // entry in register r6.
1090 __ bind(&update_each);
1091 __ mr(result_register(), r6);
1092 // Perform the assignment as if via '='.
1093 {
1094 EffectContext context(this);
1095 EmitAssignment(stmt->each(), stmt->EachFeedbackSlot());
1096 PrepareForBailoutForId(stmt->AssignmentId(), NO_REGISTERS);
1097 }
1098
1099 // Both Crankshaft and Turbofan expect BodyId to be right before stmt->body().
1100 PrepareForBailoutForId(stmt->BodyId(), NO_REGISTERS);
1101 // Generate code for the body of the loop.
1102 Visit(stmt->body());
1103
1104 // Generate code for the going to the next element by incrementing
1105 // the index (smi) stored on top of the stack.
1106 __ bind(loop_statement.continue_label());
1107 __ pop(r3);
1108 __ AddSmiLiteral(r3, r3, Smi::FromInt(1), r0);
1109 __ push(r3);
1110
1111 EmitBackEdgeBookkeeping(stmt, &loop);
1112 __ b(&loop);
1113
1114 // Remove the pointers stored on the stack.
1115 __ bind(loop_statement.break_label());
Ben Murdoch097c5b22016-05-18 11:27:45 +01001116 DropOperands(5);
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00001117
1118 // Exit and decrement the loop depth.
1119 PrepareForBailoutForId(stmt->ExitId(), NO_REGISTERS);
1120 __ bind(&exit);
1121 decrement_loop_depth();
1122}
1123
1124
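// Store the object found |offset| slots down the operand stack into the
// home-object slot of the value sitting on top of the stack.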
void FullCodeGenerator::EmitSetHomeObject(Expression* initializer, int offset,
                                          FeedbackVectorSlot slot) {
  DCHECK(NeedsHomeObject(initializer));
  __ LoadP(StoreDescriptor::ReceiverRegister(), MemOperand(sp));
  __ mov(StoreDescriptor::NameRegister(),
         Operand(isolate()->factory()->home_object_symbol()));
  __ LoadP(StoreDescriptor::ValueRegister(),
           MemOperand(sp, offset * kPointerSize));
  EmitLoadStoreICSlot(slot);
  CallStoreIC();
}


void FullCodeGenerator::EmitSetHomeObjectAccumulator(Expression* initializer,
                                                     int offset,
                                                     FeedbackVectorSlot slot) {
  DCHECK(NeedsHomeObject(initializer));
  __ Move(StoreDescriptor::ReceiverRegister(), r3);
  __ mov(StoreDescriptor::NameRegister(),
         Operand(isolate()->factory()->home_object_symbol()));
  __ LoadP(StoreDescriptor::ValueRegister(),
           MemOperand(sp, offset * kPointerSize));
  EmitLoadStoreICSlot(slot);
  CallStoreIC();
}


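// Walk the scope chain and, for every scope that calls a sloppy eval, check
// at run time that no extension object has been introduced that could shadow
// the global variable; then load the variable with the global load IC.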
void FullCodeGenerator::EmitLoadGlobalCheckExtensions(VariableProxy* proxy,
                                                      TypeofMode typeof_mode,
                                                      Label* slow) {
  Register current = cp;
  Register next = r4;
  Register temp = r5;

  Scope* s = scope();
  while (s != NULL) {
    if (s->num_heap_slots() > 0) {
      if (s->calls_sloppy_eval()) {
        // Check that extension is "the hole".
        __ LoadP(temp, ContextMemOperand(current, Context::EXTENSION_INDEX));
        __ JumpIfNotRoot(temp, Heap::kTheHoleValueRootIndex, slow);
      }
      // Load next context in chain.
      __ LoadP(next, ContextMemOperand(current, Context::PREVIOUS_INDEX));
      // Walk the rest of the chain without clobbering cp.
      current = next;
    }
    // If no outer scope calls eval, we do not need to check more
    // context extensions.
    if (!s->outer_scope_calls_sloppy_eval() || s->is_eval_scope()) break;
    s = s->outer_scope();
  }

  if (s->is_eval_scope()) {
    Label loop, fast;
    if (!current.is(next)) {
      __ Move(next, current);
    }
    __ bind(&loop);
    // Terminate at native context.
    __ LoadP(temp, FieldMemOperand(next, HeapObject::kMapOffset));
    __ LoadRoot(ip, Heap::kNativeContextMapRootIndex);
    __ cmp(temp, ip);
    __ beq(&fast);
    // Check that extension is "the hole".
    __ LoadP(temp, ContextMemOperand(next, Context::EXTENSION_INDEX));
    __ JumpIfNotRoot(temp, Heap::kTheHoleValueRootIndex, slow);
    // Load next context in chain.
    __ LoadP(next, ContextMemOperand(next, Context::PREVIOUS_INDEX));
    __ b(&loop);
    __ bind(&fast);
  }

  // All extension objects were empty and it is safe to use the normal global
  // load machinery.
  EmitGlobalVariableLoad(proxy, typeof_mode);
}


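// As above, but for a context-allocated variable: verify that no
// eval-introduced extension object intervenes and return the operand for the
// variable's context slot.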
MemOperand FullCodeGenerator::ContextSlotOperandCheckExtensions(Variable* var,
                                                                Label* slow) {
  DCHECK(var->IsContextSlot());
  Register context = cp;
  Register next = r6;
  Register temp = r7;

  for (Scope* s = scope(); s != var->scope(); s = s->outer_scope()) {
    if (s->num_heap_slots() > 0) {
      if (s->calls_sloppy_eval()) {
        // Check that extension is "the hole".
        __ LoadP(temp, ContextMemOperand(context, Context::EXTENSION_INDEX));
        __ JumpIfNotRoot(temp, Heap::kTheHoleValueRootIndex, slow);
      }
      __ LoadP(next, ContextMemOperand(context, Context::PREVIOUS_INDEX));
      // Walk the rest of the chain without clobbering cp.
      context = next;
    }
  }
  // Check that last extension is "the hole".
  __ LoadP(temp, ContextMemOperand(context, Context::EXTENSION_INDEX));
  __ JumpIfNotRoot(temp, Heap::kTheHoleValueRootIndex, slow);

  // This function is used only for loads, not stores, so it's safe to
  // return a cp-based operand (the write barrier cannot be allowed to
  // destroy the cp register).
  return ContextMemOperand(context, var->index());
}


void FullCodeGenerator::EmitDynamicLookupFastCase(VariableProxy* proxy,
                                                  TypeofMode typeof_mode,
                                                  Label* slow, Label* done) {
  // Generate fast-case code for variables that might be shadowed by
  // eval-introduced variables. Eval is used a lot without
  // introducing variables. In those cases, we do not want to
  // perform a runtime call for all variables in the scope
  // containing the eval.
  Variable* var = proxy->var();
  if (var->mode() == DYNAMIC_GLOBAL) {
    EmitLoadGlobalCheckExtensions(proxy, typeof_mode, slow);
    __ b(done);
  } else if (var->mode() == DYNAMIC_LOCAL) {
    Variable* local = var->local_if_not_shadowed();
    __ LoadP(r3, ContextSlotOperandCheckExtensions(local, slow));
    if (local->mode() == LET || local->mode() == CONST ||
        local->mode() == CONST_LEGACY) {
      __ CompareRoot(r3, Heap::kTheHoleValueRootIndex);
      __ bne(done);
      if (local->mode() == CONST_LEGACY) {
        __ LoadRoot(r3, Heap::kUndefinedValueRootIndex);
      } else {  // LET || CONST
        __ mov(r3, Operand(var->name()));
        __ push(r3);
        __ CallRuntime(Runtime::kThrowReferenceError);
      }
    }
    __ b(done);
  }
}


void FullCodeGenerator::EmitGlobalVariableLoad(VariableProxy* proxy,
                                               TypeofMode typeof_mode) {
  Variable* var = proxy->var();
  DCHECK(var->IsUnallocatedOrGlobalSlot() ||
         (var->IsLookupSlot() && var->mode() == DYNAMIC_GLOBAL));
  __ LoadGlobalObject(LoadDescriptor::ReceiverRegister());
  __ mov(LoadDescriptor::NameRegister(), Operand(var->name()));
  __ mov(LoadDescriptor::SlotRegister(),
         Operand(SmiFromSlot(proxy->VariableFeedbackSlot())));
  CallLoadIC(typeof_mode);
}


void FullCodeGenerator::EmitVariableLoad(VariableProxy* proxy,
                                         TypeofMode typeof_mode) {
  // Record position before possible IC call.
  SetExpressionPosition(proxy);
  PrepareForBailoutForId(proxy->BeforeId(), NO_REGISTERS);
  Variable* var = proxy->var();

  // Three cases: global variables, lookup variables, and all other types of
  // variables.
  switch (var->location()) {
    case VariableLocation::GLOBAL:
    case VariableLocation::UNALLOCATED: {
      Comment cmnt(masm_, "[ Global variable");
      EmitGlobalVariableLoad(proxy, typeof_mode);
      context()->Plug(r3);
      break;
    }

    case VariableLocation::PARAMETER:
    case VariableLocation::LOCAL:
    case VariableLocation::CONTEXT: {
      DCHECK_EQ(NOT_INSIDE_TYPEOF, typeof_mode);
      Comment cmnt(masm_, var->IsContextSlot() ? "[ Context variable"
                                               : "[ Stack variable");
      if (NeedsHoleCheckForLoad(proxy)) {
        Label done;
        // Let and const need a read barrier.
        GetVar(r3, var);
        __ CompareRoot(r3, Heap::kTheHoleValueRootIndex);
        __ bne(&done);
        if (var->mode() == LET || var->mode() == CONST) {
          // Throw a reference error when using an uninitialized let/const
          // binding in harmony mode.
          __ mov(r3, Operand(var->name()));
          __ push(r3);
          __ CallRuntime(Runtime::kThrowReferenceError);
        } else {
          // Uninitialized legacy const bindings are unholed.
          DCHECK(var->mode() == CONST_LEGACY);
          __ LoadRoot(r3, Heap::kUndefinedValueRootIndex);
        }
        __ bind(&done);
        context()->Plug(r3);
        break;
      }
      context()->Plug(var);
      break;
    }

    case VariableLocation::LOOKUP: {
      Comment cmnt(masm_, "[ Lookup variable");
      Label done, slow;
      // Generate code for loading from variables potentially shadowed
      // by eval-introduced variables.
      EmitDynamicLookupFastCase(proxy, typeof_mode, &slow, &done);
      __ bind(&slow);
      __ Push(var->name());
      Runtime::FunctionId function_id =
          typeof_mode == NOT_INSIDE_TYPEOF
              ? Runtime::kLoadLookupSlot
              : Runtime::kLoadLookupSlotInsideTypeof;
      __ CallRuntime(function_id);
      __ bind(&done);
      context()->Plug(r3);
    }
  }
}


void FullCodeGenerator::VisitRegExpLiteral(RegExpLiteral* expr) {
  Comment cmnt(masm_, "[ RegExpLiteral");
  __ LoadP(r6, MemOperand(fp, JavaScriptFrameConstants::kFunctionOffset));
  __ LoadSmiLiteral(r5, Smi::FromInt(expr->literal_index()));
  __ mov(r4, Operand(expr->pattern()));
  __ LoadSmiLiteral(r3, Smi::FromInt(expr->flags()));
  FastCloneRegExpStub stub(isolate());
  __ CallStub(&stub);
  context()->Plug(r3);
}


void FullCodeGenerator::EmitAccessor(ObjectLiteralProperty* property) {
  Expression* expression = (property == NULL) ? NULL : property->value();
  if (expression == NULL) {
    __ LoadRoot(r4, Heap::kNullValueRootIndex);
    PushOperand(r4);
  } else {
    VisitForStackValue(expression);
    if (NeedsHomeObject(expression)) {
      DCHECK(property->kind() == ObjectLiteral::Property::GETTER ||
             property->kind() == ObjectLiteral::Property::SETTER);
      int offset = property->kind() == ObjectLiteral::Property::GETTER ? 2 : 3;
      EmitSetHomeObject(expression, offset, property->GetSlot());
    }
  }
}


1377void FullCodeGenerator::VisitObjectLiteral(ObjectLiteral* expr) {
1378 Comment cmnt(masm_, "[ ObjectLiteral");
1379
1380 Handle<FixedArray> constant_properties = expr->constant_properties();
1381 __ LoadP(r6, MemOperand(fp, JavaScriptFrameConstants::kFunctionOffset));
1382 __ LoadSmiLiteral(r5, Smi::FromInt(expr->literal_index()));
1383 __ mov(r4, Operand(constant_properties));
1384 int flags = expr->ComputeFlags();
1385 __ LoadSmiLiteral(r3, Smi::FromInt(flags));
1386 if (MustCreateObjectLiteralWithRuntime(expr)) {
1387 __ Push(r6, r5, r4, r3);
1388 __ CallRuntime(Runtime::kCreateObjectLiteral);
1389 } else {
1390 FastCloneShallowObjectStub stub(isolate(), expr->properties_count());
1391 __ CallStub(&stub);
1392 }
1393 PrepareForBailoutForId(expr->CreateLiteralId(), TOS_REG);
1394
1395 // If result_saved is true the result is on top of the stack. If
1396 // result_saved is false the result is in r3.
1397 bool result_saved = false;
1398
1399 AccessorTable accessor_table(zone());
1400 int property_index = 0;
1401 for (; property_index < expr->properties()->length(); property_index++) {
1402 ObjectLiteral::Property* property = expr->properties()->at(property_index);
1403 if (property->is_computed_name()) break;
1404 if (property->IsCompileTimeValue()) continue;
1405
1406 Literal* key = property->key()->AsLiteral();
1407 Expression* value = property->value();
1408 if (!result_saved) {
Ben Murdoch097c5b22016-05-18 11:27:45 +01001409 PushOperand(r3); // Save result on stack
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00001410 result_saved = true;
1411 }
1412 switch (property->kind()) {
1413 case ObjectLiteral::Property::CONSTANT:
1414 UNREACHABLE();
1415 case ObjectLiteral::Property::MATERIALIZED_LITERAL:
1416 DCHECK(!CompileTimeValue::IsCompileTimeValue(property->value()));
1417 // Fall through.
1418 case ObjectLiteral::Property::COMPUTED:
1419 // It is safe to use [[Put]] here because the boilerplate already
1420 // contains computed properties with an uninitialized value.
1421 if (key->value()->IsInternalizedString()) {
1422 if (property->emit_store()) {
1423 VisitForAccumulatorValue(value);
1424 DCHECK(StoreDescriptor::ValueRegister().is(r3));
1425 __ mov(StoreDescriptor::NameRegister(), Operand(key->value()));
1426 __ LoadP(StoreDescriptor::ReceiverRegister(), MemOperand(sp));
1427 EmitLoadStoreICSlot(property->GetSlot(0));
1428 CallStoreIC();
1429 PrepareForBailoutForId(key->id(), NO_REGISTERS);
1430
1431 if (NeedsHomeObject(value)) {
1432 EmitSetHomeObjectAccumulator(value, 0, property->GetSlot(1));
1433 }
1434 } else {
1435 VisitForEffect(value);
1436 }
1437 break;
1438 }
1439 // Duplicate receiver on stack.
1440 __ LoadP(r3, MemOperand(sp));
Ben Murdoch097c5b22016-05-18 11:27:45 +01001441 PushOperand(r3);
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00001442 VisitForStackValue(key);
1443 VisitForStackValue(value);
1444 if (property->emit_store()) {
1445 if (NeedsHomeObject(value)) {
1446 EmitSetHomeObject(value, 2, property->GetSlot());
1447 }
1448 __ LoadSmiLiteral(r3, Smi::FromInt(SLOPPY)); // PropertyAttributes
Ben Murdoch097c5b22016-05-18 11:27:45 +01001449 PushOperand(r3);
1450 CallRuntimeWithOperands(Runtime::kSetProperty);
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00001451 } else {
Ben Murdoch097c5b22016-05-18 11:27:45 +01001452 DropOperands(3);
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00001453 }
1454 break;
1455 case ObjectLiteral::Property::PROTOTYPE:
1456 // Duplicate receiver on stack.
1457 __ LoadP(r3, MemOperand(sp));
Ben Murdoch097c5b22016-05-18 11:27:45 +01001458 PushOperand(r3);
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00001459 VisitForStackValue(value);
1460 DCHECK(property->emit_store());
Ben Murdoch097c5b22016-05-18 11:27:45 +01001461 CallRuntimeWithOperands(Runtime::kInternalSetPrototype);
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00001462 PrepareForBailoutForId(expr->GetIdForPropertySet(property_index),
1463 NO_REGISTERS);
1464 break;
1465 case ObjectLiteral::Property::GETTER:
1466 if (property->emit_store()) {
1467 accessor_table.lookup(key)->second->getter = property;
1468 }
1469 break;
1470 case ObjectLiteral::Property::SETTER:
1471 if (property->emit_store()) {
1472 accessor_table.lookup(key)->second->setter = property;
1473 }
1474 break;
1475 }
1476 }
1477
1478 // Emit code to define accessors, using only a single call to the runtime for
1479 // each pair of corresponding getters and setters.
1480 for (AccessorTable::Iterator it = accessor_table.begin();
1481 it != accessor_table.end(); ++it) {
1482 __ LoadP(r3, MemOperand(sp)); // Duplicate receiver.
Ben Murdoch097c5b22016-05-18 11:27:45 +01001483 PushOperand(r3);
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00001484 VisitForStackValue(it->first);
1485 EmitAccessor(it->second->getter);
1486 EmitAccessor(it->second->setter);
1487 __ LoadSmiLiteral(r3, Smi::FromInt(NONE));
Ben Murdoch097c5b22016-05-18 11:27:45 +01001488 PushOperand(r3);
1489 CallRuntimeWithOperands(Runtime::kDefineAccessorPropertyUnchecked);
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00001490 }
1491
1492 // Object literals have two parts. The "static" part on the left contains no
1493 // computed property names, and so we can compute its map ahead of time; see
1494 // runtime.cc::CreateObjectLiteralBoilerplate. The second "dynamic" part
1495 // starts with the first computed property name, and continues with all
1496 // properties to its right. All the code from above initializes the static
1497 // component of the object literal, and arranges for the map of the result to
1498 // reflect the static order in which the keys appear. For the dynamic
1499 // properties, we compile them into a series of "SetOwnProperty" runtime
1500 // calls. This will preserve insertion order.
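  // For example, in {a: 1, [k]: 2, b: 3} the static part is {a: 1}; the
  // dynamic part starts at [k]: 2, so both [k]: 2 and b: 3 are defined by the
  // runtime calls emitted in the loop below.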
1501 for (; property_index < expr->properties()->length(); property_index++) {
1502 ObjectLiteral::Property* property = expr->properties()->at(property_index);
1503
1504 Expression* value = property->value();
1505 if (!result_saved) {
Ben Murdoch097c5b22016-05-18 11:27:45 +01001506 PushOperand(r3); // Save result on the stack
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00001507 result_saved = true;
1508 }
1509
1510 __ LoadP(r3, MemOperand(sp)); // Duplicate receiver.
Ben Murdoch097c5b22016-05-18 11:27:45 +01001511 PushOperand(r3);
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00001512
1513 if (property->kind() == ObjectLiteral::Property::PROTOTYPE) {
1514 DCHECK(!property->is_computed_name());
1515 VisitForStackValue(value);
1516 DCHECK(property->emit_store());
Ben Murdoch097c5b22016-05-18 11:27:45 +01001517 CallRuntimeWithOperands(Runtime::kInternalSetPrototype);
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00001518 PrepareForBailoutForId(expr->GetIdForPropertySet(property_index),
1519 NO_REGISTERS);
1520 } else {
1521 EmitPropertyKey(property, expr->GetIdForPropertyName(property_index));
1522 VisitForStackValue(value);
1523 if (NeedsHomeObject(value)) {
1524 EmitSetHomeObject(value, 2, property->GetSlot());
1525 }
1526
1527 switch (property->kind()) {
1528 case ObjectLiteral::Property::CONSTANT:
1529 case ObjectLiteral::Property::MATERIALIZED_LITERAL:
1530 case ObjectLiteral::Property::COMPUTED:
1531 if (property->emit_store()) {
Ben Murdoch097c5b22016-05-18 11:27:45 +01001532 PushOperand(Smi::FromInt(NONE));
1533 PushOperand(Smi::FromInt(property->NeedsSetFunctionName()));
1534 CallRuntimeWithOperands(Runtime::kDefineDataPropertyInLiteral);
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00001535 } else {
Ben Murdoch097c5b22016-05-18 11:27:45 +01001536 DropOperands(3);
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00001537 }
1538 break;
1539
1540 case ObjectLiteral::Property::PROTOTYPE:
1541 UNREACHABLE();
1542 break;
1543
1544 case ObjectLiteral::Property::GETTER:
Ben Murdoch097c5b22016-05-18 11:27:45 +01001545 PushOperand(Smi::FromInt(NONE));
1546 CallRuntimeWithOperands(Runtime::kDefineGetterPropertyUnchecked);
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00001547 break;
1548
1549 case ObjectLiteral::Property::SETTER:
Ben Murdoch097c5b22016-05-18 11:27:45 +01001550 PushOperand(Smi::FromInt(NONE));
1551 CallRuntimeWithOperands(Runtime::kDefineSetterPropertyUnchecked);
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00001552 break;
1553 }
1554 }
1555 }
1556
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00001557 if (result_saved) {
1558 context()->PlugTOS();
1559 } else {
1560 context()->Plug(r3);
1561 }
1562}
1563
1564
1565void FullCodeGenerator::VisitArrayLiteral(ArrayLiteral* expr) {
1566 Comment cmnt(masm_, "[ ArrayLiteral");
1567
1568 Handle<FixedArray> constant_elements = expr->constant_elements();
1569 bool has_fast_elements =
1570 IsFastObjectElementsKind(expr->constant_elements_kind());
1571 Handle<FixedArrayBase> constant_elements_values(
1572 FixedArrayBase::cast(constant_elements->get(1)));
1573
1574 AllocationSiteMode allocation_site_mode = TRACK_ALLOCATION_SITE;
1575 if (has_fast_elements && !FLAG_allocation_site_pretenuring) {
1576 // If the only customer of allocation sites is transitioning, then
1577 // we can turn it off if we don't have anywhere else to transition to.
1578 allocation_site_mode = DONT_TRACK_ALLOCATION_SITE;
1579 }
1580
1581 __ LoadP(r6, MemOperand(fp, JavaScriptFrameConstants::kFunctionOffset));
1582 __ LoadSmiLiteral(r5, Smi::FromInt(expr->literal_index()));
1583 __ mov(r4, Operand(constant_elements));
1584 if (MustCreateArrayLiteralWithRuntime(expr)) {
1585 __ LoadSmiLiteral(r3, Smi::FromInt(expr->ComputeFlags()));
1586 __ Push(r6, r5, r4, r3);
1587 __ CallRuntime(Runtime::kCreateArrayLiteral);
1588 } else {
1589 FastCloneShallowArrayStub stub(isolate(), allocation_site_mode);
1590 __ CallStub(&stub);
1591 }
1592 PrepareForBailoutForId(expr->CreateLiteralId(), TOS_REG);
1593
1594 bool result_saved = false; // Is the result saved to the stack?
1595 ZoneList<Expression*>* subexprs = expr->values();
1596 int length = subexprs->length();
1597
1598 // Emit code to evaluate all the non-constant subexpressions and to store
1599 // them into the newly cloned array.
1600 int array_index = 0;
1601 for (; array_index < length; array_index++) {
1602 Expression* subexpr = subexprs->at(array_index);
Ben Murdoch097c5b22016-05-18 11:27:45 +01001603 DCHECK(!subexpr->IsSpread());
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00001604 // If the subexpression is a literal or a simple materialized literal it
1605 // is already set in the cloned array.
1606 if (CompileTimeValue::IsCompileTimeValue(subexpr)) continue;
1607
1608 if (!result_saved) {
Ben Murdoch097c5b22016-05-18 11:27:45 +01001609 PushOperand(r3);
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00001610 result_saved = true;
1611 }
1612 VisitForAccumulatorValue(subexpr);
1613
1614 __ LoadSmiLiteral(StoreDescriptor::NameRegister(),
1615 Smi::FromInt(array_index));
1616 __ LoadP(StoreDescriptor::ReceiverRegister(), MemOperand(sp, 0));
1617 EmitLoadStoreICSlot(expr->LiteralFeedbackSlot());
1618 Handle<Code> ic =
1619 CodeFactory::KeyedStoreIC(isolate(), language_mode()).code();
1620 CallIC(ic);
1621
1622 PrepareForBailoutForId(expr->GetIdForElement(array_index), NO_REGISTERS);
1623 }
1624
 1625 // If the array literal contains spread expressions, it has two parts. The
 1626 // first part is the "static" array, which has a literal index and is handled
 1627 // above. The second part starts at the first spread expression (inclusive),
 1628 // and its elements are appended to the array one by one. Note that the
 1629 // number of elements an iterable produces is not known ahead of time.
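  // For example, in [1, 2, ...iter, 3] the elements 1 and 2 are stored into
  // the cloned boilerplate above, while the elements produced by ...iter and
  // the trailing 3 are appended one by one via Runtime::kAppendElement below.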
1630 if (array_index < length && result_saved) {
Ben Murdoch097c5b22016-05-18 11:27:45 +01001631 PopOperand(r3);
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00001632 result_saved = false;
1633 }
1634 for (; array_index < length; array_index++) {
1635 Expression* subexpr = subexprs->at(array_index);
1636
Ben Murdoch097c5b22016-05-18 11:27:45 +01001637 PushOperand(r3);
1638 DCHECK(!subexpr->IsSpread());
1639 VisitForStackValue(subexpr);
1640 CallRuntimeWithOperands(Runtime::kAppendElement);
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00001641
1642 PrepareForBailoutForId(expr->GetIdForElement(array_index), NO_REGISTERS);
1643 }
1644
1645 if (result_saved) {
1646 context()->PlugTOS();
1647 } else {
1648 context()->Plug(r3);
1649 }
1650}
1651
1652
1653void FullCodeGenerator::VisitAssignment(Assignment* expr) {
1654 DCHECK(expr->target()->IsValidReferenceExpressionOrThis());
1655
1656 Comment cmnt(masm_, "[ Assignment");
1657 SetExpressionPosition(expr, INSERT_BREAK);
1658
1659 Property* property = expr->target()->AsProperty();
1660 LhsKind assign_type = Property::GetAssignType(property);
1661
1662 // Evaluate LHS expression.
1663 switch (assign_type) {
1664 case VARIABLE:
1665 // Nothing to do here.
1666 break;
1667 case NAMED_PROPERTY:
1668 if (expr->is_compound()) {
1669 // We need the receiver both on the stack and in the register.
1670 VisitForStackValue(property->obj());
1671 __ LoadP(LoadDescriptor::ReceiverRegister(), MemOperand(sp, 0));
1672 } else {
1673 VisitForStackValue(property->obj());
1674 }
1675 break;
1676 case NAMED_SUPER_PROPERTY:
1677 VisitForStackValue(
1678 property->obj()->AsSuperPropertyReference()->this_var());
1679 VisitForAccumulatorValue(
1680 property->obj()->AsSuperPropertyReference()->home_object());
Ben Murdoch097c5b22016-05-18 11:27:45 +01001681 PushOperand(result_register());
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00001682 if (expr->is_compound()) {
1683 const Register scratch = r4;
1684 __ LoadP(scratch, MemOperand(sp, kPointerSize));
Ben Murdoch097c5b22016-05-18 11:27:45 +01001685 PushOperands(scratch, result_register());
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00001686 }
1687 break;
1688 case KEYED_SUPER_PROPERTY: {
1689 const Register scratch = r4;
1690 VisitForStackValue(
1691 property->obj()->AsSuperPropertyReference()->this_var());
1692 VisitForAccumulatorValue(
1693 property->obj()->AsSuperPropertyReference()->home_object());
1694 __ mr(scratch, result_register());
1695 VisitForAccumulatorValue(property->key());
Ben Murdoch097c5b22016-05-18 11:27:45 +01001696 PushOperands(scratch, result_register());
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00001697 if (expr->is_compound()) {
1698 const Register scratch1 = r5;
1699 __ LoadP(scratch1, MemOperand(sp, 2 * kPointerSize));
Ben Murdoch097c5b22016-05-18 11:27:45 +01001700 PushOperands(scratch1, scratch, result_register());
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00001701 }
1702 break;
1703 }
1704 case KEYED_PROPERTY:
1705 if (expr->is_compound()) {
1706 VisitForStackValue(property->obj());
1707 VisitForStackValue(property->key());
1708 __ LoadP(LoadDescriptor::ReceiverRegister(),
1709 MemOperand(sp, 1 * kPointerSize));
1710 __ LoadP(LoadDescriptor::NameRegister(), MemOperand(sp, 0));
1711 } else {
1712 VisitForStackValue(property->obj());
1713 VisitForStackValue(property->key());
1714 }
1715 break;
1716 }
1717
1718 // For compound assignments we need another deoptimization point after the
1719 // variable/property load.
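  // For example, in o.x += f() the unoptimized code must be able to resume
  // right after the load of o.x (with the loaded value on top of the stack)
  // if optimized code deoptimizes before f() or the addition completes.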
1720 if (expr->is_compound()) {
1721 {
1722 AccumulatorValueContext context(this);
1723 switch (assign_type) {
1724 case VARIABLE:
1725 EmitVariableLoad(expr->target()->AsVariableProxy());
1726 PrepareForBailout(expr->target(), TOS_REG);
1727 break;
1728 case NAMED_PROPERTY:
1729 EmitNamedPropertyLoad(property);
1730 PrepareForBailoutForId(property->LoadId(), TOS_REG);
1731 break;
1732 case NAMED_SUPER_PROPERTY:
1733 EmitNamedSuperPropertyLoad(property);
1734 PrepareForBailoutForId(property->LoadId(), TOS_REG);
1735 break;
1736 case KEYED_SUPER_PROPERTY:
1737 EmitKeyedSuperPropertyLoad(property);
1738 PrepareForBailoutForId(property->LoadId(), TOS_REG);
1739 break;
1740 case KEYED_PROPERTY:
1741 EmitKeyedPropertyLoad(property);
1742 PrepareForBailoutForId(property->LoadId(), TOS_REG);
1743 break;
1744 }
1745 }
1746
1747 Token::Value op = expr->binary_op();
Ben Murdoch097c5b22016-05-18 11:27:45 +01001748 PushOperand(r3); // Left operand goes on the stack.
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00001749 VisitForAccumulatorValue(expr->value());
1750
1751 AccumulatorValueContext context(this);
1752 if (ShouldInlineSmiCase(op)) {
1753 EmitInlineSmiBinaryOp(expr->binary_operation(), op, expr->target(),
1754 expr->value());
1755 } else {
1756 EmitBinaryOp(expr->binary_operation(), op);
1757 }
1758
1759 // Deoptimization point in case the binary operation may have side effects.
1760 PrepareForBailout(expr->binary_operation(), TOS_REG);
1761 } else {
1762 VisitForAccumulatorValue(expr->value());
1763 }
1764
1765 SetExpressionPosition(expr);
1766
1767 // Store the value.
1768 switch (assign_type) {
1769 case VARIABLE:
1770 EmitVariableAssignment(expr->target()->AsVariableProxy()->var(),
1771 expr->op(), expr->AssignmentSlot());
1772 PrepareForBailoutForId(expr->AssignmentId(), TOS_REG);
1773 context()->Plug(r3);
1774 break;
1775 case NAMED_PROPERTY:
1776 EmitNamedPropertyAssignment(expr);
1777 break;
1778 case NAMED_SUPER_PROPERTY:
1779 EmitNamedSuperPropertyStore(property);
1780 context()->Plug(r3);
1781 break;
1782 case KEYED_SUPER_PROPERTY:
1783 EmitKeyedSuperPropertyStore(property);
1784 context()->Plug(r3);
1785 break;
1786 case KEYED_PROPERTY:
1787 EmitKeyedPropertyAssignment(expr);
1788 break;
1789 }
1790}
1791
1792
1793void FullCodeGenerator::VisitYield(Yield* expr) {
1794 Comment cmnt(masm_, "[ Yield");
1795 SetExpressionPosition(expr);
1796
1797 // Evaluate yielded value first; the initial iterator definition depends on
1798 // this. It stays on the stack while we update the iterator.
1799 VisitForStackValue(expr->expression());
1800
Ben Murdochda12d292016-06-02 14:46:10 +01001801 Label suspend, continuation, post_runtime, resume;
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00001802
Ben Murdochda12d292016-06-02 14:46:10 +01001803 __ b(&suspend);
1804 __ bind(&continuation);
1805 // When we arrive here, the stack top is the resume mode and
1806 // result_register() holds the input value (the argument given to the
1807 // respective resume operation).
1808 __ RecordGeneratorContinuation();
1809 __ pop(r4);
1810 __ CmpSmiLiteral(r4, Smi::FromInt(JSGeneratorObject::RETURN), r0);
1811 __ bne(&resume);
1812 __ push(result_register());
1813 EmitCreateIteratorResult(true);
1814 EmitUnwindAndReturn();
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00001815
Ben Murdochda12d292016-06-02 14:46:10 +01001816 __ bind(&suspend);
1817 OperandStackDepthIncrement(1); // Not popped on this path.
1818 VisitForAccumulatorValue(expr->generator_object());
1819 DCHECK(continuation.pos() > 0 && Smi::IsValid(continuation.pos()));
1820 __ LoadSmiLiteral(r4, Smi::FromInt(continuation.pos()));
1821 __ StoreP(r4, FieldMemOperand(r3, JSGeneratorObject::kContinuationOffset),
1822 r0);
1823 __ StoreP(cp, FieldMemOperand(r3, JSGeneratorObject::kContextOffset), r0);
1824 __ mr(r4, cp);
1825 __ RecordWriteField(r3, JSGeneratorObject::kContextOffset, r4, r5,
1826 kLRHasBeenSaved, kDontSaveFPRegs);
1827 __ addi(r4, fp, Operand(StandardFrameConstants::kExpressionsOffset));
1828 __ cmp(sp, r4);
1829 __ beq(&post_runtime);
1830 __ push(r3); // generator object
1831 __ CallRuntime(Runtime::kSuspendJSGeneratorObject, 1);
1832 __ LoadP(cp, MemOperand(fp, StandardFrameConstants::kContextOffset));
1833 __ bind(&post_runtime);
1834 PopOperand(result_register());
1835 EmitReturnSequence();
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00001836
Ben Murdochda12d292016-06-02 14:46:10 +01001837 __ bind(&resume);
1838 context()->Plug(result_register());
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00001839}
1840
1841
1842void FullCodeGenerator::EmitGeneratorResume(
1843 Expression* generator, Expression* value,
1844 JSGeneratorObject::ResumeMode resume_mode) {
1845 // The value stays in r3, and is ultimately read by the resumed generator, as
1846 // if CallRuntime(Runtime::kSuspendJSGeneratorObject) returned it. Or it
1847 // is read to throw the value when the resumed generator is already closed.
1848 // r4 will hold the generator object until the activation has been resumed.
1849 VisitForStackValue(generator);
1850 VisitForAccumulatorValue(value);
Ben Murdoch097c5b22016-05-18 11:27:45 +01001851 PopOperand(r4);
1852
1853 // Store input value into generator object.
1854 __ StoreP(result_register(),
1855 FieldMemOperand(r4, JSGeneratorObject::kInputOffset), r0);
1856 __ mr(r5, result_register());
1857 __ RecordWriteField(r4, JSGeneratorObject::kInputOffset, r5, r6,
1858 kLRHasBeenSaved, kDontSaveFPRegs);
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00001859
1860 // Load suspended function and context.
1861 __ LoadP(cp, FieldMemOperand(r4, JSGeneratorObject::kContextOffset));
1862 __ LoadP(r7, FieldMemOperand(r4, JSGeneratorObject::kFunctionOffset));
1863
 1864 // Load the receiver and push it as the first argument.
1865 __ LoadP(r5, FieldMemOperand(r4, JSGeneratorObject::kReceiverOffset));
1866 __ push(r5);
1867
Ben Murdochda12d292016-06-02 14:46:10 +01001868 // Push holes for arguments to generator function. Since the parser forced
1869 // context allocation for any variables in generators, the actual argument
1870 // values have already been copied into the context and these dummy values
1871 // will never be used.
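  // For example, resuming function* g(a, b) pushes two hole values here,
  // regardless of how many arguments the original call to g supplied.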
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00001872 __ LoadP(r6, FieldMemOperand(r7, JSFunction::kSharedFunctionInfoOffset));
1873 __ LoadWordArith(
1874 r6, FieldMemOperand(r6, SharedFunctionInfo::kFormalParameterCountOffset));
1875 __ LoadRoot(r5, Heap::kTheHoleValueRootIndex);
1876 Label argument_loop, push_frame;
1877#if V8_TARGET_ARCH_PPC64
1878 __ cmpi(r6, Operand::Zero());
1879 __ beq(&push_frame);
1880#else
1881 __ SmiUntag(r6, SetRC);
1882 __ beq(&push_frame, cr0);
1883#endif
1884 __ mtctr(r6);
1885 __ bind(&argument_loop);
1886 __ push(r5);
1887 __ bdnz(&argument_loop);
1888
1889 // Enter a new JavaScript frame, and initialize its slots as they were when
1890 // the generator was suspended.
1891 Label resume_frame, done;
1892 __ bind(&push_frame);
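  // The branch below is taken with SetLK, so the link register points at the
  // following "b &done"; PushStandardFrame records it as the return address of
  // the resumed frame, and control reaches &done when that frame returns.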
1893 __ b(&resume_frame, SetLK);
1894 __ b(&done);
1895 __ bind(&resume_frame);
1896 // lr = return address.
1897 // fp = caller's frame pointer.
 1898 // cp = callee's context.
1899 // r7 = callee's JS function.
Ben Murdochda12d292016-06-02 14:46:10 +01001900 __ PushStandardFrame(r7);
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00001901
1902 // Load the operand stack size.
1903 __ LoadP(r6, FieldMemOperand(r4, JSGeneratorObject::kOperandStackOffset));
1904 __ LoadP(r6, FieldMemOperand(r6, FixedArray::kLengthOffset));
1905 __ SmiUntag(r6, SetRC);
1906
1907 // If we are sending a value and there is no operand stack, we can jump back
1908 // in directly.
1909 Label call_resume;
1910 if (resume_mode == JSGeneratorObject::NEXT) {
1911 Label slow_resume;
1912 __ bne(&slow_resume, cr0);
1913 __ LoadP(ip, FieldMemOperand(r7, JSFunction::kCodeEntryOffset));
1914 {
1915 ConstantPoolUnavailableScope constant_pool_unavailable(masm_);
1916 if (FLAG_enable_embedded_constant_pool) {
1917 __ LoadConstantPoolPointerRegisterFromCodeTargetAddress(ip);
1918 }
1919 __ LoadP(r5, FieldMemOperand(r4, JSGeneratorObject::kContinuationOffset));
1920 __ SmiUntag(r5);
1921 __ add(ip, ip, r5);
1922 __ LoadSmiLiteral(r5,
1923 Smi::FromInt(JSGeneratorObject::kGeneratorExecuting));
1924 __ StoreP(r5, FieldMemOperand(r4, JSGeneratorObject::kContinuationOffset),
1925 r0);
Ben Murdoch097c5b22016-05-18 11:27:45 +01001926 __ Push(Smi::FromInt(resume_mode)); // Consumed in continuation.
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00001927 __ Jump(ip);
1928 __ bind(&slow_resume);
1929 }
1930 } else {
1931 __ beq(&call_resume, cr0);
1932 }
1933
1934 // Otherwise, we push holes for the operand stack and call the runtime to fix
1935 // up the stack and the handlers.
1936 Label operand_loop;
1937 __ mtctr(r6);
1938 __ bind(&operand_loop);
1939 __ push(r5);
1940 __ bdnz(&operand_loop);
1941
1942 __ bind(&call_resume);
Ben Murdoch097c5b22016-05-18 11:27:45 +01001943 __ Push(Smi::FromInt(resume_mode)); // Consumed in continuation.
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00001944 DCHECK(!result_register().is(r4));
1945 __ Push(r4, result_register());
1946 __ Push(Smi::FromInt(resume_mode));
1947 __ CallRuntime(Runtime::kResumeJSGeneratorObject);
1948 // Not reached: the runtime call returns elsewhere.
1949 __ stop("not-reached");
1950
1951 __ bind(&done);
1952 context()->Plug(result_register());
1953}
1954
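// These helpers mirror MacroAssembler::Push/Pop while keeping
// operand_stack_depth_ in sync, so that EmitOperandStackDepthCheck below can
// compare the tracked depth against the actual fp - sp distance in debug code.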
Ben Murdoch097c5b22016-05-18 11:27:45 +01001955void FullCodeGenerator::PushOperands(Register reg1, Register reg2) {
1956 OperandStackDepthIncrement(2);
1957 __ Push(reg1, reg2);
1958}
1959
1960void FullCodeGenerator::PushOperands(Register reg1, Register reg2,
1961 Register reg3) {
1962 OperandStackDepthIncrement(3);
1963 __ Push(reg1, reg2, reg3);
1964}
1965
1966void FullCodeGenerator::PushOperands(Register reg1, Register reg2,
1967 Register reg3, Register reg4) {
1968 OperandStackDepthIncrement(4);
1969 __ Push(reg1, reg2, reg3, reg4);
1970}
1971
1972void FullCodeGenerator::PopOperands(Register reg1, Register reg2) {
1973 OperandStackDepthDecrement(2);
1974 __ Pop(reg1, reg2);
1975}
1976
1977void FullCodeGenerator::EmitOperandStackDepthCheck() {
1978 if (FLAG_debug_code) {
1979 int expected_diff = StandardFrameConstants::kFixedFrameSizeFromFp +
1980 operand_stack_depth_ * kPointerSize;
1981 __ sub(r3, fp, sp);
1982 __ cmpi(r3, Operand(expected_diff));
1983 __ Assert(eq, kUnexpectedStackDepth);
1984 }
1985}
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00001986
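// Allocates a JSIteratorResult, storing the value popped from the operand
// stack and the given 'done' flag; falls back to Runtime::kAllocateInNewSpace
// when inline allocation fails.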
1987void FullCodeGenerator::EmitCreateIteratorResult(bool done) {
1988 Label allocate, done_allocate;
1989
1990 __ Allocate(JSIteratorResult::kSize, r3, r5, r6, &allocate, TAG_OBJECT);
1991 __ b(&done_allocate);
1992
1993 __ bind(&allocate);
1994 __ Push(Smi::FromInt(JSIteratorResult::kSize));
1995 __ CallRuntime(Runtime::kAllocateInNewSpace);
1996
1997 __ bind(&done_allocate);
1998 __ LoadNativeContextSlot(Context::ITERATOR_RESULT_MAP_INDEX, r4);
Ben Murdochda12d292016-06-02 14:46:10 +01001999 PopOperand(r5);
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00002000 __ LoadRoot(r6,
2001 done ? Heap::kTrueValueRootIndex : Heap::kFalseValueRootIndex);
2002 __ LoadRoot(r7, Heap::kEmptyFixedArrayRootIndex);
2003 __ StoreP(r4, FieldMemOperand(r3, HeapObject::kMapOffset), r0);
2004 __ StoreP(r7, FieldMemOperand(r3, JSObject::kPropertiesOffset), r0);
2005 __ StoreP(r7, FieldMemOperand(r3, JSObject::kElementsOffset), r0);
2006 __ StoreP(r5, FieldMemOperand(r3, JSIteratorResult::kValueOffset), r0);
2007 __ StoreP(r6, FieldMemOperand(r3, JSIteratorResult::kDoneOffset), r0);
2008}
 2009}
2010
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00002011void FullCodeGenerator::EmitInlineSmiBinaryOp(BinaryOperation* expr,
2012 Token::Value op,
2013 Expression* left_expr,
2014 Expression* right_expr) {
2015 Label done, smi_case, stub_call;
2016
2017 Register scratch1 = r5;
2018 Register scratch2 = r6;
2019
2020 // Get the arguments.
2021 Register left = r4;
2022 Register right = r3;
Ben Murdoch097c5b22016-05-18 11:27:45 +01002023 PopOperand(left);
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00002024
2025 // Perform combined smi check on both operands.
2026 __ orx(scratch1, left, right);
2027 STATIC_ASSERT(kSmiTag == 0);
2028 JumpPatchSite patch_site(masm_);
2029 patch_site.EmitJumpIfSmi(scratch1, &smi_case);
2030
2031 __ bind(&stub_call);
Ben Murdoch097c5b22016-05-18 11:27:45 +01002032 Handle<Code> code = CodeFactory::BinaryOpIC(isolate(), op).code();
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00002033 CallIC(code, expr->BinaryOperationFeedbackId());
2034 patch_site.EmitPatchInfo();
2035 __ b(&done);
2036
2037 __ bind(&smi_case);
2038 // Smi case. This code works the same way as the smi-smi case in the type
2039 // recording binary operation stub.
2040 switch (op) {
2041 case Token::SAR:
2042 __ GetLeastBitsFromSmi(scratch1, right, 5);
2043 __ ShiftRightArith(right, left, scratch1);
2044 __ ClearRightImm(right, right, Operand(kSmiTagSize + kSmiShiftSize));
2045 break;
2046 case Token::SHL: {
2047 __ GetLeastBitsFromSmi(scratch2, right, 5);
2048#if V8_TARGET_ARCH_PPC64
2049 __ ShiftLeft_(right, left, scratch2);
2050#else
2051 __ SmiUntag(scratch1, left);
2052 __ ShiftLeft_(scratch1, scratch1, scratch2);
2053 // Check that the *signed* result fits in a smi
2054 __ JumpIfNotSmiCandidate(scratch1, scratch2, &stub_call);
2055 __ SmiTag(right, scratch1);
2056#endif
2057 break;
2058 }
2059 case Token::SHR: {
2060 __ SmiUntag(scratch1, left);
2061 __ GetLeastBitsFromSmi(scratch2, right, 5);
2062 __ srw(scratch1, scratch1, scratch2);
2063 // Unsigned shift is not allowed to produce a negative number.
2064 __ JumpIfNotUnsignedSmiCandidate(scratch1, r0, &stub_call);
2065 __ SmiTag(right, scratch1);
2066 break;
2067 }
2068 case Token::ADD: {
2069 __ AddAndCheckForOverflow(scratch1, left, right, scratch2, r0);
2070 __ BranchOnOverflow(&stub_call);
2071 __ mr(right, scratch1);
2072 break;
2073 }
2074 case Token::SUB: {
2075 __ SubAndCheckForOverflow(scratch1, left, right, scratch2, r0);
2076 __ BranchOnOverflow(&stub_call);
2077 __ mr(right, scratch1);
2078 break;
2079 }
2080 case Token::MUL: {
2081 Label mul_zero;
2082#if V8_TARGET_ARCH_PPC64
2083 // Remove tag from both operands.
2084 __ SmiUntag(ip, right);
2085 __ SmiUntag(r0, left);
2086 __ Mul(scratch1, r0, ip);
2087 // Check for overflowing the smi range - no overflow if higher 33 bits of
2088 // the result are identical.
2089 __ TestIfInt32(scratch1, r0);
2090 __ bne(&stub_call);
2091#else
2092 __ SmiUntag(ip, right);
2093 __ mullw(scratch1, left, ip);
2094 __ mulhw(scratch2, left, ip);
2095 // Check for overflowing the smi range - no overflow if higher 33 bits of
2096 // the result are identical.
2097 __ TestIfInt32(scratch2, scratch1, ip);
2098 __ bne(&stub_call);
2099#endif
2100 // Go slow on zero result to handle -0.
2101 __ cmpi(scratch1, Operand::Zero());
2102 __ beq(&mul_zero);
2103#if V8_TARGET_ARCH_PPC64
2104 __ SmiTag(right, scratch1);
2105#else
2106 __ mr(right, scratch1);
2107#endif
2108 __ b(&done);
2109 // We need -0 if we were multiplying a negative number with 0 to get 0.
2110 // We know one of them was zero.
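  // For example, (-3) * 0 must produce -0, which cannot be represented as a
  // smi, so the negative-sum check below goes to the stub; 0 * 3 falls
  // through and yields the smi 0.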
2111 __ bind(&mul_zero);
2112 __ add(scratch2, right, left);
2113 __ cmpi(scratch2, Operand::Zero());
2114 __ blt(&stub_call);
2115 __ LoadSmiLiteral(right, Smi::FromInt(0));
2116 break;
2117 }
2118 case Token::BIT_OR:
2119 __ orx(right, left, right);
2120 break;
2121 case Token::BIT_AND:
2122 __ and_(right, left, right);
2123 break;
2124 case Token::BIT_XOR:
2125 __ xor_(right, left, right);
2126 break;
2127 default:
2128 UNREACHABLE();
2129 }
2130
2131 __ bind(&done);
2132 context()->Plug(r3);
2133}
2134
2135
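// Defines the methods and accessors of a class literal. The constructor and
// its prototype are expected on the operand stack; static members are added
// to the constructor, all others to the prototype.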
2136void FullCodeGenerator::EmitClassDefineProperties(ClassLiteral* lit) {
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00002137 for (int i = 0; i < lit->properties()->length(); i++) {
2138 ObjectLiteral::Property* property = lit->properties()->at(i);
2139 Expression* value = property->value();
2140
Ben Murdoch097c5b22016-05-18 11:27:45 +01002141 Register scratch = r4;
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00002142 if (property->is_static()) {
2143 __ LoadP(scratch, MemOperand(sp, kPointerSize)); // constructor
2144 } else {
2145 __ LoadP(scratch, MemOperand(sp, 0)); // prototype
2146 }
Ben Murdoch097c5b22016-05-18 11:27:45 +01002147 PushOperand(scratch);
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00002148 EmitPropertyKey(property, lit->GetIdForProperty(i));
2149
 2150 // The static "prototype" property is read-only. The non-computed property
 2151 // name case is handled in the parser. Since this is the only case where we
 2152 // need to check for an own read-only property, we special-case it here so
 2153 // the check is not emitted for every property.
2154 if (property->is_static() && property->is_computed_name()) {
2155 __ CallRuntime(Runtime::kThrowIfStaticPrototype);
2156 __ push(r3);
2157 }
2158
2159 VisitForStackValue(value);
2160 if (NeedsHomeObject(value)) {
2161 EmitSetHomeObject(value, 2, property->GetSlot());
2162 }
2163
2164 switch (property->kind()) {
2165 case ObjectLiteral::Property::CONSTANT:
2166 case ObjectLiteral::Property::MATERIALIZED_LITERAL:
2167 case ObjectLiteral::Property::PROTOTYPE:
2168 UNREACHABLE();
2169 case ObjectLiteral::Property::COMPUTED:
Ben Murdoch097c5b22016-05-18 11:27:45 +01002170 PushOperand(Smi::FromInt(DONT_ENUM));
2171 PushOperand(Smi::FromInt(property->NeedsSetFunctionName()));
2172 CallRuntimeWithOperands(Runtime::kDefineDataPropertyInLiteral);
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00002173 break;
2174
2175 case ObjectLiteral::Property::GETTER:
Ben Murdoch097c5b22016-05-18 11:27:45 +01002176 PushOperand(Smi::FromInt(DONT_ENUM));
2177 CallRuntimeWithOperands(Runtime::kDefineGetterPropertyUnchecked);
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00002178 break;
2179
2180 case ObjectLiteral::Property::SETTER:
Ben Murdoch097c5b22016-05-18 11:27:45 +01002181 PushOperand(Smi::FromInt(DONT_ENUM));
2182 CallRuntimeWithOperands(Runtime::kDefineSetterPropertyUnchecked);
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00002183 break;
2184
2185 default:
2186 UNREACHABLE();
2187 }
2188 }
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00002189}
2190
2191
2192void FullCodeGenerator::EmitBinaryOp(BinaryOperation* expr, Token::Value op) {
Ben Murdoch097c5b22016-05-18 11:27:45 +01002193 PopOperand(r4);
2194 Handle<Code> code = CodeFactory::BinaryOpIC(isolate(), op).code();
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00002195 JumpPatchSite patch_site(masm_); // unbound, signals no inlined smi code.
2196 CallIC(code, expr->BinaryOperationFeedbackId());
2197 patch_site.EmitPatchInfo();
2198 context()->Plug(r3);
2199}
2200
2201
2202void FullCodeGenerator::EmitAssignment(Expression* expr,
2203 FeedbackVectorSlot slot) {
2204 DCHECK(expr->IsValidReferenceExpressionOrThis());
2205
2206 Property* prop = expr->AsProperty();
2207 LhsKind assign_type = Property::GetAssignType(prop);
2208
2209 switch (assign_type) {
2210 case VARIABLE: {
2211 Variable* var = expr->AsVariableProxy()->var();
2212 EffectContext context(this);
2213 EmitVariableAssignment(var, Token::ASSIGN, slot);
2214 break;
2215 }
2216 case NAMED_PROPERTY: {
Ben Murdoch097c5b22016-05-18 11:27:45 +01002217 PushOperand(r3); // Preserve value.
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00002218 VisitForAccumulatorValue(prop->obj());
2219 __ Move(StoreDescriptor::ReceiverRegister(), r3);
Ben Murdoch097c5b22016-05-18 11:27:45 +01002220 PopOperand(StoreDescriptor::ValueRegister()); // Restore value.
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00002221 __ mov(StoreDescriptor::NameRegister(),
2222 Operand(prop->key()->AsLiteral()->value()));
2223 EmitLoadStoreICSlot(slot);
2224 CallStoreIC();
2225 break;
2226 }
2227 case NAMED_SUPER_PROPERTY: {
Ben Murdoch097c5b22016-05-18 11:27:45 +01002228 PushOperand(r3);
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00002229 VisitForStackValue(prop->obj()->AsSuperPropertyReference()->this_var());
2230 VisitForAccumulatorValue(
2231 prop->obj()->AsSuperPropertyReference()->home_object());
2232 // stack: value, this; r3: home_object
2233 Register scratch = r5;
2234 Register scratch2 = r6;
2235 __ mr(scratch, result_register()); // home_object
2236 __ LoadP(r3, MemOperand(sp, kPointerSize)); // value
2237 __ LoadP(scratch2, MemOperand(sp, 0)); // this
2238 __ StoreP(scratch2, MemOperand(sp, kPointerSize)); // this
2239 __ StoreP(scratch, MemOperand(sp, 0)); // home_object
2240 // stack: this, home_object; r3: value
2241 EmitNamedSuperPropertyStore(prop);
2242 break;
2243 }
2244 case KEYED_SUPER_PROPERTY: {
Ben Murdoch097c5b22016-05-18 11:27:45 +01002245 PushOperand(r3);
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00002246 VisitForStackValue(prop->obj()->AsSuperPropertyReference()->this_var());
2247 VisitForStackValue(
2248 prop->obj()->AsSuperPropertyReference()->home_object());
2249 VisitForAccumulatorValue(prop->key());
2250 Register scratch = r5;
2251 Register scratch2 = r6;
2252 __ LoadP(scratch2, MemOperand(sp, 2 * kPointerSize)); // value
2253 // stack: value, this, home_object; r3: key, r6: value
2254 __ LoadP(scratch, MemOperand(sp, kPointerSize)); // this
2255 __ StoreP(scratch, MemOperand(sp, 2 * kPointerSize));
2256 __ LoadP(scratch, MemOperand(sp, 0)); // home_object
2257 __ StoreP(scratch, MemOperand(sp, kPointerSize));
2258 __ StoreP(r3, MemOperand(sp, 0));
2259 __ Move(r3, scratch2);
2260 // stack: this, home_object, key; r3: value.
2261 EmitKeyedSuperPropertyStore(prop);
2262 break;
2263 }
2264 case KEYED_PROPERTY: {
Ben Murdoch097c5b22016-05-18 11:27:45 +01002265 PushOperand(r3); // Preserve value.
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00002266 VisitForStackValue(prop->obj());
2267 VisitForAccumulatorValue(prop->key());
2268 __ Move(StoreDescriptor::NameRegister(), r3);
Ben Murdoch097c5b22016-05-18 11:27:45 +01002269 PopOperands(StoreDescriptor::ValueRegister(),
2270 StoreDescriptor::ReceiverRegister());
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00002271 EmitLoadStoreICSlot(slot);
2272 Handle<Code> ic =
2273 CodeFactory::KeyedStoreIC(isolate(), language_mode()).code();
2274 CallIC(ic);
2275 break;
2276 }
2277 }
2278 context()->Plug(r3);
2279}
2280
2281
2282void FullCodeGenerator::EmitStoreToStackLocalOrContextSlot(
2283 Variable* var, MemOperand location) {
2284 __ StoreP(result_register(), location, r0);
2285 if (var->IsContextSlot()) {
2286 // RecordWrite may destroy all its register arguments.
2287 __ mr(r6, result_register());
2288 int offset = Context::SlotOffset(var->index());
2289 __ RecordWriteContextSlot(r4, offset, r6, r5, kLRHasBeenSaved,
2290 kDontSaveFPRegs);
2291 }
2292}
2293
2294
2295void FullCodeGenerator::EmitVariableAssignment(Variable* var, Token::Value op,
2296 FeedbackVectorSlot slot) {
2297 if (var->IsUnallocated()) {
2298 // Global var, const, or let.
2299 __ mov(StoreDescriptor::NameRegister(), Operand(var->name()));
2300 __ LoadGlobalObject(StoreDescriptor::ReceiverRegister());
2301 EmitLoadStoreICSlot(slot);
2302 CallStoreIC();
2303
2304 } else if (var->mode() == LET && op != Token::INIT) {
2305 // Non-initializing assignment to let variable needs a write barrier.
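    // For example, in "{ f(); let x = 1; function f() { x = 2; } }" the call
    // to f() runs before x is initialized, so the store must check that the
    // slot no longer holds the hole and throw a ReferenceError otherwise.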
2306 DCHECK(!var->IsLookupSlot());
2307 DCHECK(var->IsStackAllocated() || var->IsContextSlot());
2308 Label assign;
2309 MemOperand location = VarOperand(var, r4);
2310 __ LoadP(r6, location);
2311 __ CompareRoot(r6, Heap::kTheHoleValueRootIndex);
2312 __ bne(&assign);
2313 __ mov(r6, Operand(var->name()));
2314 __ push(r6);
2315 __ CallRuntime(Runtime::kThrowReferenceError);
2316 // Perform the assignment.
2317 __ bind(&assign);
2318 EmitStoreToStackLocalOrContextSlot(var, location);
2319
2320 } else if (var->mode() == CONST && op != Token::INIT) {
2321 // Assignment to const variable needs a write barrier.
2322 DCHECK(!var->IsLookupSlot());
2323 DCHECK(var->IsStackAllocated() || var->IsContextSlot());
2324 Label const_error;
2325 MemOperand location = VarOperand(var, r4);
2326 __ LoadP(r6, location);
2327 __ CompareRoot(r6, Heap::kTheHoleValueRootIndex);
2328 __ bne(&const_error);
2329 __ mov(r6, Operand(var->name()));
2330 __ push(r6);
2331 __ CallRuntime(Runtime::kThrowReferenceError);
2332 __ bind(&const_error);
2333 __ CallRuntime(Runtime::kThrowConstAssignError);
2334
2335 } else if (var->is_this() && var->mode() == CONST && op == Token::INIT) {
2336 // Initializing assignment to const {this} needs a write barrier.
2337 DCHECK(var->IsStackAllocated() || var->IsContextSlot());
2338 Label uninitialized_this;
2339 MemOperand location = VarOperand(var, r4);
2340 __ LoadP(r6, location);
2341 __ CompareRoot(r6, Heap::kTheHoleValueRootIndex);
2342 __ beq(&uninitialized_this);
2343 __ mov(r4, Operand(var->name()));
2344 __ push(r4);
2345 __ CallRuntime(Runtime::kThrowReferenceError);
2346 __ bind(&uninitialized_this);
2347 EmitStoreToStackLocalOrContextSlot(var, location);
2348
2349 } else if (!var->is_const_mode() ||
2350 (var->mode() == CONST && op == Token::INIT)) {
2351 if (var->IsLookupSlot()) {
2352 // Assignment to var.
Ben Murdoch097c5b22016-05-18 11:27:45 +01002353 __ Push(var->name());
2354 __ Push(r3);
2355 __ CallRuntime(is_strict(language_mode())
2356 ? Runtime::kStoreLookupSlot_Strict
2357 : Runtime::kStoreLookupSlot_Sloppy);
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00002358 } else {
2359 // Assignment to var or initializing assignment to let/const in harmony
2360 // mode.
2361 DCHECK((var->IsStackAllocated() || var->IsContextSlot()));
2362 MemOperand location = VarOperand(var, r4);
Ben Murdoch097c5b22016-05-18 11:27:45 +01002363 if (FLAG_debug_code && var->mode() == LET && op == Token::INIT) {
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00002364 // Check for an uninitialized let binding.
2365 __ LoadP(r5, location);
2366 __ CompareRoot(r5, Heap::kTheHoleValueRootIndex);
2367 __ Check(eq, kLetBindingReInitialization);
2368 }
2369 EmitStoreToStackLocalOrContextSlot(var, location);
2370 }
2371 } else if (var->mode() == CONST_LEGACY && op == Token::INIT) {
2372 // Const initializers need a write barrier.
2373 DCHECK(!var->IsParameter()); // No const parameters.
2374 if (var->IsLookupSlot()) {
2375 __ push(r3);
2376 __ mov(r3, Operand(var->name()));
2377 __ Push(cp, r3); // Context and name.
2378 __ CallRuntime(Runtime::kInitializeLegacyConstLookupSlot);
2379 } else {
2380 DCHECK(var->IsStackAllocated() || var->IsContextSlot());
2381 Label skip;
2382 MemOperand location = VarOperand(var, r4);
2383 __ LoadP(r5, location);
2384 __ CompareRoot(r5, Heap::kTheHoleValueRootIndex);
2385 __ bne(&skip);
2386 EmitStoreToStackLocalOrContextSlot(var, location);
2387 __ bind(&skip);
2388 }
2389
2390 } else {
2391 DCHECK(var->mode() == CONST_LEGACY && op != Token::INIT);
2392 if (is_strict(language_mode())) {
2393 __ CallRuntime(Runtime::kThrowConstAssignError);
2394 }
2395 // Silently ignore store in sloppy mode.
2396 }
2397}
2398
2399
2400void FullCodeGenerator::EmitNamedPropertyAssignment(Assignment* expr) {
2401 // Assignment to a property, using a named store IC.
2402 Property* prop = expr->target()->AsProperty();
2403 DCHECK(prop != NULL);
2404 DCHECK(prop->key()->IsLiteral());
2405
2406 __ mov(StoreDescriptor::NameRegister(),
2407 Operand(prop->key()->AsLiteral()->value()));
Ben Murdoch097c5b22016-05-18 11:27:45 +01002408 PopOperand(StoreDescriptor::ReceiverRegister());
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00002409 EmitLoadStoreICSlot(expr->AssignmentSlot());
2410 CallStoreIC();
2411
2412 PrepareForBailoutForId(expr->AssignmentId(), TOS_REG);
2413 context()->Plug(r3);
2414}
2415
2416
2417void FullCodeGenerator::EmitNamedSuperPropertyStore(Property* prop) {
2418 // Assignment to named property of super.
2419 // r3 : value
2420 // stack : receiver ('this'), home_object
2421 DCHECK(prop != NULL);
2422 Literal* key = prop->key()->AsLiteral();
2423 DCHECK(key != NULL);
2424
Ben Murdoch097c5b22016-05-18 11:27:45 +01002425 PushOperand(key->value());
2426 PushOperand(r3);
2427 CallRuntimeWithOperands((is_strict(language_mode())
2428 ? Runtime::kStoreToSuper_Strict
2429 : Runtime::kStoreToSuper_Sloppy));
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00002430}
2431
2432
2433void FullCodeGenerator::EmitKeyedSuperPropertyStore(Property* prop) {
 2434 // Assignment to keyed property of super.
2435 // r3 : value
2436 // stack : receiver ('this'), home_object, key
2437 DCHECK(prop != NULL);
2438
Ben Murdoch097c5b22016-05-18 11:27:45 +01002439 PushOperand(r3);
2440 CallRuntimeWithOperands((is_strict(language_mode())
2441 ? Runtime::kStoreKeyedToSuper_Strict
2442 : Runtime::kStoreKeyedToSuper_Sloppy));
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00002443}
2444
2445
2446void FullCodeGenerator::EmitKeyedPropertyAssignment(Assignment* expr) {
2447 // Assignment to a property, using a keyed store IC.
Ben Murdoch097c5b22016-05-18 11:27:45 +01002448 PopOperands(StoreDescriptor::ReceiverRegister(),
2449 StoreDescriptor::NameRegister());
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00002450 DCHECK(StoreDescriptor::ValueRegister().is(r3));
2451
2452 Handle<Code> ic =
2453 CodeFactory::KeyedStoreIC(isolate(), language_mode()).code();
2454 EmitLoadStoreICSlot(expr->AssignmentSlot());
2455 CallIC(ic);
2456
2457 PrepareForBailoutForId(expr->AssignmentId(), TOS_REG);
2458 context()->Plug(r3);
2459}
2460
2461
2462void FullCodeGenerator::VisitProperty(Property* expr) {
2463 Comment cmnt(masm_, "[ Property");
2464 SetExpressionPosition(expr);
2465
2466 Expression* key = expr->key();
2467
2468 if (key->IsPropertyName()) {
2469 if (!expr->IsSuperAccess()) {
2470 VisitForAccumulatorValue(expr->obj());
2471 __ Move(LoadDescriptor::ReceiverRegister(), r3);
2472 EmitNamedPropertyLoad(expr);
2473 } else {
2474 VisitForStackValue(expr->obj()->AsSuperPropertyReference()->this_var());
2475 VisitForStackValue(
2476 expr->obj()->AsSuperPropertyReference()->home_object());
2477 EmitNamedSuperPropertyLoad(expr);
2478 }
2479 } else {
2480 if (!expr->IsSuperAccess()) {
2481 VisitForStackValue(expr->obj());
2482 VisitForAccumulatorValue(expr->key());
2483 __ Move(LoadDescriptor::NameRegister(), r3);
Ben Murdoch097c5b22016-05-18 11:27:45 +01002484 PopOperand(LoadDescriptor::ReceiverRegister());
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00002485 EmitKeyedPropertyLoad(expr);
2486 } else {
2487 VisitForStackValue(expr->obj()->AsSuperPropertyReference()->this_var());
2488 VisitForStackValue(
2489 expr->obj()->AsSuperPropertyReference()->home_object());
2490 VisitForStackValue(expr->key());
2491 EmitKeyedSuperPropertyLoad(expr);
2492 }
2493 }
2494 PrepareForBailoutForId(expr->LoadId(), TOS_REG);
2495 context()->Plug(r3);
2496}
2497
2498
2499void FullCodeGenerator::CallIC(Handle<Code> code, TypeFeedbackId ast_id) {
2500 ic_total_count_++;
2501 __ Call(code, RelocInfo::CODE_TARGET, ast_id);
2502}
2503
2504
2505// Code common for calls using the IC.
2506void FullCodeGenerator::EmitCallWithLoadIC(Call* expr) {
2507 Expression* callee = expr->expression();
2508
2509 // Get the target function.
2510 ConvertReceiverMode convert_mode;
2511 if (callee->IsVariableProxy()) {
2512 {
2513 StackValueContext context(this);
2514 EmitVariableLoad(callee->AsVariableProxy());
2515 PrepareForBailout(callee, NO_REGISTERS);
2516 }
2517 // Push undefined as receiver. This is patched in the method prologue if it
2518 // is a sloppy mode method.
2519 __ LoadRoot(r0, Heap::kUndefinedValueRootIndex);
Ben Murdoch097c5b22016-05-18 11:27:45 +01002520 PushOperand(r0);
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00002521 convert_mode = ConvertReceiverMode::kNullOrUndefined;
2522 } else {
2523 // Load the function from the receiver.
2524 DCHECK(callee->IsProperty());
2525 DCHECK(!callee->AsProperty()->IsSuperAccess());
2526 __ LoadP(LoadDescriptor::ReceiverRegister(), MemOperand(sp, 0));
2527 EmitNamedPropertyLoad(callee->AsProperty());
2528 PrepareForBailoutForId(callee->AsProperty()->LoadId(), TOS_REG);
2529 // Push the target function under the receiver.
2530 __ LoadP(r0, MemOperand(sp, 0));
Ben Murdoch097c5b22016-05-18 11:27:45 +01002531 PushOperand(r0);
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00002532 __ StoreP(r3, MemOperand(sp, kPointerSize));
2533 convert_mode = ConvertReceiverMode::kNotNullOrUndefined;
2534 }
2535
2536 EmitCall(expr, convert_mode);
2537}
2538
2539
2540void FullCodeGenerator::EmitSuperCallWithLoadIC(Call* expr) {
2541 Expression* callee = expr->expression();
2542 DCHECK(callee->IsProperty());
2543 Property* prop = callee->AsProperty();
2544 DCHECK(prop->IsSuperAccess());
2545 SetExpressionPosition(prop);
2546
2547 Literal* key = prop->key()->AsLiteral();
2548 DCHECK(!key->value()->IsSmi());
2549 // Load the function from the receiver.
2550 const Register scratch = r4;
2551 SuperPropertyReference* super_ref = prop->obj()->AsSuperPropertyReference();
2552 VisitForAccumulatorValue(super_ref->home_object());
2553 __ mr(scratch, r3);
2554 VisitForAccumulatorValue(super_ref->this_var());
Ben Murdoch097c5b22016-05-18 11:27:45 +01002555 PushOperands(scratch, r3, r3, scratch);
2556 PushOperand(key->value());
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00002557
2558 // Stack here:
2559 // - home_object
2560 // - this (receiver)
2561 // - this (receiver) <-- LoadFromSuper will pop here and below.
2562 // - home_object
2563 // - key
Ben Murdoch097c5b22016-05-18 11:27:45 +01002564 CallRuntimeWithOperands(Runtime::kLoadFromSuper);
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00002565
2566 // Replace home_object with target function.
2567 __ StoreP(r3, MemOperand(sp, kPointerSize));
2568
2569 // Stack here:
2570 // - target function
2571 // - this (receiver)
2572 EmitCall(expr);
2573}
2574
2575
2576// Code common for calls using the IC.
2577void FullCodeGenerator::EmitKeyedCallWithLoadIC(Call* expr, Expression* key) {
2578 // Load the key.
2579 VisitForAccumulatorValue(key);
2580
2581 Expression* callee = expr->expression();
2582
2583 // Load the function from the receiver.
2584 DCHECK(callee->IsProperty());
2585 __ LoadP(LoadDescriptor::ReceiverRegister(), MemOperand(sp, 0));
2586 __ Move(LoadDescriptor::NameRegister(), r3);
2587 EmitKeyedPropertyLoad(callee->AsProperty());
2588 PrepareForBailoutForId(callee->AsProperty()->LoadId(), TOS_REG);
2589
2590 // Push the target function under the receiver.
2591 __ LoadP(ip, MemOperand(sp, 0));
Ben Murdoch097c5b22016-05-18 11:27:45 +01002592 PushOperand(ip);
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00002593 __ StoreP(r3, MemOperand(sp, kPointerSize));
2594
2595 EmitCall(expr, ConvertReceiverMode::kNotNullOrUndefined);
2596}
2597
2598
2599void FullCodeGenerator::EmitKeyedSuperCallWithLoadIC(Call* expr) {
2600 Expression* callee = expr->expression();
2601 DCHECK(callee->IsProperty());
2602 Property* prop = callee->AsProperty();
2603 DCHECK(prop->IsSuperAccess());
2604
2605 SetExpressionPosition(prop);
2606 // Load the function from the receiver.
2607 const Register scratch = r4;
2608 SuperPropertyReference* super_ref = prop->obj()->AsSuperPropertyReference();
2609 VisitForAccumulatorValue(super_ref->home_object());
2610 __ mr(scratch, r3);
2611 VisitForAccumulatorValue(super_ref->this_var());
Ben Murdoch097c5b22016-05-18 11:27:45 +01002612 PushOperands(scratch, r3, r3, scratch);
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00002613 VisitForStackValue(prop->key());
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00002614
2615 // Stack here:
2616 // - home_object
2617 // - this (receiver)
2618 // - this (receiver) <-- LoadKeyedFromSuper will pop here and below.
2619 // - home_object
2620 // - key
Ben Murdoch097c5b22016-05-18 11:27:45 +01002621 CallRuntimeWithOperands(Runtime::kLoadKeyedFromSuper);
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00002622
2623 // Replace home_object with target function.
2624 __ StoreP(r3, MemOperand(sp, kPointerSize));
2625
2626 // Stack here:
2627 // - target function
2628 // - this (receiver)
2629 EmitCall(expr);
2630}
2631
2632
2633void FullCodeGenerator::EmitCall(Call* expr, ConvertReceiverMode mode) {
2634 // Load the arguments.
2635 ZoneList<Expression*>* args = expr->arguments();
2636 int arg_count = args->length();
2637 for (int i = 0; i < arg_count; i++) {
2638 VisitForStackValue(args->at(i));
2639 }
2640
2641 PrepareForBailoutForId(expr->CallId(), NO_REGISTERS);
Ben Murdochda12d292016-06-02 14:46:10 +01002642 SetCallPosition(expr, expr->tail_call_mode());
Ben Murdoch097c5b22016-05-18 11:27:45 +01002643 if (expr->tail_call_mode() == TailCallMode::kAllow) {
2644 if (FLAG_trace) {
2645 __ CallRuntime(Runtime::kTraceTailCall);
2646 }
2647 // Update profiling counters before the tail call since we will
2648 // not return to this function.
2649 EmitProfilingCounterHandlingForReturnSequence(true);
2650 }
2651 Handle<Code> ic =
2652 CodeFactory::CallIC(isolate(), arg_count, mode, expr->tail_call_mode())
2653 .code();
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00002654 __ LoadSmiLiteral(r6, SmiFromSlot(expr->CallFeedbackICSlot()));
2655 __ LoadP(r4, MemOperand(sp, (arg_count + 1) * kPointerSize), r0);
2656 // Don't assign a type feedback id to the IC, since type feedback is provided
2657 // by the vector above.
2658 CallIC(ic);
Ben Murdoch097c5b22016-05-18 11:27:45 +01002659 OperandStackDepthDecrement(arg_count + 1);
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00002660
2661 RecordJSReturnSite(expr);
2662 // Restore context register.
2663 __ LoadP(cp, MemOperand(fp, StandardFrameConstants::kContextOffset));
2664 context()->DropAndPlug(1, r3);
2665}
2666
2667
2668void FullCodeGenerator::EmitResolvePossiblyDirectEval(int arg_count) {
2669 // r7: copy of the first argument or undefined if it doesn't exist.
2670 if (arg_count > 0) {
2671 __ LoadP(r7, MemOperand(sp, arg_count * kPointerSize), r0);
2672 } else {
2673 __ LoadRoot(r7, Heap::kUndefinedValueRootIndex);
2674 }
2675
 2676 // r6: the enclosing function (the function containing the eval call).
2677 __ LoadP(r6, MemOperand(fp, JavaScriptFrameConstants::kFunctionOffset));
2678
2679 // r5: language mode.
2680 __ LoadSmiLiteral(r5, Smi::FromInt(language_mode()));
2681
2682 // r4: the start position of the scope the calls resides in.
2683 __ LoadSmiLiteral(r4, Smi::FromInt(scope()->start_position()));
2684
2685 // Do the runtime call.
2686 __ Push(r7, r6, r5, r4);
2687 __ CallRuntime(Runtime::kResolvePossiblyDirectEval);
2688}
2689
2690
2691// See http://www.ecma-international.org/ecma-262/6.0/#sec-function-calls.
2692void FullCodeGenerator::PushCalleeAndWithBaseObject(Call* expr) {
2693 VariableProxy* callee = expr->expression()->AsVariableProxy();
2694 if (callee->var()->IsLookupSlot()) {
2695 Label slow, done;
2696 SetExpressionPosition(callee);
2697 // Generate code for loading from variables potentially shadowed by
2698 // eval-introduced variables.
2699 EmitDynamicLookupFastCase(callee, NOT_INSIDE_TYPEOF, &slow, &done);
2700
2701 __ bind(&slow);
2702 // Call the runtime to find the function to call (returned in r3) and
2703 // the object holding it (returned in r4).
Ben Murdoch097c5b22016-05-18 11:27:45 +01002704 __ Push(callee->name());
2705 __ CallRuntime(Runtime::kLoadLookupSlotForCall);
2706 PushOperands(r3, r4); // Function, receiver.
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00002707 PrepareForBailoutForId(expr->LookupId(), NO_REGISTERS);
2708
2709 // If fast case code has been generated, emit code to push the function
2710 // and receiver and have the slow path jump around this code.
2711 if (done.is_linked()) {
2712 Label call;
2713 __ b(&call);
2714 __ bind(&done);
2715 // Push function.
2716 __ push(r3);
2717 // Pass undefined as the receiver, which is the WithBaseObject of a
2718 // non-object environment record. If the callee is sloppy, it will patch
2719 // it up to be the global receiver.
2720 __ LoadRoot(r4, Heap::kUndefinedValueRootIndex);
2721 __ push(r4);
2722 __ bind(&call);
2723 }
2724 } else {
2725 VisitForStackValue(callee);
2726 // refEnv.WithBaseObject()
2727 __ LoadRoot(r5, Heap::kUndefinedValueRootIndex);
Ben Murdoch097c5b22016-05-18 11:27:45 +01002728 PushOperand(r5); // Reserved receiver slot.
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00002729 }
2730}
2731
2732
2733void FullCodeGenerator::EmitPossiblyEvalCall(Call* expr) {
 2734 // In a call to eval, we first call Runtime::kResolvePossiblyDirectEval
2735 // to resolve the function we need to call. Then we call the resolved
2736 // function using the given arguments.
2737 ZoneList<Expression*>* args = expr->arguments();
2738 int arg_count = args->length();
2739
2740 PushCalleeAndWithBaseObject(expr);
2741
2742 // Push the arguments.
2743 for (int i = 0; i < arg_count; i++) {
2744 VisitForStackValue(args->at(i));
2745 }
2746
2747 // Push a copy of the function (found below the arguments) and
2748 // resolve eval.
2749 __ LoadP(r4, MemOperand(sp, (arg_count + 1) * kPointerSize), r0);
2750 __ push(r4);
2751 EmitResolvePossiblyDirectEval(arg_count);
2752
2753 // Touch up the stack with the resolved function.
2754 __ StoreP(r3, MemOperand(sp, (arg_count + 1) * kPointerSize), r0);
2755
2756 PrepareForBailoutForId(expr->EvalId(), NO_REGISTERS);
2757
2758 // Record source position for debugger.
2759 SetCallPosition(expr);
2760 __ LoadP(r4, MemOperand(sp, (arg_count + 1) * kPointerSize), r0);
2761 __ mov(r3, Operand(arg_count));
Ben Murdoch097c5b22016-05-18 11:27:45 +01002762 __ Call(isolate()->builtins()->Call(ConvertReceiverMode::kAny,
2763 expr->tail_call_mode()),
2764 RelocInfo::CODE_TARGET);
2765 OperandStackDepthDecrement(arg_count + 1);
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00002766 RecordJSReturnSite(expr);
2767 // Restore context register.
2768 __ LoadP(cp, MemOperand(fp, StandardFrameConstants::kContextOffset));
2769 context()->DropAndPlug(1, r3);
2770}
2771
2772
2773void FullCodeGenerator::VisitCallNew(CallNew* expr) {
2774 Comment cmnt(masm_, "[ CallNew");
2775 // According to ECMA-262, section 11.2.2, page 44, the function
2776 // expression in new calls must be evaluated before the
2777 // arguments.
2778
2779 // Push constructor on the stack. If it's not a function it's used as
2780 // receiver for CALL_NON_FUNCTION, otherwise the value on the stack is
2781 // ignored.
2782 DCHECK(!expr->expression()->IsSuperPropertyReference());
2783 VisitForStackValue(expr->expression());
2784
2785 // Push the arguments ("left-to-right") on the stack.
2786 ZoneList<Expression*>* args = expr->arguments();
2787 int arg_count = args->length();
2788 for (int i = 0; i < arg_count; i++) {
2789 VisitForStackValue(args->at(i));
2790 }
2791
2792 // Call the construct call builtin that handles allocation and
2793 // constructor invocation.
2794 SetConstructCallPosition(expr);
2795
2796 // Load function and argument count into r4 and r3.
2797 __ mov(r3, Operand(arg_count));
2798 __ LoadP(r4, MemOperand(sp, arg_count * kPointerSize), r0);
2799
2800 // Record call targets in unoptimized code.
2801 __ EmitLoadTypeFeedbackVector(r5);
2802 __ LoadSmiLiteral(r6, SmiFromSlot(expr->CallNewFeedbackSlot()));
2803
2804 CallConstructStub stub(isolate());
2805 __ Call(stub.GetCode(), RelocInfo::CODE_TARGET);
Ben Murdoch097c5b22016-05-18 11:27:45 +01002806 OperandStackDepthDecrement(arg_count + 1);
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00002807 PrepareForBailoutForId(expr->ReturnId(), TOS_REG);
2808 // Restore context register.
2809 __ LoadP(cp, MemOperand(fp, StandardFrameConstants::kContextOffset));
2810 context()->Plug(r3);
2811}
2812
2813
2814void FullCodeGenerator::EmitSuperConstructorCall(Call* expr) {
2815 SuperCallReference* super_call_ref =
2816 expr->expression()->AsSuperCallReference();
2817 DCHECK_NOT_NULL(super_call_ref);
2818
2819 // Push the super constructor target on the stack (may be null,
2820 // but the Construct builtin can deal with that properly).
2821 VisitForAccumulatorValue(super_call_ref->this_function_var());
2822 __ AssertFunction(result_register());
2823 __ LoadP(result_register(),
2824 FieldMemOperand(result_register(), HeapObject::kMapOffset));
2825 __ LoadP(result_register(),
2826 FieldMemOperand(result_register(), Map::kPrototypeOffset));
Ben Murdoch097c5b22016-05-18 11:27:45 +01002827 PushOperand(result_register());
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00002828
2829 // Push the arguments ("left-to-right") on the stack.
2830 ZoneList<Expression*>* args = expr->arguments();
2831 int arg_count = args->length();
2832 for (int i = 0; i < arg_count; i++) {
2833 VisitForStackValue(args->at(i));
2834 }
2835
2836 // Call the construct call builtin that handles allocation and
2837 // constructor invocation.
2838 SetConstructCallPosition(expr);
2839
2840 // Load new target into r6.
2841 VisitForAccumulatorValue(super_call_ref->new_target_var());
2842 __ mr(r6, result_register());
2843
 2844 // Load function and argument count into r4 and r3.
2845 __ mov(r3, Operand(arg_count));
2846 __ LoadP(r4, MemOperand(sp, arg_count * kPointerSize));
2847
2848 __ Call(isolate()->builtins()->Construct(), RelocInfo::CODE_TARGET);
Ben Murdoch097c5b22016-05-18 11:27:45 +01002849 OperandStackDepthDecrement(arg_count + 1);
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00002850
2851 RecordJSReturnSite(expr);
2852
2853 // Restore context register.
2854 __ LoadP(cp, MemOperand(fp, StandardFrameConstants::kContextOffset));
2855 context()->Plug(r3);
2856}
2857
2858
2859void FullCodeGenerator::EmitIsSmi(CallRuntime* expr) {
2860 ZoneList<Expression*>* args = expr->arguments();
2861 DCHECK(args->length() == 1);
2862
2863 VisitForAccumulatorValue(args->at(0));
2864
2865 Label materialize_true, materialize_false;
2866 Label* if_true = NULL;
2867 Label* if_false = NULL;
2868 Label* fall_through = NULL;
2869 context()->PrepareTest(&materialize_true, &materialize_false, &if_true,
2870 &if_false, &fall_through);
2871
2872 PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
2873 __ TestIfSmi(r3, r0);
2874 Split(eq, if_true, if_false, fall_through, cr0);
2875
2876 context()->Plug(if_true, if_false);
2877}
2878
2879
2880void FullCodeGenerator::EmitIsJSReceiver(CallRuntime* expr) {
2881 ZoneList<Expression*>* args = expr->arguments();
2882 DCHECK(args->length() == 1);
2883
2884 VisitForAccumulatorValue(args->at(0));
2885
2886 Label materialize_true, materialize_false;
2887 Label* if_true = NULL;
2888 Label* if_false = NULL;
2889 Label* fall_through = NULL;
2890 context()->PrepareTest(&materialize_true, &materialize_false, &if_true,
2891 &if_false, &fall_through);
2892
2893 __ JumpIfSmi(r3, if_false);
2894 __ CompareObjectType(r3, r4, r4, FIRST_JS_RECEIVER_TYPE);
2895 PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
2896 Split(ge, if_true, if_false, fall_through);
2897
2898 context()->Plug(if_true, if_false);
2899}
2900
2901
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00002902void FullCodeGenerator::EmitIsArray(CallRuntime* expr) {
2903 ZoneList<Expression*>* args = expr->arguments();
2904 DCHECK(args->length() == 1);
2905
2906 VisitForAccumulatorValue(args->at(0));
2907
2908 Label materialize_true, materialize_false;
2909 Label* if_true = NULL;
2910 Label* if_false = NULL;
2911 Label* fall_through = NULL;
2912 context()->PrepareTest(&materialize_true, &materialize_false, &if_true,
2913 &if_false, &fall_through);
2914
2915 __ JumpIfSmi(r3, if_false);
2916 __ CompareObjectType(r3, r4, r4, JS_ARRAY_TYPE);
2917 PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
2918 Split(eq, if_true, if_false, fall_through);
2919
2920 context()->Plug(if_true, if_false);
2921}
2922
2923
2924void FullCodeGenerator::EmitIsTypedArray(CallRuntime* expr) {
2925 ZoneList<Expression*>* args = expr->arguments();
2926 DCHECK(args->length() == 1);
2927
2928 VisitForAccumulatorValue(args->at(0));
2929
2930 Label materialize_true, materialize_false;
2931 Label* if_true = NULL;
2932 Label* if_false = NULL;
2933 Label* fall_through = NULL;
2934 context()->PrepareTest(&materialize_true, &materialize_false, &if_true,
2935 &if_false, &fall_through);
2936
2937 __ JumpIfSmi(r3, if_false);
2938 __ CompareObjectType(r3, r4, r4, JS_TYPED_ARRAY_TYPE);
2939 PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
2940 Split(eq, if_true, if_false, fall_through);
2941
2942 context()->Plug(if_true, if_false);
2943}
2944
2945
2946void FullCodeGenerator::EmitIsRegExp(CallRuntime* expr) {
2947 ZoneList<Expression*>* args = expr->arguments();
2948 DCHECK(args->length() == 1);
2949
2950 VisitForAccumulatorValue(args->at(0));
2951
2952 Label materialize_true, materialize_false;
2953 Label* if_true = NULL;
2954 Label* if_false = NULL;
2955 Label* fall_through = NULL;
2956 context()->PrepareTest(&materialize_true, &materialize_false, &if_true,
2957 &if_false, &fall_through);
2958
2959 __ JumpIfSmi(r3, if_false);
2960 __ CompareObjectType(r3, r4, r4, JS_REGEXP_TYPE);
2961 PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
2962 Split(eq, if_true, if_false, fall_through);
2963
2964 context()->Plug(if_true, if_false);
2965}
2966
2967
2968void FullCodeGenerator::EmitIsJSProxy(CallRuntime* expr) {
2969 ZoneList<Expression*>* args = expr->arguments();
2970 DCHECK(args->length() == 1);
2971
2972 VisitForAccumulatorValue(args->at(0));
2973
2974 Label materialize_true, materialize_false;
2975 Label* if_true = NULL;
2976 Label* if_false = NULL;
2977 Label* fall_through = NULL;
2978 context()->PrepareTest(&materialize_true, &materialize_false, &if_true,
2979 &if_false, &fall_through);
2980
2981 __ JumpIfSmi(r3, if_false);
2982 __ CompareObjectType(r3, r4, r4, JS_PROXY_TYPE);
2983 PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
2984 Split(eq, if_true, if_false, fall_through);
2985
2986 context()->Plug(if_true, if_false);
2987}
2988
2989
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00002990void FullCodeGenerator::EmitClassOf(CallRuntime* expr) {
2991 ZoneList<Expression*>* args = expr->arguments();
2992 DCHECK(args->length() == 1);
2993 Label done, null, function, non_function_constructor;
2994
2995 VisitForAccumulatorValue(args->at(0));
2996
2997 // If the object is not a JSReceiver, we return null.
2998 __ JumpIfSmi(r3, &null);
2999 STATIC_ASSERT(LAST_JS_RECEIVER_TYPE == LAST_TYPE);
3000 __ CompareObjectType(r3, r3, r4, FIRST_JS_RECEIVER_TYPE);
3001 // Map is now in r3.
3002 __ blt(&null);
3003
Ben Murdochda12d292016-06-02 14:46:10 +01003004 // Return 'Function' for JSFunction and JSBoundFunction objects.
3005 __ cmpli(r4, Operand(FIRST_FUNCTION_TYPE));
3006 STATIC_ASSERT(LAST_FUNCTION_TYPE == LAST_TYPE);
3007 __ bge(&function);
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00003008
3009 // Check if the constructor in the map is a JS function.
3010 Register instance_type = r5;
3011 __ GetMapConstructor(r3, r3, r4, instance_type);
3012 __ cmpi(instance_type, Operand(JS_FUNCTION_TYPE));
3013 __ bne(&non_function_constructor);
3014
3015 // r3 now contains the constructor function. Grab the
3016 // instance class name from there.
3017 __ LoadP(r3, FieldMemOperand(r3, JSFunction::kSharedFunctionInfoOffset));
3018 __ LoadP(r3,
3019 FieldMemOperand(r3, SharedFunctionInfo::kInstanceClassNameOffset));
3020 __ b(&done);
3021
3022 // Functions have class 'Function'.
3023 __ bind(&function);
3024 __ LoadRoot(r3, Heap::kFunction_stringRootIndex);
3025 __ b(&done);
3026
3027 // Objects with a non-function constructor have class 'Object'.
3028 __ bind(&non_function_constructor);
3029 __ LoadRoot(r3, Heap::kObject_stringRootIndex);
3030 __ b(&done);
3031
3032 // Non-JS objects have class null.
3033 __ bind(&null);
3034 __ LoadRoot(r3, Heap::kNullValueRootIndex);
3035
3036 // All done.
3037 __ bind(&done);
3038
3039 context()->Plug(r3);
3040}
3041
3042
3043void FullCodeGenerator::EmitValueOf(CallRuntime* expr) {
3044 ZoneList<Expression*>* args = expr->arguments();
3045 DCHECK(args->length() == 1);
3046 VisitForAccumulatorValue(args->at(0)); // Load the object.
3047
3048 Label done;
3049 // If the object is a smi return the object.
3050 __ JumpIfSmi(r3, &done);
3051 // If the object is not a value type, return the object.
3052 __ CompareObjectType(r3, r4, r4, JS_VALUE_TYPE);
3053 __ bne(&done);
3054 __ LoadP(r3, FieldMemOperand(r3, JSValue::kValueOffset));
3055
3056 __ bind(&done);
3057 context()->Plug(r3);
3058}
3059
3060
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00003061void FullCodeGenerator::EmitOneByteSeqStringSetChar(CallRuntime* expr) {
3062 ZoneList<Expression*>* args = expr->arguments();
3063 DCHECK_EQ(3, args->length());
3064
3065 Register string = r3;
3066 Register index = r4;
3067 Register value = r5;
3068
3069 VisitForStackValue(args->at(0)); // index
3070 VisitForStackValue(args->at(1)); // value
3071 VisitForAccumulatorValue(args->at(2)); // string
Ben Murdoch097c5b22016-05-18 11:27:45 +01003072 PopOperands(index, value);
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00003073
3074 if (FLAG_debug_code) {
3075 __ TestIfSmi(value, r0);
3076 __ Check(eq, kNonSmiValue, cr0);
3077 __ TestIfSmi(index, r0);
3078 __ Check(eq, kNonSmiIndex, cr0);
3079 __ SmiUntag(index, index);
3080 static const uint32_t one_byte_seq_type = kSeqStringTag | kOneByteStringTag;
3081 __ EmitSeqStringSetCharCheck(string, index, value, one_byte_seq_type);
3082 __ SmiTag(index, index);
3083 }
3084
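  // Untag the value, compute the character's address inside the sequential string,
  // and store it as a single byte.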
3085 __ SmiUntag(value);
3086 __ addi(ip, string, Operand(SeqOneByteString::kHeaderSize - kHeapObjectTag));
3087 __ SmiToByteArrayOffset(r0, index);
3088 __ stbx(value, MemOperand(ip, r0));
3089 context()->Plug(string);
3090}
3091
3092
3093void FullCodeGenerator::EmitTwoByteSeqStringSetChar(CallRuntime* expr) {
3094 ZoneList<Expression*>* args = expr->arguments();
3095 DCHECK_EQ(3, args->length());
3096
3097 Register string = r3;
3098 Register index = r4;
3099 Register value = r5;
3100
3101 VisitForStackValue(args->at(0)); // index
3102 VisitForStackValue(args->at(1)); // value
3103 VisitForAccumulatorValue(args->at(2)); // string
Ben Murdoch097c5b22016-05-18 11:27:45 +01003104 PopOperands(index, value);
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00003105
3106 if (FLAG_debug_code) {
3107 __ TestIfSmi(value, r0);
3108 __ Check(eq, kNonSmiValue, cr0);
3109 __ TestIfSmi(index, r0);
3110 __ Check(eq, kNonSmiIndex, cr0);
3111 __ SmiUntag(index, index);
3112 static const uint32_t two_byte_seq_type = kSeqStringTag | kTwoByteStringTag;
3113 __ EmitSeqStringSetCharCheck(string, index, value, two_byte_seq_type);
3114 __ SmiTag(index, index);
3115 }
3116
3117 __ SmiUntag(value);
3118 __ addi(ip, string, Operand(SeqTwoByteString::kHeaderSize - kHeapObjectTag));
3119 __ SmiToShortArrayOffset(r0, index);
3120 __ sthx(value, MemOperand(ip, r0));
3121 context()->Plug(string);
3122}
3123
3124
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00003125void FullCodeGenerator::EmitStringCharFromCode(CallRuntime* expr) {
3126 ZoneList<Expression*>* args = expr->arguments();
3127 DCHECK(args->length() == 1);
3128 VisitForAccumulatorValue(args->at(0));
3129
3130 Label done;
3131 StringCharFromCodeGenerator generator(r3, r4);
3132 generator.GenerateFast(masm_);
3133 __ b(&done);
3134
3135 NopRuntimeCallHelper call_helper;
3136 generator.GenerateSlow(masm_, call_helper);
3137
3138 __ bind(&done);
3139 context()->Plug(r4);
3140}
3141
3142
3143void FullCodeGenerator::EmitStringCharCodeAt(CallRuntime* expr) {
3144 ZoneList<Expression*>* args = expr->arguments();
3145 DCHECK(args->length() == 2);
3146 VisitForStackValue(args->at(0));
3147 VisitForAccumulatorValue(args->at(1));
3148
3149 Register object = r4;
3150 Register index = r3;
3151 Register result = r6;
3152
Ben Murdoch097c5b22016-05-18 11:27:45 +01003153 PopOperand(object);
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00003154
3155 Label need_conversion;
3156 Label index_out_of_range;
3157 Label done;
3158 StringCharCodeAtGenerator generator(object, index, result, &need_conversion,
3159 &need_conversion, &index_out_of_range,
3160 STRING_INDEX_IS_NUMBER);
3161 generator.GenerateFast(masm_);
3162 __ b(&done);
3163
3164 __ bind(&index_out_of_range);
3165 // When the index is out of range, the spec requires us to return
3166 // NaN.
3167 __ LoadRoot(result, Heap::kNanValueRootIndex);
3168 __ b(&done);
3169
3170 __ bind(&need_conversion);
3171 // Load the undefined value into the result register, which will
3172 // trigger conversion.
3173 __ LoadRoot(result, Heap::kUndefinedValueRootIndex);
3174 __ b(&done);
3175
3176 NopRuntimeCallHelper call_helper;
3177 generator.GenerateSlow(masm_, NOT_PART_OF_IC_HANDLER, call_helper);
3178
3179 __ bind(&done);
3180 context()->Plug(result);
3181}
3182
3183
3184void FullCodeGenerator::EmitStringCharAt(CallRuntime* expr) {
3185 ZoneList<Expression*>* args = expr->arguments();
3186 DCHECK(args->length() == 2);
3187 VisitForStackValue(args->at(0));
3188 VisitForAccumulatorValue(args->at(1));
3189
3190 Register object = r4;
3191 Register index = r3;
3192 Register scratch = r6;
3193 Register result = r3;
3194
Ben Murdoch097c5b22016-05-18 11:27:45 +01003195 PopOperand(object);
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00003196
3197 Label need_conversion;
3198 Label index_out_of_range;
3199 Label done;
3200 StringCharAtGenerator generator(object, index, scratch, result,
3201 &need_conversion, &need_conversion,
3202 &index_out_of_range, STRING_INDEX_IS_NUMBER);
3203 generator.GenerateFast(masm_);
3204 __ b(&done);
3205
3206 __ bind(&index_out_of_range);
3207 // When the index is out of range, the spec requires us to return
3208 // the empty string.
3209 __ LoadRoot(result, Heap::kempty_stringRootIndex);
3210 __ b(&done);
3211
3212 __ bind(&need_conversion);
3213 // Move smi zero into the result register, which will trigger
3214 // conversion.
3215 __ LoadSmiLiteral(result, Smi::FromInt(0));
3216 __ b(&done);
3217
3218 NopRuntimeCallHelper call_helper;
3219 generator.GenerateSlow(masm_, NOT_PART_OF_IC_HANDLER, call_helper);
3220
3221 __ bind(&done);
3222 context()->Plug(result);
3223}
3224
3225
3226void FullCodeGenerator::EmitCall(CallRuntime* expr) {
3227 ZoneList<Expression*>* args = expr->arguments();
3228 DCHECK_LE(2, args->length());
3229 // Push target, receiver and arguments onto the stack.
3230 for (Expression* const arg : *args) {
3231 VisitForStackValue(arg);
3232 }
3233 PrepareForBailoutForId(expr->CallId(), NO_REGISTERS);
3234 // Move target to r4.
3235 int const argc = args->length() - 2;
3236 __ LoadP(r4, MemOperand(sp, (argc + 1) * kPointerSize));
3237 // Call the target.
3238 __ mov(r3, Operand(argc));
3239 __ Call(isolate()->builtins()->Call(), RelocInfo::CODE_TARGET);
Ben Murdoch097c5b22016-05-18 11:27:45 +01003240 OperandStackDepthDecrement(argc + 1);
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00003241 // Restore context register.
3242 __ LoadP(cp, MemOperand(fp, StandardFrameConstants::kContextOffset));
3243 // Discard the function left on TOS.
3244 context()->DropAndPlug(1, r3);
3245}
3246
3247
3248void FullCodeGenerator::EmitHasCachedArrayIndex(CallRuntime* expr) {
3249 ZoneList<Expression*>* args = expr->arguments();
3250 VisitForAccumulatorValue(args->at(0));
3251
3252 Label materialize_true, materialize_false;
3253 Label* if_true = NULL;
3254 Label* if_false = NULL;
3255 Label* fall_through = NULL;
3256 context()->PrepareTest(&materialize_true, &materialize_false, &if_true,
3257 &if_false, &fall_through);
3258
3259 __ lwz(r3, FieldMemOperand(r3, String::kHashFieldOffset));
3260 // PPC - assume ip is free
3261 __ mov(ip, Operand(String::kContainsCachedArrayIndexMask));
3262 __ and_(r0, r3, ip, SetRC);
3263 PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
3264 Split(eq, if_true, if_false, fall_through, cr0);
3265
3266 context()->Plug(if_true, if_false);
3267}
3268
3269
3270void FullCodeGenerator::EmitGetCachedArrayIndex(CallRuntime* expr) {
3271 ZoneList<Expression*>* args = expr->arguments();
3272 DCHECK(args->length() == 1);
3273 VisitForAccumulatorValue(args->at(0));
3274
3275 __ AssertString(r3);
3276
3277 __ lwz(r3, FieldMemOperand(r3, String::kHashFieldOffset));
3278 __ IndexFromHash(r3, r3);
3279
3280 context()->Plug(r3);
3281}
3282
3283
3284void FullCodeGenerator::EmitGetSuperConstructor(CallRuntime* expr) {
3285 ZoneList<Expression*>* args = expr->arguments();
3286 DCHECK_EQ(1, args->length());
3287 VisitForAccumulatorValue(args->at(0));
3288 __ AssertFunction(r3);
3289 __ LoadP(r3, FieldMemOperand(r3, HeapObject::kMapOffset));
3290 __ LoadP(r3, FieldMemOperand(r3, Map::kPrototypeOffset));
3291 context()->Plug(r3);
3292}
3293
Ben Murdochda12d292016-06-02 14:46:10 +01003294void FullCodeGenerator::EmitGetOrdinaryHasInstance(CallRuntime* expr) {
3295 DCHECK_EQ(0, expr->arguments()->length());
3296 __ LoadNativeContextSlot(Context::ORDINARY_HAS_INSTANCE_INDEX, r3);
3297 context()->Plug(r3);
3298}
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00003299
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00003300void FullCodeGenerator::EmitDebugIsActive(CallRuntime* expr) {
3301 DCHECK(expr->arguments()->length() == 0);
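  // Read the byte-sized debug-is-active flag from the isolate and return it to
  // JavaScript as a smi.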
3302 ExternalReference debug_is_active =
3303 ExternalReference::debug_is_active_address(isolate());
3304 __ mov(ip, Operand(debug_is_active));
3305 __ lbz(r3, MemOperand(ip));
3306 __ SmiTag(r3);
3307 context()->Plug(r3);
3308}
3309
3310
3311void FullCodeGenerator::EmitCreateIterResultObject(CallRuntime* expr) {
3312 ZoneList<Expression*>* args = expr->arguments();
3313 DCHECK_EQ(2, args->length());
3314 VisitForStackValue(args->at(0));
3315 VisitForStackValue(args->at(1));
3316
3317 Label runtime, done;
3318
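  // Fast path: allocate the JSIteratorResult inline and fill in its map, properties,
  // elements, value and done fields; fall back to the runtime on allocation failure.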
3319 __ Allocate(JSIteratorResult::kSize, r3, r5, r6, &runtime, TAG_OBJECT);
3320 __ LoadNativeContextSlot(Context::ITERATOR_RESULT_MAP_INDEX, r4);
3321 __ Pop(r5, r6);
3322 __ LoadRoot(r7, Heap::kEmptyFixedArrayRootIndex);
3323 __ StoreP(r4, FieldMemOperand(r3, HeapObject::kMapOffset), r0);
3324 __ StoreP(r7, FieldMemOperand(r3, JSObject::kPropertiesOffset), r0);
3325 __ StoreP(r7, FieldMemOperand(r3, JSObject::kElementsOffset), r0);
3326 __ StoreP(r5, FieldMemOperand(r3, JSIteratorResult::kValueOffset), r0);
3327 __ StoreP(r6, FieldMemOperand(r3, JSIteratorResult::kDoneOffset), r0);
3328 STATIC_ASSERT(JSIteratorResult::kSize == 5 * kPointerSize);
3329 __ b(&done);
3330
3331 __ bind(&runtime);
Ben Murdoch097c5b22016-05-18 11:27:45 +01003332 CallRuntimeWithOperands(Runtime::kCreateIterResultObject);
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00003333
3334 __ bind(&done);
3335 context()->Plug(r3);
3336}
3337
3338
3339void FullCodeGenerator::EmitLoadJSRuntimeFunction(CallRuntime* expr) {
Ben Murdochda12d292016-06-02 14:46:10 +01003340 // Push function.
3341 __ LoadNativeContextSlot(expr->context_index(), r3);
3342 PushOperand(r3);
3343
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00003344 // Push undefined as the receiver.
3345 __ LoadRoot(r3, Heap::kUndefinedValueRootIndex);
Ben Murdoch097c5b22016-05-18 11:27:45 +01003346 PushOperand(r3);
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00003347}
3348
3349
3350void FullCodeGenerator::EmitCallJSRuntimeFunction(CallRuntime* expr) {
3351 ZoneList<Expression*>* args = expr->arguments();
3352 int arg_count = args->length();
3353
3354 SetCallPosition(expr);
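  // The target pushed by EmitLoadJSRuntimeFunction sits below the receiver and the
  // arguments; load it into r4 and invoke the Call builtin with an undefined receiver.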
3355 __ LoadP(r4, MemOperand(sp, (arg_count + 1) * kPointerSize), r0);
3356 __ mov(r3, Operand(arg_count));
3357 __ Call(isolate()->builtins()->Call(ConvertReceiverMode::kNullOrUndefined),
3358 RelocInfo::CODE_TARGET);
Ben Murdoch097c5b22016-05-18 11:27:45 +01003359 OperandStackDepthDecrement(arg_count + 1);
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00003360
Ben Murdochda12d292016-06-02 14:46:10 +01003361 // Restore context register.
3362 __ LoadP(cp, MemOperand(fp, StandardFrameConstants::kContextOffset));
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00003363}
3364
3365
3366void FullCodeGenerator::VisitUnaryOperation(UnaryOperation* expr) {
3367 switch (expr->op()) {
3368 case Token::DELETE: {
3369 Comment cmnt(masm_, "[ UnaryOperation (DELETE)");
3370 Property* property = expr->expression()->AsProperty();
3371 VariableProxy* proxy = expr->expression()->AsVariableProxy();
3372
3373 if (property != NULL) {
3374 VisitForStackValue(property->obj());
3375 VisitForStackValue(property->key());
Ben Murdoch097c5b22016-05-18 11:27:45 +01003376 CallRuntimeWithOperands(is_strict(language_mode())
3377 ? Runtime::kDeleteProperty_Strict
3378 : Runtime::kDeleteProperty_Sloppy);
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00003379 context()->Plug(r3);
3380 } else if (proxy != NULL) {
3381 Variable* var = proxy->var();
3382 // Delete of an unqualified identifier is disallowed in strict mode but
3383 // "delete this" is allowed.
3384 bool is_this = var->HasThisName(isolate());
3385 DCHECK(is_sloppy(language_mode()) || is_this);
3386 if (var->IsUnallocatedOrGlobalSlot()) {
3387 __ LoadGlobalObject(r5);
3388 __ mov(r4, Operand(var->name()));
3389 __ Push(r5, r4);
3390 __ CallRuntime(Runtime::kDeleteProperty_Sloppy);
3391 context()->Plug(r3);
3392 } else if (var->IsStackAllocated() || var->IsContextSlot()) {
3393 // Result of deleting non-global, non-dynamic variables is false.
3394 // The subexpression does not have side effects.
3395 context()->Plug(is_this);
3396 } else {
3397 // Non-global variable. Call the runtime to try to delete from the
3398 // context where the variable was introduced.
Ben Murdoch097c5b22016-05-18 11:27:45 +01003399 __ Push(var->name());
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00003400 __ CallRuntime(Runtime::kDeleteLookupSlot);
3401 context()->Plug(r3);
3402 }
3403 } else {
3404 // Result of deleting non-property, non-variable reference is true.
3405 // The subexpression may have side effects.
3406 VisitForEffect(expr->expression());
3407 context()->Plug(true);
3408 }
3409 break;
3410 }
3411
3412 case Token::VOID: {
3413 Comment cmnt(masm_, "[ UnaryOperation (VOID)");
3414 VisitForEffect(expr->expression());
3415 context()->Plug(Heap::kUndefinedValueRootIndex);
3416 break;
3417 }
3418
3419 case Token::NOT: {
3420 Comment cmnt(masm_, "[ UnaryOperation (NOT)");
3421 if (context()->IsEffect()) {
3422 // Unary NOT has no side effects so it's only necessary to visit the
3423 // subexpression. Match the optimizing compiler by not branching.
3424 VisitForEffect(expr->expression());
3425 } else if (context()->IsTest()) {
3426 const TestContext* test = TestContext::cast(context());
3427 // The labels are swapped for the recursive call.
3428 VisitForControl(expr->expression(), test->false_label(),
3429 test->true_label(), test->fall_through());
3430 context()->Plug(test->true_label(), test->false_label());
3431 } else {
3432 // We handle value contexts explicitly rather than simply visiting
3433 // for control and plugging the control flow into the context,
3434 // because we need to prepare a pair of extra administrative AST ids
3435 // for the optimizing compiler.
3436 DCHECK(context()->IsAccumulatorValue() || context()->IsStackValue());
3437 Label materialize_true, materialize_false, done;
3438 VisitForControl(expr->expression(), &materialize_false,
3439 &materialize_true, &materialize_true);
Ben Murdoch097c5b22016-05-18 11:27:45 +01003440 if (!context()->IsAccumulatorValue()) OperandStackDepthIncrement(1);
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00003441 __ bind(&materialize_true);
3442 PrepareForBailoutForId(expr->MaterializeTrueId(), NO_REGISTERS);
3443 __ LoadRoot(r3, Heap::kTrueValueRootIndex);
3444 if (context()->IsStackValue()) __ push(r3);
3445 __ b(&done);
3446 __ bind(&materialize_false);
3447 PrepareForBailoutForId(expr->MaterializeFalseId(), NO_REGISTERS);
3448 __ LoadRoot(r3, Heap::kFalseValueRootIndex);
3449 if (context()->IsStackValue()) __ push(r3);
3450 __ bind(&done);
3451 }
3452 break;
3453 }
3454
3455 case Token::TYPEOF: {
3456 Comment cmnt(masm_, "[ UnaryOperation (TYPEOF)");
3457 {
3458 AccumulatorValueContext context(this);
3459 VisitForTypeofValue(expr->expression());
3460 }
3461 __ mr(r6, r3);
3462 TypeofStub typeof_stub(isolate());
3463 __ CallStub(&typeof_stub);
3464 context()->Plug(r3);
3465 break;
3466 }
3467
3468 default:
3469 UNREACHABLE();
3470 }
3471}
3472
3473
3474void FullCodeGenerator::VisitCountOperation(CountOperation* expr) {
3475 DCHECK(expr->expression()->IsValidReferenceExpressionOrThis());
3476
3477 Comment cmnt(masm_, "[ CountOperation");
3478
3479 Property* prop = expr->expression()->AsProperty();
3480 LhsKind assign_type = Property::GetAssignType(prop);
3481
3482 // Evaluate expression and get value.
3483 if (assign_type == VARIABLE) {
3484 DCHECK(expr->expression()->AsVariableProxy()->var() != NULL);
3485 AccumulatorValueContext context(this);
3486 EmitVariableLoad(expr->expression()->AsVariableProxy());
3487 } else {
3488 // Reserve space for result of postfix operation.
3489 if (expr->is_postfix() && !context()->IsEffect()) {
3490 __ LoadSmiLiteral(ip, Smi::FromInt(0));
Ben Murdoch097c5b22016-05-18 11:27:45 +01003491 PushOperand(ip);
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00003492 }
3493 switch (assign_type) {
3494 case NAMED_PROPERTY: {
3495 // Put the object both on the stack and in the register.
3496 VisitForStackValue(prop->obj());
3497 __ LoadP(LoadDescriptor::ReceiverRegister(), MemOperand(sp, 0));
3498 EmitNamedPropertyLoad(prop);
3499 break;
3500 }
3501
3502 case NAMED_SUPER_PROPERTY: {
3503 VisitForStackValue(prop->obj()->AsSuperPropertyReference()->this_var());
3504 VisitForAccumulatorValue(
3505 prop->obj()->AsSuperPropertyReference()->home_object());
Ben Murdoch097c5b22016-05-18 11:27:45 +01003506 PushOperand(result_register());
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00003507 const Register scratch = r4;
3508 __ LoadP(scratch, MemOperand(sp, kPointerSize));
Ben Murdoch097c5b22016-05-18 11:27:45 +01003509 PushOperands(scratch, result_register());
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00003510 EmitNamedSuperPropertyLoad(prop);
3511 break;
3512 }
3513
3514 case KEYED_SUPER_PROPERTY: {
3515 VisitForStackValue(prop->obj()->AsSuperPropertyReference()->this_var());
3516 VisitForAccumulatorValue(
3517 prop->obj()->AsSuperPropertyReference()->home_object());
3518 const Register scratch = r4;
3519 const Register scratch1 = r5;
3520 __ mr(scratch, result_register());
3521 VisitForAccumulatorValue(prop->key());
Ben Murdoch097c5b22016-05-18 11:27:45 +01003522 PushOperands(scratch, result_register());
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00003523 __ LoadP(scratch1, MemOperand(sp, 2 * kPointerSize));
Ben Murdoch097c5b22016-05-18 11:27:45 +01003524 PushOperands(scratch1, scratch, result_register());
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00003525 EmitKeyedSuperPropertyLoad(prop);
3526 break;
3527 }
3528
3529 case KEYED_PROPERTY: {
3530 VisitForStackValue(prop->obj());
3531 VisitForStackValue(prop->key());
3532 __ LoadP(LoadDescriptor::ReceiverRegister(),
3533 MemOperand(sp, 1 * kPointerSize));
3534 __ LoadP(LoadDescriptor::NameRegister(), MemOperand(sp, 0));
3535 EmitKeyedPropertyLoad(prop);
3536 break;
3537 }
3538
3539 case VARIABLE:
3540 UNREACHABLE();
3541 }
3542 }
3543
3544 // We need a second deoptimization point after loading the value
3545 // in case evaluating the property load may have a side effect.
3546 if (assign_type == VARIABLE) {
3547 PrepareForBailout(expr->expression(), TOS_REG);
3548 } else {
3549 PrepareForBailoutForId(prop->LoadId(), TOS_REG);
3550 }
3551
3552 // Inline smi case if we are in a loop.
3553 Label stub_call, done;
3554 JumpPatchSite patch_site(masm_);
3555
3556 int count_value = expr->op() == Token::INC ? 1 : -1;
3557 if (ShouldInlineSmiCase(expr->op())) {
3558 Label slow;
3559 patch_site.EmitJumpIfNotSmi(r3, &slow);
3560
3561 // Save result for postfix expressions.
3562 if (expr->is_postfix()) {
3563 if (!context()->IsEffect()) {
3564 // Save the result on the stack. If we have a named or keyed property
3565 // we store the result under the receiver that is currently on top
3566 // of the stack.
3567 switch (assign_type) {
3568 case VARIABLE:
3569 __ push(r3);
3570 break;
3571 case NAMED_PROPERTY:
3572 __ StoreP(r3, MemOperand(sp, kPointerSize));
3573 break;
3574 case NAMED_SUPER_PROPERTY:
3575 __ StoreP(r3, MemOperand(sp, 2 * kPointerSize));
3576 break;
3577 case KEYED_PROPERTY:
3578 __ StoreP(r3, MemOperand(sp, 2 * kPointerSize));
3579 break;
3580 case KEYED_SUPER_PROPERTY:
3581 __ StoreP(r3, MemOperand(sp, 3 * kPointerSize));
3582 break;
3583 }
3584 }
3585 }
3586
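  // Inline smi path: add the count value and check for overflow. On overflow, undo
  // the addition and branch to the BinaryOpIC call below.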
3587 Register scratch1 = r4;
3588 Register scratch2 = r5;
3589 __ LoadSmiLiteral(scratch1, Smi::FromInt(count_value));
3590 __ AddAndCheckForOverflow(r3, r3, scratch1, scratch2, r0);
3591 __ BranchOnNoOverflow(&done);
3592 // Call stub. Undo operation first.
3593 __ sub(r3, r3, scratch1);
3594 __ b(&stub_call);
3595 __ bind(&slow);
3596 }
Ben Murdochda12d292016-06-02 14:46:10 +01003597
3598 // Convert old value into a number.
3599 ToNumberStub convert_stub(isolate());
3600 __ CallStub(&convert_stub);
3601 PrepareForBailoutForId(expr->ToNumberId(), TOS_REG);
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00003602
3603 // Save result for postfix expressions.
3604 if (expr->is_postfix()) {
3605 if (!context()->IsEffect()) {
3606 // Save the result on the stack. If we have a named or keyed property
3607 // we store the result under the receiver that is currently on top
3608 // of the stack.
3609 switch (assign_type) {
3610 case VARIABLE:
Ben Murdoch097c5b22016-05-18 11:27:45 +01003611 PushOperand(r3);
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00003612 break;
3613 case NAMED_PROPERTY:
3614 __ StoreP(r3, MemOperand(sp, kPointerSize));
3615 break;
3616 case NAMED_SUPER_PROPERTY:
3617 __ StoreP(r3, MemOperand(sp, 2 * kPointerSize));
3618 break;
3619 case KEYED_PROPERTY:
3620 __ StoreP(r3, MemOperand(sp, 2 * kPointerSize));
3621 break;
3622 case KEYED_SUPER_PROPERTY:
3623 __ StoreP(r3, MemOperand(sp, 3 * kPointerSize));
3624 break;
3625 }
3626 }
3627 }
3628
3629 __ bind(&stub_call);
3630 __ mr(r4, r3);
3631 __ LoadSmiLiteral(r3, Smi::FromInt(count_value));
3632
3633 SetExpressionPosition(expr);
3634
Ben Murdoch097c5b22016-05-18 11:27:45 +01003635 Handle<Code> code = CodeFactory::BinaryOpIC(isolate(), Token::ADD).code();
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00003636 CallIC(code, expr->CountBinOpFeedbackId());
3637 patch_site.EmitPatchInfo();
3638 __ bind(&done);
3639
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00003640 // Store the value returned in r3.
3641 switch (assign_type) {
3642 case VARIABLE:
3643 if (expr->is_postfix()) {
3644 {
3645 EffectContext context(this);
3646 EmitVariableAssignment(expr->expression()->AsVariableProxy()->var(),
3647 Token::ASSIGN, expr->CountSlot());
3648 PrepareForBailoutForId(expr->AssignmentId(), TOS_REG);
3649 context.Plug(r3);
3650 }
3651 // For all contexts except EffectContext we have the result on
3652 // top of the stack.
3653 if (!context()->IsEffect()) {
3654 context()->PlugTOS();
3655 }
3656 } else {
3657 EmitVariableAssignment(expr->expression()->AsVariableProxy()->var(),
3658 Token::ASSIGN, expr->CountSlot());
3659 PrepareForBailoutForId(expr->AssignmentId(), TOS_REG);
3660 context()->Plug(r3);
3661 }
3662 break;
3663 case NAMED_PROPERTY: {
3664 __ mov(StoreDescriptor::NameRegister(),
3665 Operand(prop->key()->AsLiteral()->value()));
Ben Murdoch097c5b22016-05-18 11:27:45 +01003666 PopOperand(StoreDescriptor::ReceiverRegister());
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00003667 EmitLoadStoreICSlot(expr->CountSlot());
3668 CallStoreIC();
3669 PrepareForBailoutForId(expr->AssignmentId(), TOS_REG);
3670 if (expr->is_postfix()) {
3671 if (!context()->IsEffect()) {
3672 context()->PlugTOS();
3673 }
3674 } else {
3675 context()->Plug(r3);
3676 }
3677 break;
3678 }
3679 case NAMED_SUPER_PROPERTY: {
3680 EmitNamedSuperPropertyStore(prop);
3681 if (expr->is_postfix()) {
3682 if (!context()->IsEffect()) {
3683 context()->PlugTOS();
3684 }
3685 } else {
3686 context()->Plug(r3);
3687 }
3688 break;
3689 }
3690 case KEYED_SUPER_PROPERTY: {
3691 EmitKeyedSuperPropertyStore(prop);
3692 if (expr->is_postfix()) {
3693 if (!context()->IsEffect()) {
3694 context()->PlugTOS();
3695 }
3696 } else {
3697 context()->Plug(r3);
3698 }
3699 break;
3700 }
3701 case KEYED_PROPERTY: {
Ben Murdoch097c5b22016-05-18 11:27:45 +01003702 PopOperands(StoreDescriptor::ReceiverRegister(),
3703 StoreDescriptor::NameRegister());
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00003704 Handle<Code> ic =
3705 CodeFactory::KeyedStoreIC(isolate(), language_mode()).code();
3706 EmitLoadStoreICSlot(expr->CountSlot());
3707 CallIC(ic);
3708 PrepareForBailoutForId(expr->AssignmentId(), TOS_REG);
3709 if (expr->is_postfix()) {
3710 if (!context()->IsEffect()) {
3711 context()->PlugTOS();
3712 }
3713 } else {
3714 context()->Plug(r3);
3715 }
3716 break;
3717 }
3718 }
3719}
3720
3721
3722void FullCodeGenerator::EmitLiteralCompareTypeof(Expression* expr,
3723 Expression* sub_expr,
3724 Handle<String> check) {
3725 Label materialize_true, materialize_false;
3726 Label* if_true = NULL;
3727 Label* if_false = NULL;
3728 Label* fall_through = NULL;
3729 context()->PrepareTest(&materialize_true, &materialize_false, &if_true,
3730 &if_false, &fall_through);
3731
3732 {
3733 AccumulatorValueContext context(this);
3734 VisitForTypeofValue(sub_expr);
3735 }
3736 PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
3737
3738 Factory* factory = isolate()->factory();
3739 if (String::Equals(check, factory->number_string())) {
3740 __ JumpIfSmi(r3, if_true);
3741 __ LoadP(r3, FieldMemOperand(r3, HeapObject::kMapOffset));
3742 __ LoadRoot(ip, Heap::kHeapNumberMapRootIndex);
3743 __ cmp(r3, ip);
3744 Split(eq, if_true, if_false, fall_through);
3745 } else if (String::Equals(check, factory->string_string())) {
3746 __ JumpIfSmi(r3, if_false);
3747 __ CompareObjectType(r3, r3, r4, FIRST_NONSTRING_TYPE);
3748 Split(lt, if_true, if_false, fall_through);
3749 } else if (String::Equals(check, factory->symbol_string())) {
3750 __ JumpIfSmi(r3, if_false);
3751 __ CompareObjectType(r3, r3, r4, SYMBOL_TYPE);
3752 Split(eq, if_true, if_false, fall_through);
3753 } else if (String::Equals(check, factory->boolean_string())) {
3754 __ CompareRoot(r3, Heap::kTrueValueRootIndex);
3755 __ beq(if_true);
3756 __ CompareRoot(r3, Heap::kFalseValueRootIndex);
3757 Split(eq, if_true, if_false, fall_through);
3758 } else if (String::Equals(check, factory->undefined_string())) {
Ben Murdoch097c5b22016-05-18 11:27:45 +01003759 __ CompareRoot(r3, Heap::kNullValueRootIndex);
3760 __ beq(if_false);
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00003761 __ JumpIfSmi(r3, if_false);
3762 // Check for undetectable objects => true.
3763 __ LoadP(r3, FieldMemOperand(r3, HeapObject::kMapOffset));
3764 __ lbz(r4, FieldMemOperand(r3, Map::kBitFieldOffset));
3765 __ andi(r0, r4, Operand(1 << Map::kIsUndetectable));
3766 Split(ne, if_true, if_false, fall_through, cr0);
3767
3768 } else if (String::Equals(check, factory->function_string())) {
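  // typeof is "function" only when the map is callable and not undetectable:
  // mask both bits, then require exactly the callable bit.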
3769 __ JumpIfSmi(r3, if_false);
3770 __ LoadP(r3, FieldMemOperand(r3, HeapObject::kMapOffset));
3771 __ lbz(r4, FieldMemOperand(r3, Map::kBitFieldOffset));
3772 __ andi(r4, r4,
3773 Operand((1 << Map::kIsCallable) | (1 << Map::kIsUndetectable)));
3774 __ cmpi(r4, Operand(1 << Map::kIsCallable));
3775 Split(eq, if_true, if_false, fall_through);
3776 } else if (String::Equals(check, factory->object_string())) {
3777 __ JumpIfSmi(r3, if_false);
3778 __ CompareRoot(r3, Heap::kNullValueRootIndex);
3779 __ beq(if_true);
3780 STATIC_ASSERT(LAST_JS_RECEIVER_TYPE == LAST_TYPE);
3781 __ CompareObjectType(r3, r3, r4, FIRST_JS_RECEIVER_TYPE);
3782 __ blt(if_false);
3783 // Check for callable or undetectable objects => false.
3784 __ lbz(r4, FieldMemOperand(r3, Map::kBitFieldOffset));
3785 __ andi(r0, r4,
3786 Operand((1 << Map::kIsCallable) | (1 << Map::kIsUndetectable)));
3787 Split(eq, if_true, if_false, fall_through, cr0);
3788// clang-format off
3789#define SIMD128_TYPE(TYPE, Type, type, lane_count, lane_type) \
3790 } else if (String::Equals(check, factory->type##_string())) { \
3791 __ JumpIfSmi(r3, if_false); \
3792 __ LoadP(r3, FieldMemOperand(r3, HeapObject::kMapOffset)); \
3793 __ CompareRoot(r3, Heap::k##Type##MapRootIndex); \
3794 Split(eq, if_true, if_false, fall_through);
3795 SIMD128_TYPES(SIMD128_TYPE)
3796#undef SIMD128_TYPE
3797 // clang-format on
3798 } else {
3799 if (if_false != fall_through) __ b(if_false);
3800 }
3801 context()->Plug(if_true, if_false);
3802}
3803
3804
3805void FullCodeGenerator::VisitCompareOperation(CompareOperation* expr) {
3806 Comment cmnt(masm_, "[ CompareOperation");
3807 SetExpressionPosition(expr);
3808
3809 // First we try a fast inlined version of the compare when one of
3810 // the operands is a literal.
3811 if (TryLiteralCompare(expr)) return;
3812
3813 // Always perform the comparison for its control flow. Pack the result
3814 // into the expression's context after the comparison is performed.
3815 Label materialize_true, materialize_false;
3816 Label* if_true = NULL;
3817 Label* if_false = NULL;
3818 Label* fall_through = NULL;
3819 context()->PrepareTest(&materialize_true, &materialize_false, &if_true,
3820 &if_false, &fall_through);
3821
3822 Token::Value op = expr->op();
3823 VisitForStackValue(expr->left());
3824 switch (op) {
3825 case Token::IN:
3826 VisitForStackValue(expr->right());
Ben Murdoch097c5b22016-05-18 11:27:45 +01003827 CallRuntimeWithOperands(Runtime::kHasProperty);
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00003828 PrepareForBailoutBeforeSplit(expr, false, NULL, NULL);
3829 __ CompareRoot(r3, Heap::kTrueValueRootIndex);
3830 Split(eq, if_true, if_false, fall_through);
3831 break;
3832
3833 case Token::INSTANCEOF: {
3834 VisitForAccumulatorValue(expr->right());
Ben Murdoch097c5b22016-05-18 11:27:45 +01003835 PopOperand(r4);
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00003836 InstanceOfStub stub(isolate());
3837 __ CallStub(&stub);
3838 PrepareForBailoutBeforeSplit(expr, false, NULL, NULL);
3839 __ CompareRoot(r3, Heap::kTrueValueRootIndex);
3840 Split(eq, if_true, if_false, fall_through);
3841 break;
3842 }
3843
3844 default: {
3845 VisitForAccumulatorValue(expr->right());
3846 Condition cond = CompareIC::ComputeCondition(op);
Ben Murdoch097c5b22016-05-18 11:27:45 +01003847 PopOperand(r4);
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00003848
3849 bool inline_smi_code = ShouldInlineSmiCase(op);
3850 JumpPatchSite patch_site(masm_);
3851 if (inline_smi_code) {
3852 Label slow_case;
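  // OR the operands together: the combined smi tag bit is clear only if both are
  // smis; otherwise jump to the CompareIC below.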
3853 __ orx(r5, r3, r4);
3854 patch_site.EmitJumpIfNotSmi(r5, &slow_case);
3855 __ cmp(r4, r3);
3856 Split(cond, if_true, if_false, NULL);
3857 __ bind(&slow_case);
3858 }
3859
Ben Murdoch097c5b22016-05-18 11:27:45 +01003860 Handle<Code> ic = CodeFactory::CompareIC(isolate(), op).code();
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00003861 CallIC(ic, expr->CompareOperationFeedbackId());
3862 patch_site.EmitPatchInfo();
3863 PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
3864 __ cmpi(r3, Operand::Zero());
3865 Split(cond, if_true, if_false, fall_through);
3866 }
3867 }
3868
3869 // Convert the result of the comparison into one expected for this
3870 // expression's context.
3871 context()->Plug(if_true, if_false);
3872}
3873
3874
3875void FullCodeGenerator::EmitLiteralCompareNil(CompareOperation* expr,
3876 Expression* sub_expr,
3877 NilValue nil) {
3878 Label materialize_true, materialize_false;
3879 Label* if_true = NULL;
3880 Label* if_false = NULL;
3881 Label* fall_through = NULL;
3882 context()->PrepareTest(&materialize_true, &materialize_false, &if_true,
3883 &if_false, &fall_through);
3884
3885 VisitForAccumulatorValue(sub_expr);
3886 PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
3887 if (expr->op() == Token::EQ_STRICT) {
3888 Heap::RootListIndex nil_value = nil == kNullValue
3889 ? Heap::kNullValueRootIndex
3890 : Heap::kUndefinedValueRootIndex;
3891 __ LoadRoot(r4, nil_value);
3892 __ cmp(r3, r4);
3893 Split(eq, if_true, if_false, fall_through);
3894 } else {
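  // Non-strict comparison against null/undefined: smis never match; otherwise the
  // result is true iff the map's undetectable bit is set.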
Ben Murdochda12d292016-06-02 14:46:10 +01003895 __ JumpIfSmi(r3, if_false);
3896 __ LoadP(r3, FieldMemOperand(r3, HeapObject::kMapOffset));
3897 __ lbz(r4, FieldMemOperand(r3, Map::kBitFieldOffset));
3898 __ andi(r0, r4, Operand(1 << Map::kIsUndetectable));
3899 Split(ne, if_true, if_false, fall_through, cr0);
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00003900 }
3901 context()->Plug(if_true, if_false);
3902}
3903
3904
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00003905Register FullCodeGenerator::result_register() { return r3; }
3906
3907
3908Register FullCodeGenerator::context_register() { return cp; }
3909
Ben Murdochda12d292016-06-02 14:46:10 +01003910void FullCodeGenerator::LoadFromFrameField(int frame_offset, Register value) {
3911 DCHECK_EQ(static_cast<int>(POINTER_SIZE_ALIGN(frame_offset)), frame_offset);
3912 __ LoadP(value, MemOperand(fp, frame_offset), r0);
3913}
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00003914
3915void FullCodeGenerator::StoreToFrameField(int frame_offset, Register value) {
3916 DCHECK_EQ(static_cast<int>(POINTER_SIZE_ALIGN(frame_offset)), frame_offset);
3917 __ StoreP(value, MemOperand(fp, frame_offset), r0);
3918}
3919
3920
3921void FullCodeGenerator::LoadContextField(Register dst, int context_index) {
3922 __ LoadP(dst, ContextMemOperand(cp, context_index), r0);
3923}
3924
3925
3926void FullCodeGenerator::PushFunctionArgumentForContextAllocation() {
3927 Scope* closure_scope = scope()->ClosureScope();
3928 if (closure_scope->is_script_scope() ||
3929 closure_scope->is_module_scope()) {
3930 // Contexts nested in the native context have a canonical empty function
3931 // as their closure, not the anonymous closure containing the global
3932 // code.
3933 __ LoadNativeContextSlot(Context::CLOSURE_INDEX, ip);
3934 } else if (closure_scope->is_eval_scope()) {
3935 // Contexts created by a call to eval have the same closure as the
3936 // context calling eval, not the anonymous closure containing the eval
3937 // code. Fetch it from the context.
3938 __ LoadP(ip, ContextMemOperand(cp, Context::CLOSURE_INDEX));
3939 } else {
3940 DCHECK(closure_scope->is_function_scope());
3941 __ LoadP(ip, MemOperand(fp, JavaScriptFrameConstants::kFunctionOffset));
3942 }
Ben Murdoch097c5b22016-05-18 11:27:45 +01003943 PushOperand(ip);
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00003944}
3945
3946
3947// ----------------------------------------------------------------------------
3948// Non-local control flow support.
3949
3950void FullCodeGenerator::EnterFinallyBlock() {
3951 DCHECK(!result_register().is(r4));
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00003952 // Store pending message while executing finally block.
3953 ExternalReference pending_message_obj =
3954 ExternalReference::address_of_pending_message_obj(isolate());
3955 __ mov(ip, Operand(pending_message_obj));
3956 __ LoadP(r4, MemOperand(ip));
Ben Murdoch097c5b22016-05-18 11:27:45 +01003957 PushOperand(r4);
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00003958
3959 ClearPendingMessage();
3960}
3961
3962
3963void FullCodeGenerator::ExitFinallyBlock() {
3964 DCHECK(!result_register().is(r4));
3965 // Restore pending message from stack.
Ben Murdoch097c5b22016-05-18 11:27:45 +01003966 PopOperand(r4);
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00003967 ExternalReference pending_message_obj =
3968 ExternalReference::address_of_pending_message_obj(isolate());
3969 __ mov(ip, Operand(pending_message_obj));
3970 __ StoreP(r4, MemOperand(ip));
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00003971}
3972
3973
3974void FullCodeGenerator::ClearPendingMessage() {
3975 DCHECK(!result_register().is(r4));
3976 ExternalReference pending_message_obj =
3977 ExternalReference::address_of_pending_message_obj(isolate());
3978 __ LoadRoot(r4, Heap::kTheHoleValueRootIndex);
3979 __ mov(ip, Operand(pending_message_obj));
3980 __ StoreP(r4, MemOperand(ip));
3981}
3982
3983
Ben Murdoch097c5b22016-05-18 11:27:45 +01003984void FullCodeGenerator::DeferredCommands::EmitCommands() {
3985 DCHECK(!result_register().is(r4));
3986 // Restore the accumulator (r3) and token (r4).
3987 __ Pop(r4, result_register());
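  // Dispatch on the saved token: each deferred command tests for its own token and,
  // on a match, performs the corresponding return, re-throw, continue or break.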
3988 for (DeferredCommand cmd : commands_) {
3989 Label skip;
3990 __ CmpSmiLiteral(r4, Smi::FromInt(cmd.token), r0);
3991 __ bne(&skip);
3992 switch (cmd.command) {
3993 case kReturn:
3994 codegen_->EmitUnwindAndReturn();
3995 break;
3996 case kThrow:
3997 __ Push(result_register());
3998 __ CallRuntime(Runtime::kReThrow);
3999 break;
4000 case kContinue:
4001 codegen_->EmitContinue(cmd.target);
4002 break;
4003 case kBreak:
4004 codegen_->EmitBreak(cmd.target);
4005 break;
4006 }
4007 __ bind(&skip);
4008 }
4009}
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00004010
4011#undef __
4012
4013
4014void BackEdgeTable::PatchAt(Code* unoptimized_code, Address pc,
4015 BackEdgeState target_state,
4016 Code* replacement_code) {
4017 Address mov_address = Assembler::target_address_from_return_address(pc);
4018 Address cmp_address = mov_address - 2 * Assembler::kInstrSize;
4019 Isolate* isolate = unoptimized_code->GetIsolate();
4020 CodePatcher patcher(isolate, cmp_address, 1);
4021
4022 switch (target_state) {
4023 case INTERRUPT: {
4024 // <decrement profiling counter>
4025 // cmpi r6, 0
4026 // bge <ok> ;; not changed
4027 // mov r12, <interrupt stub address>
4028 // mtlr r12
4029 // blrl
4030 // <reset profiling counter>
4031 // ok-label
4032 patcher.masm()->cmpi(r6, Operand::Zero());
4033 break;
4034 }
4035 case ON_STACK_REPLACEMENT:
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00004036 // <decrement profiling counter>
4037 // crset
4038 // bge <ok> ;; not changed
4039 // mov r12, <on-stack replacement address>
4040 // mtlr r12
4041 // blrl
4042 // <reset profiling counter>
4043 // ok-label ----- pc_after points here
4044
4045 // Set the LT bit such that bge is a NOP
4046 patcher.masm()->crset(Assembler::encode_crbit(cr7, CR_LT));
4047 break;
4048 }
4049
4050 // Replace the stack check address in the mov sequence with the
4051 // entry address of the replacement code.
4052 Assembler::set_target_address_at(isolate, mov_address, unoptimized_code,
4053 replacement_code->entry());
4054
4055 unoptimized_code->GetHeap()->incremental_marking()->RecordCodeTargetPatch(
4056 unoptimized_code, mov_address, replacement_code);
4057}
4058
4059
4060BackEdgeTable::BackEdgeState BackEdgeTable::GetBackEdgeState(
4061 Isolate* isolate, Code* unoptimized_code, Address pc) {
4062 Address mov_address = Assembler::target_address_from_return_address(pc);
4063 Address cmp_address = mov_address - 2 * Assembler::kInstrSize;
Ben Murdochda12d292016-06-02 14:46:10 +01004064#ifdef DEBUG
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00004065 Address interrupt_address =
4066 Assembler::target_address_at(mov_address, unoptimized_code);
Ben Murdochda12d292016-06-02 14:46:10 +01004067#endif
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00004068
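  // The instruction at cmp_address distinguishes the two states: an unpatched cmpi
  // means the interrupt check is still active, while a crset means the back edge was
  // patched for on-stack replacement.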
4069 if (Assembler::IsCmpImmediate(Assembler::instr_at(cmp_address))) {
4070 DCHECK(interrupt_address == isolate->builtins()->InterruptCheck()->entry());
4071 return INTERRUPT;
4072 }
4073
4074 DCHECK(Assembler::IsCrSet(Assembler::instr_at(cmp_address)));
4075
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00004076 DCHECK(interrupt_address ==
Ben Murdochda12d292016-06-02 14:46:10 +01004077 isolate->builtins()->OnStackReplacement()->entry());
4078 return ON_STACK_REPLACEMENT;
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00004079}
4080} // namespace internal
4081} // namespace v8
4082#endif // V8_TARGET_ARCH_PPC