// Copyright 2014 the V8 project authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.

#if V8_TARGET_ARCH_PPC

#include "src/ast/scopes.h"
#include "src/code-factory.h"
#include "src/code-stubs.h"
#include "src/codegen.h"
#include "src/debug/debug.h"
#include "src/full-codegen/full-codegen.h"
#include "src/ic/ic.h"
#include "src/parsing/parser.h"

#include "src/ppc/code-stubs-ppc.h"
#include "src/ppc/macro-assembler-ppc.h"

namespace v8 {
namespace internal {

#define __ ACCESS_MASM(masm())

// A patch site is a location in the code that can be patched. This class
// has a number of methods to emit the patchable code and a method,
// EmitPatchInfo, to record a marker back to the patchable code. The marker
// is a cmpi rx, #yyy instruction, where x * kOff16Mask + yyy (using the raw
// 16-bit immediate value) is the delta from the pc to the first instruction
// of the patchable code.
// See PatchInlinedSmiCode in ic-ppc.cc for the code that patches it.
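// For example (illustrative values only): a delta of 0x12344 (74564) would
// be recorded as cmpi r1, #0x2345, since 1 * kOff16Mask + 0x2345 == 0x12344
// with kOff16Mask == 0xffff.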
class JumpPatchSite BASE_EMBEDDED {
 public:
  explicit JumpPatchSite(MacroAssembler* masm) : masm_(masm) {
#ifdef DEBUG
    info_emitted_ = false;
#endif
  }

  ~JumpPatchSite() { DCHECK(patch_site_.is_bound() == info_emitted_); }

  // When initially emitting this, ensure that a jump is always generated to
  // skip the inlined smi code.
  void EmitJumpIfNotSmi(Register reg, Label* target) {
    DCHECK(!patch_site_.is_bound() && !info_emitted_);
    Assembler::BlockTrampolinePoolScope block_trampoline_pool(masm_);
    __ bind(&patch_site_);
    __ cmp(reg, reg, cr0);
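    // Comparing a register against itself always sets the EQ bit of cr0, so
    // the branch below is unconditionally taken until the site is patched
    // into a real smi-tag test by PatchInlinedSmiCode.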
    __ beq(target, cr0);  // Always taken before patched.
  }

  // When initially emitting this, ensure that a jump is never generated to
  // skip the inlined smi code.
  void EmitJumpIfSmi(Register reg, Label* target) {
    Assembler::BlockTrampolinePoolScope block_trampoline_pool(masm_);
    DCHECK(!patch_site_.is_bound() && !info_emitted_);
    __ bind(&patch_site_);
    __ cmp(reg, reg, cr0);
    __ bne(target, cr0);  // Never taken before patched.
  }

  void EmitPatchInfo() {
    if (patch_site_.is_bound()) {
      int delta_to_patch_site = masm_->InstructionsGeneratedSince(&patch_site_);
      Register reg;
      // The register code encodes the high bits of the delta; the 16-bit
      // immediate holds the remainder.
      reg.set_code(delta_to_patch_site / kOff16Mask);
      __ cmpi(reg, Operand(delta_to_patch_site % kOff16Mask));
#ifdef DEBUG
      info_emitted_ = true;
#endif
    } else {
      __ nop();  // Signals no inlined code.
    }
  }

 private:
  MacroAssembler* masm() { return masm_; }
  MacroAssembler* masm_;
  Label patch_site_;
#ifdef DEBUG
  bool info_emitted_;
#endif
};


// Generate code for a JS function. On entry to the function the receiver
// and arguments have been pushed on the stack left to right. The actual
// argument count matches the formal parameter count expected by the
// function.
//
// The live registers are:
//   o r4: the JS function object being called (i.e., ourselves)
//   o r6: the new target value
//   o cp: our context
//   o fp: our caller's frame pointer (aka r31)
//   o sp: stack pointer
//   o lr: return address
//   o ip: our own function entry (required by the prologue)
//
// The function builds a JS frame. Please see JavaScriptFrameConstants in
// frames-ppc.h for its layout.
void FullCodeGenerator::Generate() {
  CompilationInfo* info = info_;
  profiling_counter_ = isolate()->factory()->NewCell(
      Handle<Smi>(Smi::FromInt(FLAG_interrupt_budget), isolate()));
  SetFunctionPosition(literal());
  Comment cmnt(masm_, "[ function compiled by full code generator");

  ProfileEntryHookStub::MaybeCallEntryHook(masm_);

  if (FLAG_debug_code && info->ExpectsJSReceiverAsReceiver()) {
    int receiver_offset = info->scope()->num_parameters() * kPointerSize;
    __ LoadP(r5, MemOperand(sp, receiver_offset), r0);
    __ AssertNotSmi(r5);
    __ CompareObjectType(r5, r5, no_reg, FIRST_JS_RECEIVER_TYPE);
    __ Assert(ge, kSloppyFunctionExpectsJSReceiverReceiver);
  }

  // Open a frame scope to indicate that there is a frame on the stack. The
  // MANUAL indicates that the scope shouldn't actually generate code to set up
  // the frame (that is done below).
  FrameScope frame_scope(masm_, StackFrame::MANUAL);
  int prologue_offset = masm_->pc_offset();

  if (prologue_offset) {
    // The prologue logic requires its starting address in ip and the
    // corresponding offset from the function entry.
    prologue_offset += Instruction::kInstrSize;
    __ addi(ip, ip, Operand(prologue_offset));
  }
  info->set_prologue_offset(prologue_offset);
  __ Prologue(info->GeneratePreagedPrologue(), ip, prologue_offset);

  {
    Comment cmnt(masm_, "[ Allocate locals");
    int locals_count = info->scope()->num_stack_slots();
    // Generators allocate locals, if any, in context slots.
    DCHECK(!IsGeneratorFunction(info->literal()->kind()) || locals_count == 0);
    OperandStackDepthIncrement(locals_count);
    if (locals_count > 0) {
      if (locals_count >= 128) {
        Label ok;
        __ Add(ip, sp, -(locals_count * kPointerSize), r0);
        __ LoadRoot(r5, Heap::kRealStackLimitRootIndex);
        __ cmpl(ip, r5);
        __ bc_short(ge, &ok);
        __ CallRuntime(Runtime::kThrowStackOverflow);
        __ bind(&ok);
      }
      __ LoadRoot(ip, Heap::kUndefinedValueRootIndex);
      int kMaxPushes = FLAG_optimize_for_size ? 4 : 32;
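      // Illustrative example: with 70 locals and kMaxPushes == 32, the
      // counted loop below runs twice (64 pushes) and the remaining 6 pushes
      // are emitted straight-line.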
      if (locals_count >= kMaxPushes) {
        int loop_iterations = locals_count / kMaxPushes;
        __ mov(r5, Operand(loop_iterations));
        __ mtctr(r5);
        Label loop_header;
        __ bind(&loop_header);
        // Do pushes.
        for (int i = 0; i < kMaxPushes; i++) {
          __ push(ip);
        }
        // Continue loop if not done.
        __ bdnz(&loop_header);
      }
      int remaining = locals_count % kMaxPushes;
      // Emit the remaining pushes.
      for (int i = 0; i < remaining; i++) {
        __ push(ip);
      }
    }
  }

  bool function_in_register_r4 = true;

  // Possibly allocate a local context.
  if (info->scope()->num_heap_slots() > 0) {
    // Argument to NewContext is the function, which is still in r4.
    Comment cmnt(masm_, "[ Allocate context");
    bool need_write_barrier = true;
    int slots = info->scope()->num_heap_slots() - Context::MIN_CONTEXT_SLOTS;
    if (info->scope()->is_script_scope()) {
      __ push(r4);
      __ Push(info->scope()->GetScopeInfo(info->isolate()));
      __ CallRuntime(Runtime::kNewScriptContext);
      PrepareForBailoutForId(BailoutId::ScriptContext(),
                             BailoutState::TOS_REGISTER);
      // The new target value is not used; clobbering it is safe.
      DCHECK_NULL(info->scope()->new_target_var());
    } else {
      if (info->scope()->new_target_var() != nullptr) {
        __ push(r6);  // Preserve new target.
      }
      if (slots <= FastNewContextStub::kMaximumSlots) {
        FastNewContextStub stub(isolate(), slots);
        __ CallStub(&stub);
        // Result of FastNewContextStub is always in new space.
        need_write_barrier = false;
      } else {
        __ push(r4);
        __ CallRuntime(Runtime::kNewFunctionContext);
      }
      if (info->scope()->new_target_var() != nullptr) {
        __ pop(r6);  // Restore new target.
      }
    }
    function_in_register_r4 = false;
    // Context is returned in r3. It replaces the context passed to us.
    // It's saved in the stack and kept live in cp.
    __ mr(cp, r3);
    __ StoreP(r3, MemOperand(fp, StandardFrameConstants::kContextOffset));
    // Copy any necessary parameters into the context.
    int num_parameters = info->scope()->num_parameters();
    int first_parameter = info->scope()->has_this_declaration() ? -1 : 0;
    for (int i = first_parameter; i < num_parameters; i++) {
      Variable* var = (i == -1) ? scope()->receiver() : scope()->parameter(i);
      if (var->IsContextSlot()) {
        int parameter_offset = StandardFrameConstants::kCallerSPOffset +
                               (num_parameters - 1 - i) * kPointerSize;
        // Load parameter from stack.
        __ LoadP(r3, MemOperand(fp, parameter_offset), r0);
        // Store it in the context.
        MemOperand target = ContextMemOperand(cp, var->index());
        __ StoreP(r3, target, r0);

        // Update the write barrier.
        if (need_write_barrier) {
          __ RecordWriteContextSlot(cp, target.offset(), r3, r5,
                                    kLRHasBeenSaved, kDontSaveFPRegs);
        } else if (FLAG_debug_code) {
          Label done;
          __ JumpIfInNewSpace(cp, r3, &done);
          __ Abort(kExpectedNewSpaceObject);
          __ bind(&done);
        }
      }
    }
  }

  // The registers holding this function and new target are both trashed in
  // case we bail out here. But since that can happen only when new target is
  // not used and we allocate a context, the value of |function_in_register_r4|
  // is correct.
  PrepareForBailoutForId(BailoutId::FunctionContext(),
                         BailoutState::NO_REGISTERS);

  // Possibly set up a local binding to the this function, which is used in
  // derived constructors with super calls.
  Variable* this_function_var = scope()->this_function_var();
  if (this_function_var != nullptr) {
    Comment cmnt(masm_, "[ This function");
    if (!function_in_register_r4) {
      __ LoadP(r4, MemOperand(fp, JavaScriptFrameConstants::kFunctionOffset));
      // The write barrier clobbers the register again; keep it marked as such.
    }
    SetVar(this_function_var, r4, r3, r5);
  }

  // Possibly set up a local binding to the new target value.
  Variable* new_target_var = scope()->new_target_var();
  if (new_target_var != nullptr) {
    Comment cmnt(masm_, "[ new.target");
    SetVar(new_target_var, r6, r3, r5);
  }

  // Possibly allocate a rest parameter array.
  int rest_index;
  Variable* rest_param = scope()->rest_parameter(&rest_index);
  if (rest_param) {
    Comment cmnt(masm_, "[ Allocate rest parameter array");
    if (!function_in_register_r4) {
      __ LoadP(r4, MemOperand(fp, JavaScriptFrameConstants::kFunctionOffset));
    }
    FastNewRestParameterStub stub(isolate());
    __ CallStub(&stub);
    function_in_register_r4 = false;
    SetVar(rest_param, r3, r4, r5);
  }

  Variable* arguments = scope()->arguments();
  if (arguments != NULL) {
    // Function uses arguments object.
    Comment cmnt(masm_, "[ Allocate arguments object");
    if (!function_in_register_r4) {
      // Load this again, if it's used by the local context below.
      __ LoadP(r4, MemOperand(fp, JavaScriptFrameConstants::kFunctionOffset));
    }
    if (is_strict(language_mode()) || !has_simple_parameters()) {
      FastNewStrictArgumentsStub stub(isolate());
      __ CallStub(&stub);
    } else if (literal()->has_duplicate_parameters()) {
      __ Push(r4);
      __ CallRuntime(Runtime::kNewSloppyArguments_Generic);
    } else {
      FastNewSloppyArgumentsStub stub(isolate());
      __ CallStub(&stub);
    }

    SetVar(arguments, r3, r4, r5);
  }

  if (FLAG_trace) {
    __ CallRuntime(Runtime::kTraceEnter);
  }

  // Visit the declarations and body.
  PrepareForBailoutForId(BailoutId::FunctionEntry(),
                         BailoutState::NO_REGISTERS);
  {
    Comment cmnt(masm_, "[ Declarations");
    VisitDeclarations(scope()->declarations());
  }

  // Assert that the declarations do not use ICs. Otherwise the debugger
  // won't be able to redirect a PC at an IC to the correct IC in newly
  // recompiled code.
  DCHECK_EQ(0, ic_total_count_);

  {
    Comment cmnt(masm_, "[ Stack check");
    PrepareForBailoutForId(BailoutId::Declarations(),
                           BailoutState::NO_REGISTERS);
    Label ok;
    __ LoadRoot(ip, Heap::kStackLimitRootIndex);
    __ cmpl(sp, ip);
    __ bc_short(ge, &ok);
    __ Call(isolate()->builtins()->StackCheck(), RelocInfo::CODE_TARGET);
    __ bind(&ok);
  }

  {
    Comment cmnt(masm_, "[ Body");
    DCHECK(loop_depth() == 0);
    VisitStatements(literal()->body());
    DCHECK(loop_depth() == 0);
  }

  // Always emit a 'return undefined' in case control fell off the end of
  // the body.
  {
    Comment cmnt(masm_, "[ return <undefined>;");
    __ LoadRoot(r3, Heap::kUndefinedValueRootIndex);
  }
  EmitReturnSequence();

  if (HasStackOverflow()) {
    masm_->AbortConstantPoolBuilding();
  }
}


void FullCodeGenerator::ClearAccumulator() {
  __ LoadSmiLiteral(r3, Smi::FromInt(0));
}


void FullCodeGenerator::EmitProfilingCounterDecrement(int delta) {
  __ mov(r5, Operand(profiling_counter_));
  __ LoadP(r6, FieldMemOperand(r5, Cell::kValueOffset));
  __ SubSmiLiteral(r6, r6, Smi::FromInt(delta), r0);
  __ StoreP(r6, FieldMemOperand(r5, Cell::kValueOffset), r0);
}


void FullCodeGenerator::EmitProfilingCounterReset() {
  int reset_value = FLAG_interrupt_budget;
  __ mov(r5, Operand(profiling_counter_));
  __ LoadSmiLiteral(r6, Smi::FromInt(reset_value));
  __ StoreP(r6, FieldMemOperand(r5, Cell::kValueOffset), r0);
}


void FullCodeGenerator::EmitBackEdgeBookkeeping(IterationStatement* stmt,
                                                Label* back_edge_target) {
  Comment cmnt(masm_, "[ Back edge bookkeeping");
  Label ok;

  DCHECK(back_edge_target->is_bound());
  int distance = masm_->SizeOfCodeGeneratedSince(back_edge_target) +
                 kCodeSizeMultiplier / 2;
  int weight = Min(kMaxBackEdgeWeight, Max(1, distance / kCodeSizeMultiplier));
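  // The weight scales with the code size of the loop body, so larger loops
  // burn through the interrupt budget faster and reach the interrupt check
  // below in fewer iterations.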
  EmitProfilingCounterDecrement(weight);
  {
    Assembler::BlockTrampolinePoolScope block_trampoline_pool(masm_);
    Assembler::BlockConstantPoolEntrySharingScope prevent_entry_sharing(masm_);
    // BackEdgeTable::PatchAt manipulates this sequence.
    __ cmpi(r6, Operand::Zero());
    __ bc_short(ge, &ok);
    __ Call(isolate()->builtins()->InterruptCheck(), RelocInfo::CODE_TARGET);

    // Record a mapping of this PC offset to the OSR id. This is used to find
    // the AST id from the unoptimized code in order to use it as a key into
    // the deoptimization input data found in the optimized code.
    RecordBackEdge(stmt->OsrEntryId());
  }
  EmitProfilingCounterReset();

  __ bind(&ok);
  PrepareForBailoutForId(stmt->EntryId(), BailoutState::NO_REGISTERS);
  // Record a mapping of the OSR id to this PC. This is used if the OSR
  // entry becomes the target of a bailout. We don't expect it to be, but
  // we want it to work if it is.
  PrepareForBailoutForId(stmt->OsrEntryId(), BailoutState::NO_REGISTERS);
}

void FullCodeGenerator::EmitProfilingCounterHandlingForReturnSequence(
    bool is_tail_call) {
  // Pretend that the exit is a backwards jump to the entry.
  int weight = 1;
  if (info_->ShouldSelfOptimize()) {
    weight = FLAG_interrupt_budget / FLAG_self_opt_count;
  } else {
    int distance = masm_->pc_offset() + kCodeSizeMultiplier / 2;
    weight = Min(kMaxBackEdgeWeight, Max(1, distance / kCodeSizeMultiplier));
  }
  EmitProfilingCounterDecrement(weight);
  Label ok;
  __ cmpi(r6, Operand::Zero());
  __ bge(&ok);
  // We don't need to save the result register if we are going to do a tail
  // call.
  if (!is_tail_call) {
    __ push(r3);
  }
  __ Call(isolate()->builtins()->InterruptCheck(), RelocInfo::CODE_TARGET);
  if (!is_tail_call) {
    __ pop(r3);
  }
  EmitProfilingCounterReset();
  __ bind(&ok);
}

void FullCodeGenerator::EmitReturnSequence() {
  Comment cmnt(masm_, "[ Return sequence");
  if (return_label_.is_bound()) {
    __ b(&return_label_);
  } else {
    __ bind(&return_label_);
    if (FLAG_trace) {
      // Push the return value on the stack as the parameter.
      // Runtime::TraceExit returns its parameter in r3.
      __ push(r3);
      __ CallRuntime(Runtime::kTraceExit);
    }
    EmitProfilingCounterHandlingForReturnSequence(false);

    // Make sure that the constant pool is not emitted inside the return
    // sequence.
    {
      Assembler::BlockTrampolinePoolScope block_trampoline_pool(masm_);
      int32_t arg_count = info_->scope()->num_parameters() + 1;
      int32_t sp_delta = arg_count * kPointerSize;
      SetReturnPosition(literal());
      __ LeaveFrame(StackFrame::JAVA_SCRIPT, sp_delta);
      __ blr();
    }
  }
}

void FullCodeGenerator::RestoreContext() {
  __ LoadP(cp, MemOperand(fp, StandardFrameConstants::kContextOffset));
}

void FullCodeGenerator::StackValueContext::Plug(Variable* var) const {
  DCHECK(var->IsStackAllocated() || var->IsContextSlot());
  codegen()->GetVar(result_register(), var);
  codegen()->PushOperand(result_register());
}


void FullCodeGenerator::EffectContext::Plug(Heap::RootListIndex index) const {}


void FullCodeGenerator::AccumulatorValueContext::Plug(
    Heap::RootListIndex index) const {
  __ LoadRoot(result_register(), index);
}


void FullCodeGenerator::StackValueContext::Plug(
    Heap::RootListIndex index) const {
  __ LoadRoot(result_register(), index);
  codegen()->PushOperand(result_register());
}


void FullCodeGenerator::TestContext::Plug(Heap::RootListIndex index) const {
  codegen()->PrepareForBailoutBeforeSplit(condition(), true, true_label_,
                                          false_label_);
  if (index == Heap::kUndefinedValueRootIndex ||
      index == Heap::kNullValueRootIndex ||
      index == Heap::kFalseValueRootIndex) {
    if (false_label_ != fall_through_) __ b(false_label_);
  } else if (index == Heap::kTrueValueRootIndex) {
    if (true_label_ != fall_through_) __ b(true_label_);
  } else {
    __ LoadRoot(result_register(), index);
    codegen()->DoTest(this);
  }
}


void FullCodeGenerator::EffectContext::Plug(Handle<Object> lit) const {}


void FullCodeGenerator::AccumulatorValueContext::Plug(
    Handle<Object> lit) const {
  __ mov(result_register(), Operand(lit));
}


void FullCodeGenerator::StackValueContext::Plug(Handle<Object> lit) const {
  // Immediates cannot be pushed directly.
  __ mov(result_register(), Operand(lit));
  codegen()->PushOperand(result_register());
}


void FullCodeGenerator::TestContext::Plug(Handle<Object> lit) const {
  codegen()->PrepareForBailoutBeforeSplit(condition(), true, true_label_,
                                          false_label_);
  DCHECK(lit->IsNull() || lit->IsUndefined() || !lit->IsUndetectable());
  if (lit->IsUndefined() || lit->IsNull() || lit->IsFalse()) {
    if (false_label_ != fall_through_) __ b(false_label_);
  } else if (lit->IsTrue() || lit->IsJSObject()) {
    if (true_label_ != fall_through_) __ b(true_label_);
  } else if (lit->IsString()) {
    if (String::cast(*lit)->length() == 0) {
      if (false_label_ != fall_through_) __ b(false_label_);
    } else {
      if (true_label_ != fall_through_) __ b(true_label_);
    }
  } else if (lit->IsSmi()) {
    if (Smi::cast(*lit)->value() == 0) {
      if (false_label_ != fall_through_) __ b(false_label_);
    } else {
      if (true_label_ != fall_through_) __ b(true_label_);
    }
  } else {
    // For simplicity we always test the accumulator register.
    __ mov(result_register(), Operand(lit));
    codegen()->DoTest(this);
  }
}


void FullCodeGenerator::StackValueContext::DropAndPlug(int count,
                                                       Register reg) const {
  DCHECK(count > 0);
  if (count > 1) codegen()->DropOperands(count - 1);
  __ StoreP(reg, MemOperand(sp, 0));
}


void FullCodeGenerator::EffectContext::Plug(Label* materialize_true,
                                            Label* materialize_false) const {
  DCHECK(materialize_true == materialize_false);
  __ bind(materialize_true);
}


void FullCodeGenerator::AccumulatorValueContext::Plug(
    Label* materialize_true, Label* materialize_false) const {
  Label done;
  __ bind(materialize_true);
  __ LoadRoot(result_register(), Heap::kTrueValueRootIndex);
  __ b(&done);
  __ bind(materialize_false);
  __ LoadRoot(result_register(), Heap::kFalseValueRootIndex);
  __ bind(&done);
}


void FullCodeGenerator::StackValueContext::Plug(
    Label* materialize_true, Label* materialize_false) const {
  Label done;
  __ bind(materialize_true);
  __ LoadRoot(ip, Heap::kTrueValueRootIndex);
  __ b(&done);
  __ bind(materialize_false);
  __ LoadRoot(ip, Heap::kFalseValueRootIndex);
  __ bind(&done);
  codegen()->PushOperand(ip);
}


void FullCodeGenerator::TestContext::Plug(Label* materialize_true,
                                          Label* materialize_false) const {
  DCHECK(materialize_true == true_label_);
  DCHECK(materialize_false == false_label_);
}


void FullCodeGenerator::AccumulatorValueContext::Plug(bool flag) const {
  Heap::RootListIndex value_root_index =
      flag ? Heap::kTrueValueRootIndex : Heap::kFalseValueRootIndex;
  __ LoadRoot(result_register(), value_root_index);
}


void FullCodeGenerator::StackValueContext::Plug(bool flag) const {
  Heap::RootListIndex value_root_index =
      flag ? Heap::kTrueValueRootIndex : Heap::kFalseValueRootIndex;
  __ LoadRoot(ip, value_root_index);
  codegen()->PushOperand(ip);
}


void FullCodeGenerator::TestContext::Plug(bool flag) const {
  codegen()->PrepareForBailoutBeforeSplit(condition(), true, true_label_,
                                          false_label_);
  if (flag) {
    if (true_label_ != fall_through_) __ b(true_label_);
  } else {
    if (false_label_ != fall_through_) __ b(false_label_);
  }
}


void FullCodeGenerator::DoTest(Expression* condition, Label* if_true,
                               Label* if_false, Label* fall_through) {
  Handle<Code> ic = ToBooleanICStub::GetUninitialized(isolate());
  CallIC(ic, condition->test_id());
  __ CompareRoot(result_register(), Heap::kTrueValueRootIndex);
  Split(eq, if_true, if_false, fall_through);
}


void FullCodeGenerator::Split(Condition cond, Label* if_true, Label* if_false,
                              Label* fall_through, CRegister cr) {
  if (if_false == fall_through) {
    __ b(cond, if_true, cr);
  } else if (if_true == fall_through) {
    __ b(NegateCondition(cond), if_false, cr);
  } else {
    __ b(cond, if_true, cr);
    __ b(if_false);
  }
}
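
// For example, Split(eq, if_true, if_false, if_false) emits a single
// conditional branch to if_true and falls through to if_false; only the
// general case needs the extra unconditional jump.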


MemOperand FullCodeGenerator::StackOperand(Variable* var) {
  DCHECK(var->IsStackAllocated());
  // Offset is negative because higher indexes are at lower addresses.
  int offset = -var->index() * kPointerSize;
  // Adjust by a (parameter or local) base offset.
  if (var->IsParameter()) {
    offset += (info_->scope()->num_parameters() + 1) * kPointerSize;
  } else {
    offset += JavaScriptFrameConstants::kLocal0Offset;
  }
  return MemOperand(fp, offset);
}


MemOperand FullCodeGenerator::VarOperand(Variable* var, Register scratch) {
  DCHECK(var->IsContextSlot() || var->IsStackAllocated());
  if (var->IsContextSlot()) {
    int context_chain_length = scope()->ContextChainLength(var->scope());
    __ LoadContext(scratch, context_chain_length);
    return ContextMemOperand(scratch, var->index());
  } else {
    return StackOperand(var);
  }
}


void FullCodeGenerator::GetVar(Register dest, Variable* var) {
  // Use destination as scratch.
  MemOperand location = VarOperand(var, dest);
  __ LoadP(dest, location, r0);
}


void FullCodeGenerator::SetVar(Variable* var, Register src, Register scratch0,
                               Register scratch1) {
  DCHECK(var->IsContextSlot() || var->IsStackAllocated());
  DCHECK(!scratch0.is(src));
  DCHECK(!scratch0.is(scratch1));
  DCHECK(!scratch1.is(src));
  MemOperand location = VarOperand(var, scratch0);
  __ StoreP(src, location, r0);

  // Emit the write barrier code if the location is in the heap.
  if (var->IsContextSlot()) {
    __ RecordWriteContextSlot(scratch0, location.offset(), src, scratch1,
                              kLRHasBeenSaved, kDontSaveFPRegs);
  }
}


void FullCodeGenerator::PrepareForBailoutBeforeSplit(Expression* expr,
                                                     bool should_normalize,
                                                     Label* if_true,
                                                     Label* if_false) {
  // Only prepare for bailouts before splits if we're in a test
  // context. Otherwise, we let the Visit function deal with the
  // preparation to avoid preparing with the same AST id twice.
  if (!context()->IsTest()) return;

  Label skip;
  if (should_normalize) __ b(&skip);
  PrepareForBailout(expr, BailoutState::TOS_REGISTER);
  if (should_normalize) {
    __ LoadRoot(ip, Heap::kTrueValueRootIndex);
    __ cmp(r3, ip);
    Split(eq, if_true, if_false, NULL);
    __ bind(&skip);
  }
}


void FullCodeGenerator::EmitDebugCheckDeclarationContext(Variable* variable) {
  // The variable in the declaration always resides in the current function
  // context.
  DCHECK_EQ(0, scope()->ContextChainLength(variable->scope()));
  if (FLAG_debug_code) {
    // Check that we're not inside a with or catch context.
    __ LoadP(r4, FieldMemOperand(cp, HeapObject::kMapOffset));
    __ CompareRoot(r4, Heap::kWithContextMapRootIndex);
    __ Check(ne, kDeclarationInWithContext);
    __ CompareRoot(r4, Heap::kCatchContextMapRootIndex);
    __ Check(ne, kDeclarationInCatchContext);
  }
}


void FullCodeGenerator::VisitVariableDeclaration(
    VariableDeclaration* declaration) {
  // If it was not possible to allocate the variable at compile time, we
  // need to "declare" it at runtime to make sure it actually exists in the
  // local context.
  VariableProxy* proxy = declaration->proxy();
  VariableMode mode = declaration->mode();
  Variable* variable = proxy->var();
  bool hole_init = mode == LET || mode == CONST;
  switch (variable->location()) {
    case VariableLocation::GLOBAL:
    case VariableLocation::UNALLOCATED:
      DCHECK(!variable->binding_needs_init());
      globals_->Add(variable->name(), zone());
      globals_->Add(isolate()->factory()->undefined_value(), zone());
      break;

    case VariableLocation::PARAMETER:
    case VariableLocation::LOCAL:
      if (hole_init) {
        Comment cmnt(masm_, "[ VariableDeclaration");
        __ LoadRoot(ip, Heap::kTheHoleValueRootIndex);
        __ StoreP(ip, StackOperand(variable));
      }
      break;

    case VariableLocation::CONTEXT:
      if (hole_init) {
        Comment cmnt(masm_, "[ VariableDeclaration");
        EmitDebugCheckDeclarationContext(variable);
        __ LoadRoot(ip, Heap::kTheHoleValueRootIndex);
        __ StoreP(ip, ContextMemOperand(cp, variable->index()), r0);
        // No write barrier since the_hole_value is in old space.
        PrepareForBailoutForId(proxy->id(), BailoutState::NO_REGISTERS);
      }
      break;

    case VariableLocation::LOOKUP: {
      Comment cmnt(masm_, "[ VariableDeclaration");
      __ mov(r5, Operand(variable->name()));
      // Declaration nodes are always introduced in one of four modes.
      DCHECK(IsDeclaredVariableMode(mode));
      // Push initial value, if any.
      // Note: For variables we must not push an initial value (such as
      // 'undefined') because we may have a (legal) redeclaration and we
      // must not destroy the current value.
      if (hole_init) {
        __ LoadRoot(r3, Heap::kTheHoleValueRootIndex);
      } else {
        __ LoadSmiLiteral(r3, Smi::FromInt(0));  // Indicates no initial value.
      }
      __ Push(r5, r3);
      __ Push(Smi::FromInt(variable->DeclarationPropertyAttributes()));
      __ CallRuntime(Runtime::kDeclareLookupSlot);
      PrepareForBailoutForId(proxy->id(), BailoutState::NO_REGISTERS);
      break;
    }
  }
}


void FullCodeGenerator::VisitFunctionDeclaration(
    FunctionDeclaration* declaration) {
  VariableProxy* proxy = declaration->proxy();
  Variable* variable = proxy->var();
  switch (variable->location()) {
    case VariableLocation::GLOBAL:
    case VariableLocation::UNALLOCATED: {
      globals_->Add(variable->name(), zone());
      Handle<SharedFunctionInfo> function =
          Compiler::GetSharedFunctionInfo(declaration->fun(), script(), info_);
      // Check for stack-overflow exception.
      if (function.is_null()) return SetStackOverflow();
      globals_->Add(function, zone());
      break;
    }

    case VariableLocation::PARAMETER:
    case VariableLocation::LOCAL: {
      Comment cmnt(masm_, "[ FunctionDeclaration");
      VisitForAccumulatorValue(declaration->fun());
      __ StoreP(result_register(), StackOperand(variable));
      break;
    }

    case VariableLocation::CONTEXT: {
      Comment cmnt(masm_, "[ FunctionDeclaration");
      EmitDebugCheckDeclarationContext(variable);
      VisitForAccumulatorValue(declaration->fun());
      __ StoreP(result_register(), ContextMemOperand(cp, variable->index()),
                r0);
      int offset = Context::SlotOffset(variable->index());
      // We know that we have written a function, which is not a smi.
      __ RecordWriteContextSlot(cp, offset, result_register(), r5,
                                kLRHasBeenSaved, kDontSaveFPRegs,
                                EMIT_REMEMBERED_SET, OMIT_SMI_CHECK);
      PrepareForBailoutForId(proxy->id(), BailoutState::NO_REGISTERS);
      break;
    }

    case VariableLocation::LOOKUP: {
      Comment cmnt(masm_, "[ FunctionDeclaration");
      __ mov(r5, Operand(variable->name()));
      PushOperand(r5);
      // Push initial value for function declaration.
      VisitForStackValue(declaration->fun());
      PushOperand(Smi::FromInt(variable->DeclarationPropertyAttributes()));
      CallRuntimeWithOperands(Runtime::kDeclareLookupSlot);
      PrepareForBailoutForId(proxy->id(), BailoutState::NO_REGISTERS);
      break;
    }
  }
}


void FullCodeGenerator::DeclareGlobals(Handle<FixedArray> pairs) {
  // Call the runtime to declare the globals.
  __ mov(r4, Operand(pairs));
  __ LoadSmiLiteral(r3, Smi::FromInt(DeclareGlobalsFlags()));
  __ Push(r4, r3);
  __ CallRuntime(Runtime::kDeclareGlobals);
  // Return value is ignored.
}


void FullCodeGenerator::DeclareModules(Handle<FixedArray> descriptions) {
  // Call the runtime to declare the modules.
  __ Push(descriptions);
  __ CallRuntime(Runtime::kDeclareModules);
  // Return value is ignored.
}
857
858void FullCodeGenerator::VisitSwitchStatement(SwitchStatement* stmt) {
859 Comment cmnt(masm_, "[ SwitchStatement");
860 Breakable nested_statement(this, stmt);
861 SetStatementPosition(stmt);
862
863 // Keep the switch value on the stack until a case matches.
864 VisitForStackValue(stmt->tag());
Ben Murdochc5610432016-08-08 18:44:38 +0100865 PrepareForBailoutForId(stmt->EntryId(), BailoutState::NO_REGISTERS);
Ben Murdoch4a90d5f2016-03-22 12:00:34 +0000866
867 ZoneList<CaseClause*>* clauses = stmt->cases();
868 CaseClause* default_clause = NULL; // Can occur anywhere in the list.
869
870 Label next_test; // Recycled for each test.
871 // Compile all the tests with branches to their bodies.
872 for (int i = 0; i < clauses->length(); i++) {
873 CaseClause* clause = clauses->at(i);
874 clause->body_target()->Unuse();
875
876 // The default is not a test, but remember it as final fall through.
877 if (clause->is_default()) {
878 default_clause = clause;
879 continue;
880 }
881
882 Comment cmnt(masm_, "[ Case comparison");
883 __ bind(&next_test);
884 next_test.Unuse();
885
886 // Compile the label expression.
887 VisitForAccumulatorValue(clause->label());
888
889 // Perform the comparison as if via '==='.
890 __ LoadP(r4, MemOperand(sp, 0)); // Switch value.
891 bool inline_smi_code = ShouldInlineSmiCase(Token::EQ_STRICT);
892 JumpPatchSite patch_site(masm_);
893 if (inline_smi_code) {
894 Label slow_case;
895 __ orx(r5, r4, r3);
896 patch_site.EmitJumpIfNotSmi(r5, &slow_case);
897
898 __ cmp(r4, r3);
899 __ bne(&next_test);
900 __ Drop(1); // Switch value is no longer needed.
901 __ b(clause->body_target());
902 __ bind(&slow_case);
903 }
904
905 // Record position before stub call for type feedback.
906 SetExpressionPosition(clause);
Ben Murdoch097c5b22016-05-18 11:27:45 +0100907 Handle<Code> ic =
908 CodeFactory::CompareIC(isolate(), Token::EQ_STRICT).code();
Ben Murdoch4a90d5f2016-03-22 12:00:34 +0000909 CallIC(ic, clause->CompareId());
910 patch_site.EmitPatchInfo();
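    // The marker recorded here lets PatchInlinedSmiCode find and rewrite the
    // inlined smi comparison above once type feedback warrants it.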

    Label skip;
    __ b(&skip);
    PrepareForBailout(clause, BailoutState::TOS_REGISTER);
    __ LoadRoot(ip, Heap::kTrueValueRootIndex);
    __ cmp(r3, ip);
    __ bne(&next_test);
    __ Drop(1);
    __ b(clause->body_target());
    __ bind(&skip);

    __ cmpi(r3, Operand::Zero());
    __ bne(&next_test);
    __ Drop(1);  // Switch value is no longer needed.
    __ b(clause->body_target());
  }

  // Discard the test value and jump to the default if present, otherwise to
  // the end of the statement.
  __ bind(&next_test);
  DropOperands(1);  // Switch value is no longer needed.
  if (default_clause == NULL) {
    __ b(nested_statement.break_label());
  } else {
    __ b(default_clause->body_target());
  }

  // Compile all the case bodies.
  for (int i = 0; i < clauses->length(); i++) {
    Comment cmnt(masm_, "[ Case body");
    CaseClause* clause = clauses->at(i);
    __ bind(clause->body_target());
    PrepareForBailoutForId(clause->EntryId(), BailoutState::NO_REGISTERS);
    VisitStatements(clause->statements());
  }

  __ bind(nested_statement.break_label());
  PrepareForBailoutForId(stmt->ExitId(), BailoutState::NO_REGISTERS);
}


void FullCodeGenerator::VisitForInStatement(ForInStatement* stmt) {
  Comment cmnt(masm_, "[ ForInStatement");
  SetStatementPosition(stmt, SKIP_BREAK);

  FeedbackVectorSlot slot = stmt->ForInFeedbackSlot();

  // Get the object to enumerate over.
  SetExpressionAsStatementPosition(stmt->enumerable());
  VisitForAccumulatorValue(stmt->enumerable());
  OperandStackDepthIncrement(5);

  Label loop, exit;
  Iteration loop_statement(this, stmt);
  increment_loop_depth();

  // If the object is null or undefined, skip over the loop, otherwise convert
  // it to a JS receiver. See ECMA-262 version 5, section 12.6.4.
  Label convert, done_convert;
  __ JumpIfSmi(r3, &convert);
  __ CompareObjectType(r3, r4, r4, FIRST_JS_RECEIVER_TYPE);
  __ bge(&done_convert);
  __ CompareRoot(r3, Heap::kNullValueRootIndex);
  __ beq(&exit);
  __ CompareRoot(r3, Heap::kUndefinedValueRootIndex);
  __ beq(&exit);
  __ bind(&convert);
  ToObjectStub stub(isolate());
  __ CallStub(&stub);
  __ bind(&done_convert);
  PrepareForBailoutForId(stmt->ToObjectId(), BailoutState::TOS_REGISTER);
  __ push(r3);

  // Check cache validity in generated code. If we cannot guarantee cache
  // validity, call the runtime system to check cache validity or get the
  // property names in a fixed array. Note: Proxies never have an enum cache,
  // so they will always take the slow path.
  Label call_runtime;
  __ CheckEnumCache(&call_runtime);

  // The enum cache is valid. Load the map of the object being
  // iterated over and use the cache for the iteration.
  Label use_cache;
  __ LoadP(r3, FieldMemOperand(r3, HeapObject::kMapOffset));
  __ b(&use_cache);

  // Get the set of properties to enumerate.
  __ bind(&call_runtime);
  __ push(r3);  // Duplicate the enumerable object on the stack.
  __ CallRuntime(Runtime::kForInEnumerate);
  PrepareForBailoutForId(stmt->EnumId(), BailoutState::TOS_REGISTER);

  // If we got a map from the runtime call, we can do a fast
  // modification check. Otherwise, we got a fixed array, and we have
  // to do a slow check.
  Label fixed_array;
  __ LoadP(r5, FieldMemOperand(r3, HeapObject::kMapOffset));
  __ LoadRoot(ip, Heap::kMetaMapRootIndex);
  __ cmp(r5, ip);
  __ bne(&fixed_array);

  // We got a map in register r3. Get the enumeration cache from it.
  Label no_descriptors;
  __ bind(&use_cache);

  __ EnumLength(r4, r3);
  __ CmpSmiLiteral(r4, Smi::FromInt(0), r0);
  __ beq(&no_descriptors);

  __ LoadInstanceDescriptors(r3, r5);
  __ LoadP(r5, FieldMemOperand(r5, DescriptorArray::kEnumCacheOffset));
  __ LoadP(r5,
           FieldMemOperand(r5, DescriptorArray::kEnumCacheBridgeCacheOffset));

  // Set up the four remaining stack slots.
  __ push(r3);  // Map.
  __ LoadSmiLiteral(r3, Smi::FromInt(0));
  // Push enumeration cache, enumeration cache length (as smi) and zero.
  __ Push(r5, r4, r3);
  __ b(&loop);

  __ bind(&no_descriptors);
  __ Drop(1);
  __ b(&exit);

  // We got a fixed array in register r3. Iterate through that.
  __ bind(&fixed_array);

  __ LoadSmiLiteral(r4, Smi::FromInt(1));  // Smi(1) indicates slow check.
  __ Push(r4, r3);  // Smi and array.
  __ LoadP(r4, FieldMemOperand(r3, FixedArray::kLengthOffset));
  __ Push(r4);  // Fixed array length (as smi).
  PrepareForBailoutForId(stmt->PrepareId(), BailoutState::NO_REGISTERS);
  __ LoadSmiLiteral(r3, Smi::FromInt(0));
  __ Push(r3);  // Initial index.
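
  // At this point the five for-in stack slots, from the top of the stack
  // down, are: index (smi), length (smi), enum cache or fixed array,
  // expected map or Smi(1) marker, and the enumerable object itself.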

  // Generate code for doing the condition check.
  __ bind(&loop);
  SetExpressionAsStatementPosition(stmt->each());

  // Load the current count to r3, load the length to r4.
  __ LoadP(r3, MemOperand(sp, 0 * kPointerSize));
  __ LoadP(r4, MemOperand(sp, 1 * kPointerSize));
  __ cmpl(r3, r4);  // Compare to the array length.
  __ bge(loop_statement.break_label());

  // Get the current entry of the array into register r6.
  __ LoadP(r5, MemOperand(sp, 2 * kPointerSize));
  __ addi(r5, r5, Operand(FixedArray::kHeaderSize - kHeapObjectTag));
  __ SmiToPtrArrayOffset(r6, r3);
  __ LoadPX(r6, MemOperand(r6, r5));

  // Get the expected map from the stack or a smi in the
  // permanent slow case into register r5.
  __ LoadP(r5, MemOperand(sp, 3 * kPointerSize));

  // Check if the expected map still matches that of the enumerable.
  // If not, we may have to filter the key.
  Label update_each;
  __ LoadP(r4, MemOperand(sp, 4 * kPointerSize));
  __ LoadP(r7, FieldMemOperand(r4, HeapObject::kMapOffset));
  __ cmp(r7, r5);
  __ beq(&update_each);

  // We need to filter the key; record the slow path here.
  int const vector_index = SmiFromSlot(slot)->value();
  __ EmitLoadTypeFeedbackVector(r3);
  __ mov(r5, Operand(TypeFeedbackVector::MegamorphicSentinel(isolate())));
  __ StoreP(
      r5, FieldMemOperand(r3, FixedArray::OffsetOfElementAt(vector_index)), r0);

  // Convert the entry to a string or (smi) 0 if it isn't a property
  // any more. If the property has been removed while iterating, we
  // just skip it.
  __ Push(r4, r6);  // Enumerable and current entry.
  __ CallRuntime(Runtime::kForInFilter);
  PrepareForBailoutForId(stmt->FilterId(), BailoutState::TOS_REGISTER);
  __ mr(r6, r3);
  __ LoadRoot(r0, Heap::kUndefinedValueRootIndex);
  __ cmp(r3, r0);
  __ beq(loop_statement.continue_label());

  // Update the 'each' property or variable from the possibly filtered
  // entry in register r6.
  __ bind(&update_each);
  __ mr(result_register(), r6);
  // Perform the assignment as if via '='.
  {
    EffectContext context(this);
    EmitAssignment(stmt->each(), stmt->EachFeedbackSlot());
    PrepareForBailoutForId(stmt->AssignmentId(), BailoutState::NO_REGISTERS);
  }

  // Both Crankshaft and Turbofan expect BodyId to be right before stmt->body().
  PrepareForBailoutForId(stmt->BodyId(), BailoutState::NO_REGISTERS);
  // Generate code for the body of the loop.
  Visit(stmt->body());

  // Generate code for going to the next element by incrementing
  // the index (smi) stored on top of the stack.
  __ bind(loop_statement.continue_label());
  __ pop(r3);
  __ AddSmiLiteral(r3, r3, Smi::FromInt(1), r0);
  __ push(r3);

  EmitBackEdgeBookkeeping(stmt, &loop);
  __ b(&loop);

  // Remove the pointers stored on the stack.
  __ bind(loop_statement.break_label());
  DropOperands(5);

  // Exit and decrement the loop depth.
  PrepareForBailoutForId(stmt->ExitId(), BailoutState::NO_REGISTERS);
  __ bind(&exit);
  decrement_loop_depth();
}


void FullCodeGenerator::EmitSetHomeObject(Expression* initializer, int offset,
                                          FeedbackVectorSlot slot) {
  DCHECK(NeedsHomeObject(initializer));
  __ LoadP(StoreDescriptor::ReceiverRegister(), MemOperand(sp));
  __ mov(StoreDescriptor::NameRegister(),
         Operand(isolate()->factory()->home_object_symbol()));
  __ LoadP(StoreDescriptor::ValueRegister(),
           MemOperand(sp, offset * kPointerSize));
  EmitLoadStoreICSlot(slot);
  CallStoreIC();
}


void FullCodeGenerator::EmitSetHomeObjectAccumulator(Expression* initializer,
                                                     int offset,
                                                     FeedbackVectorSlot slot) {
  DCHECK(NeedsHomeObject(initializer));
  __ Move(StoreDescriptor::ReceiverRegister(), r3);
  __ mov(StoreDescriptor::NameRegister(),
         Operand(isolate()->factory()->home_object_symbol()));
  __ LoadP(StoreDescriptor::ValueRegister(),
           MemOperand(sp, offset * kPointerSize));
  EmitLoadStoreICSlot(slot);
  CallStoreIC();
}
1156
1157void FullCodeGenerator::EmitLoadGlobalCheckExtensions(VariableProxy* proxy,
1158 TypeofMode typeof_mode,
1159 Label* slow) {
1160 Register current = cp;
1161 Register next = r4;
1162 Register temp = r5;
1163
1164 Scope* s = scope();
1165 while (s != NULL) {
1166 if (s->num_heap_slots() > 0) {
1167 if (s->calls_sloppy_eval()) {
1168 // Check that extension is "the hole".
1169 __ LoadP(temp, ContextMemOperand(current, Context::EXTENSION_INDEX));
1170 __ JumpIfNotRoot(temp, Heap::kTheHoleValueRootIndex, slow);
1171 }
1172 // Load next context in chain.
1173 __ LoadP(next, ContextMemOperand(current, Context::PREVIOUS_INDEX));
1174 // Walk the rest of the chain without clobbering cp.
1175 current = next;
1176 }
1177 // If no outer scope calls eval, we do not need to check more
1178 // context extensions.
1179 if (!s->outer_scope_calls_sloppy_eval() || s->is_eval_scope()) break;
1180 s = s->outer_scope();
1181 }
1182
1183 if (s->is_eval_scope()) {
1184 Label loop, fast;
1185 if (!current.is(next)) {
1186 __ Move(next, current);
1187 }
1188 __ bind(&loop);
1189 // Terminate at native context.
1190 __ LoadP(temp, FieldMemOperand(next, HeapObject::kMapOffset));
1191 __ LoadRoot(ip, Heap::kNativeContextMapRootIndex);
1192 __ cmp(temp, ip);
1193 __ beq(&fast);
1194 // Check that extension is "the hole".
1195 __ LoadP(temp, ContextMemOperand(next, Context::EXTENSION_INDEX));
1196 __ JumpIfNotRoot(temp, Heap::kTheHoleValueRootIndex, slow);
1197 // Load next context in chain.
1198 __ LoadP(next, ContextMemOperand(next, Context::PREVIOUS_INDEX));
1199 __ b(&loop);
1200 __ bind(&fast);
1201 }
1202
1203 // All extension objects were empty and it is safe to use a normal global
1204 // load machinery.
1205 EmitGlobalVariableLoad(proxy, typeof_mode);
1206}
1207
1208
1209MemOperand FullCodeGenerator::ContextSlotOperandCheckExtensions(Variable* var,
1210 Label* slow) {
1211 DCHECK(var->IsContextSlot());
1212 Register context = cp;
1213 Register next = r6;
1214 Register temp = r7;
1215
1216 for (Scope* s = scope(); s != var->scope(); s = s->outer_scope()) {
1217 if (s->num_heap_slots() > 0) {
1218 if (s->calls_sloppy_eval()) {
1219 // Check that extension is "the hole".
1220 __ LoadP(temp, ContextMemOperand(context, Context::EXTENSION_INDEX));
1221 __ JumpIfNotRoot(temp, Heap::kTheHoleValueRootIndex, slow);
1222 }
1223 __ LoadP(next, ContextMemOperand(context, Context::PREVIOUS_INDEX));
1224 // Walk the rest of the chain without clobbering cp.
1225 context = next;
1226 }
1227 }
1228 // Check that last extension is "the hole".
1229 __ LoadP(temp, ContextMemOperand(context, Context::EXTENSION_INDEX));
1230 __ JumpIfNotRoot(temp, Heap::kTheHoleValueRootIndex, slow);
1231
1232 // This function is used only for loads, not stores, so it's safe to
1233 // return an cp-based operand (the write barrier cannot be allowed to
1234 // destroy the cp register).
1235 return ContextMemOperand(context, var->index());
1236}


void FullCodeGenerator::EmitDynamicLookupFastCase(VariableProxy* proxy,
                                                  TypeofMode typeof_mode,
                                                  Label* slow, Label* done) {
  // Generate fast-case code for variables that might be shadowed by
  // eval-introduced variables. Eval is used a lot without
  // introducing variables. In those cases, we do not want to
  // perform a runtime call for all variables in the scope
  // containing the eval.
  Variable* var = proxy->var();
  if (var->mode() == DYNAMIC_GLOBAL) {
    EmitLoadGlobalCheckExtensions(proxy, typeof_mode, slow);
    __ b(done);
  } else if (var->mode() == DYNAMIC_LOCAL) {
    Variable* local = var->local_if_not_shadowed();
    __ LoadP(r3, ContextSlotOperandCheckExtensions(local, slow));
    if (local->mode() == LET || local->mode() == CONST) {
      __ CompareRoot(r3, Heap::kTheHoleValueRootIndex);
      __ bne(done);
      __ mov(r3, Operand(var->name()));
      __ push(r3);
      __ CallRuntime(Runtime::kThrowReferenceError);
    }
    __ b(done);
  }
}


void FullCodeGenerator::EmitGlobalVariableLoad(VariableProxy* proxy,
                                               TypeofMode typeof_mode) {
  Variable* var = proxy->var();
  DCHECK(var->IsUnallocatedOrGlobalSlot() ||
         (var->IsLookupSlot() && var->mode() == DYNAMIC_GLOBAL));
  __ LoadGlobalObject(LoadDescriptor::ReceiverRegister());
  __ mov(LoadDescriptor::NameRegister(), Operand(var->name()));
  __ mov(LoadDescriptor::SlotRegister(),
         Operand(SmiFromSlot(proxy->VariableFeedbackSlot())));
  CallLoadIC(typeof_mode);
}


void FullCodeGenerator::EmitVariableLoad(VariableProxy* proxy,
                                         TypeofMode typeof_mode) {
  // Record position before possible IC call.
  SetExpressionPosition(proxy);
  PrepareForBailoutForId(proxy->BeforeId(), BailoutState::NO_REGISTERS);
  Variable* var = proxy->var();

  // Three cases: global variables, lookup variables, and all other types of
  // variables.
  switch (var->location()) {
    case VariableLocation::GLOBAL:
    case VariableLocation::UNALLOCATED: {
      Comment cmnt(masm_, "[ Global variable");
      EmitGlobalVariableLoad(proxy, typeof_mode);
      context()->Plug(r3);
      break;
    }

    case VariableLocation::PARAMETER:
    case VariableLocation::LOCAL:
    case VariableLocation::CONTEXT: {
      DCHECK_EQ(NOT_INSIDE_TYPEOF, typeof_mode);
      Comment cmnt(masm_, var->IsContextSlot() ? "[ Context variable"
                                               : "[ Stack variable");
      if (NeedsHoleCheckForLoad(proxy)) {
        Label done;
        // Let and const need a read barrier.
        GetVar(r3, var);
        __ CompareRoot(r3, Heap::kTheHoleValueRootIndex);
        __ bne(&done);
        if (var->mode() == LET || var->mode() == CONST) {
          // Throw a reference error when using an uninitialized let/const
          // binding in harmony mode.
          __ mov(r3, Operand(var->name()));
          __ push(r3);
          __ CallRuntime(Runtime::kThrowReferenceError);
        }
        __ bind(&done);
        context()->Plug(r3);
        break;
      }
      context()->Plug(var);
      break;
    }

    case VariableLocation::LOOKUP: {
      Comment cmnt(masm_, "[ Lookup variable");
      Label done, slow;
      // Generate code for loading from variables potentially shadowed
      // by eval-introduced variables.
      EmitDynamicLookupFastCase(proxy, typeof_mode, &slow, &done);
      __ bind(&slow);
      __ Push(var->name());
      Runtime::FunctionId function_id =
          typeof_mode == NOT_INSIDE_TYPEOF
              ? Runtime::kLoadLookupSlot
              : Runtime::kLoadLookupSlotInsideTypeof;
      __ CallRuntime(function_id);
      __ bind(&done);
      context()->Plug(r3);
    }
  }
}


1344void FullCodeGenerator::VisitRegExpLiteral(RegExpLiteral* expr) {
1345 Comment cmnt(masm_, "[ RegExpLiteral");
1346 __ LoadP(r6, MemOperand(fp, JavaScriptFrameConstants::kFunctionOffset));
1347 __ LoadSmiLiteral(r5, Smi::FromInt(expr->literal_index()));
1348 __ mov(r4, Operand(expr->pattern()));
1349 __ LoadSmiLiteral(r3, Smi::FromInt(expr->flags()));
1350 FastCloneRegExpStub stub(isolate());
1351 __ CallStub(&stub);
1352 context()->Plug(r3);
1353}
1354
1355
1356void FullCodeGenerator::EmitAccessor(ObjectLiteralProperty* property) {
1357 Expression* expression = (property == NULL) ? NULL : property->value();
1358 if (expression == NULL) {
1359 __ LoadRoot(r4, Heap::kNullValueRootIndex);
Ben Murdoch097c5b22016-05-18 11:27:45 +01001360 PushOperand(r4);
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00001361 } else {
1362 VisitForStackValue(expression);
1363 if (NeedsHomeObject(expression)) {
1364 DCHECK(property->kind() == ObjectLiteral::Property::GETTER ||
1365 property->kind() == ObjectLiteral::Property::SETTER);
1366 int offset = property->kind() == ObjectLiteral::Property::GETTER ? 2 : 3;
1367 EmitSetHomeObject(expression, offset, property->GetSlot());
1368 }
1369 }
1370}
1371
1372
1373void FullCodeGenerator::VisitObjectLiteral(ObjectLiteral* expr) {
1374 Comment cmnt(masm_, "[ ObjectLiteral");
1375
1376 Handle<FixedArray> constant_properties = expr->constant_properties();
1377 __ LoadP(r6, MemOperand(fp, JavaScriptFrameConstants::kFunctionOffset));
1378 __ LoadSmiLiteral(r5, Smi::FromInt(expr->literal_index()));
1379 __ mov(r4, Operand(constant_properties));
1380 int flags = expr->ComputeFlags();
1381 __ LoadSmiLiteral(r3, Smi::FromInt(flags));
1382 if (MustCreateObjectLiteralWithRuntime(expr)) {
1383 __ Push(r6, r5, r4, r3);
1384 __ CallRuntime(Runtime::kCreateObjectLiteral);
1385 } else {
1386 FastCloneShallowObjectStub stub(isolate(), expr->properties_count());
1387 __ CallStub(&stub);
Ben Murdochc5610432016-08-08 18:44:38 +01001388 RestoreContext();
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00001389 }
Ben Murdochc5610432016-08-08 18:44:38 +01001390 PrepareForBailoutForId(expr->CreateLiteralId(), BailoutState::TOS_REGISTER);
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00001391
1392 // If result_saved is true the result is on top of the stack. If
1393 // result_saved is false the result is in r3.
1394 bool result_saved = false;
1395
1396 AccessorTable accessor_table(zone());
1397 int property_index = 0;
1398 for (; property_index < expr->properties()->length(); property_index++) {
1399 ObjectLiteral::Property* property = expr->properties()->at(property_index);
1400 if (property->is_computed_name()) break;
1401 if (property->IsCompileTimeValue()) continue;
1402
1403 Literal* key = property->key()->AsLiteral();
1404 Expression* value = property->value();
1405 if (!result_saved) {
Ben Murdoch097c5b22016-05-18 11:27:45 +01001406 PushOperand(r3); // Save result on stack
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00001407 result_saved = true;
1408 }
1409 switch (property->kind()) {
1410 case ObjectLiteral::Property::CONSTANT:
1411 UNREACHABLE();
1412 case ObjectLiteral::Property::MATERIALIZED_LITERAL:
1413 DCHECK(!CompileTimeValue::IsCompileTimeValue(property->value()));
1414 // Fall through.
1415 case ObjectLiteral::Property::COMPUTED:
1416 // It is safe to use [[Put]] here because the boilerplate already
1417 // contains computed properties with an uninitialized value.
1418 if (key->value()->IsInternalizedString()) {
1419 if (property->emit_store()) {
1420 VisitForAccumulatorValue(value);
1421 DCHECK(StoreDescriptor::ValueRegister().is(r3));
1422 __ mov(StoreDescriptor::NameRegister(), Operand(key->value()));
1423 __ LoadP(StoreDescriptor::ReceiverRegister(), MemOperand(sp));
1424 EmitLoadStoreICSlot(property->GetSlot(0));
1425 CallStoreIC();
Ben Murdochc5610432016-08-08 18:44:38 +01001426 PrepareForBailoutForId(key->id(), BailoutState::NO_REGISTERS);
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00001427
1428 if (NeedsHomeObject(value)) {
1429 EmitSetHomeObjectAccumulator(value, 0, property->GetSlot(1));
1430 }
1431 } else {
1432 VisitForEffect(value);
1433 }
1434 break;
1435 }
1436 // Duplicate receiver on stack.
1437 __ LoadP(r3, MemOperand(sp));
Ben Murdoch097c5b22016-05-18 11:27:45 +01001438 PushOperand(r3);
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00001439 VisitForStackValue(key);
1440 VisitForStackValue(value);
1441 if (property->emit_store()) {
1442 if (NeedsHomeObject(value)) {
1443 EmitSetHomeObject(value, 2, property->GetSlot());
1444 }
1445 __ LoadSmiLiteral(r3, Smi::FromInt(SLOPPY)); // Language mode
Ben Murdoch097c5b22016-05-18 11:27:45 +01001446 PushOperand(r3);
1447 CallRuntimeWithOperands(Runtime::kSetProperty);
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00001448 } else {
Ben Murdoch097c5b22016-05-18 11:27:45 +01001449 DropOperands(3);
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00001450 }
1451 break;
1452 case ObjectLiteral::Property::PROTOTYPE:
1453 // Duplicate receiver on stack.
1454 __ LoadP(r3, MemOperand(sp));
Ben Murdoch097c5b22016-05-18 11:27:45 +01001455 PushOperand(r3);
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00001456 VisitForStackValue(value);
1457 DCHECK(property->emit_store());
Ben Murdoch097c5b22016-05-18 11:27:45 +01001458 CallRuntimeWithOperands(Runtime::kInternalSetPrototype);
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00001459 PrepareForBailoutForId(expr->GetIdForPropertySet(property_index),
Ben Murdochc5610432016-08-08 18:44:38 +01001460 BailoutState::NO_REGISTERS);
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00001461 break;
1462 case ObjectLiteral::Property::GETTER:
1463 if (property->emit_store()) {
1464 accessor_table.lookup(key)->second->getter = property;
1465 }
1466 break;
1467 case ObjectLiteral::Property::SETTER:
1468 if (property->emit_store()) {
1469 accessor_table.lookup(key)->second->setter = property;
1470 }
1471 break;
1472 }
1473 }
1474
1475 // Emit code to define accessors, using only a single call to the runtime for
1476 // each pair of corresponding getters and setters.
1477 for (AccessorTable::Iterator it = accessor_table.begin();
1478 it != accessor_table.end(); ++it) {
1479 __ LoadP(r3, MemOperand(sp)); // Duplicate receiver.
Ben Murdoch097c5b22016-05-18 11:27:45 +01001480 PushOperand(r3);
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00001481 VisitForStackValue(it->first);
1482 EmitAccessor(it->second->getter);
1483 EmitAccessor(it->second->setter);
1484 __ LoadSmiLiteral(r3, Smi::FromInt(NONE));
Ben Murdoch097c5b22016-05-18 11:27:45 +01001485 PushOperand(r3);
1486 CallRuntimeWithOperands(Runtime::kDefineAccessorPropertyUnchecked);
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00001487 }
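  // Annotation (added, illustrative): for { get x() {...}, set x(v) {...} }
  // both accessors land in one table entry keyed by "x", so a single
  // Runtime::kDefineAccessorPropertyUnchecked call is emitted with the
  // arguments (receiver, "x", getter, setter, NONE); a missing getter or
  // setter is passed as null (see EmitAccessor above).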
1488
1489 // Object literals have two parts. The "static" part on the left contains no
1490 // computed property names, and so we can compute its map ahead of time; see
1491 // runtime.cc::CreateObjectLiteralBoilerplate. The second "dynamic" part
1492 // starts with the first computed property name, and continues with all
1493 // properties to its right. All the code from above initializes the static
1494 // component of the object literal, and arranges for the map of the result to
1495 // reflect the static order in which the keys appear. For the dynamic
1496 // properties, we compile them into a series of "SetOwnProperty" runtime
1497 // calls. This will preserve insertion order.
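  // Annotation (added, illustrative): in { a: x, b: y, [k]: z, c: w } the
  // properties a and b belong to the static part handled above, while [k]
  // and everything after it (c included) are defined by the runtime calls
  // emitted below, preserving insertion order.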
1498 for (; property_index < expr->properties()->length(); property_index++) {
1499 ObjectLiteral::Property* property = expr->properties()->at(property_index);
1500
1501 Expression* value = property->value();
1502 if (!result_saved) {
Ben Murdoch097c5b22016-05-18 11:27:45 +01001503 PushOperand(r3); // Save result on the stack
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00001504 result_saved = true;
1505 }
1506
1507 __ LoadP(r3, MemOperand(sp)); // Duplicate receiver.
Ben Murdoch097c5b22016-05-18 11:27:45 +01001508 PushOperand(r3);
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00001509
1510 if (property->kind() == ObjectLiteral::Property::PROTOTYPE) {
1511 DCHECK(!property->is_computed_name());
1512 VisitForStackValue(value);
1513 DCHECK(property->emit_store());
Ben Murdoch097c5b22016-05-18 11:27:45 +01001514 CallRuntimeWithOperands(Runtime::kInternalSetPrototype);
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00001515 PrepareForBailoutForId(expr->GetIdForPropertySet(property_index),
Ben Murdochc5610432016-08-08 18:44:38 +01001516 BailoutState::NO_REGISTERS);
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00001517 } else {
1518 EmitPropertyKey(property, expr->GetIdForPropertyName(property_index));
1519 VisitForStackValue(value);
1520 if (NeedsHomeObject(value)) {
1521 EmitSetHomeObject(value, 2, property->GetSlot());
1522 }
1523
1524 switch (property->kind()) {
1525 case ObjectLiteral::Property::CONSTANT:
1526 case ObjectLiteral::Property::MATERIALIZED_LITERAL:
1527 case ObjectLiteral::Property::COMPUTED:
1528 if (property->emit_store()) {
Ben Murdoch097c5b22016-05-18 11:27:45 +01001529 PushOperand(Smi::FromInt(NONE));
1530 PushOperand(Smi::FromInt(property->NeedsSetFunctionName()));
1531 CallRuntimeWithOperands(Runtime::kDefineDataPropertyInLiteral);
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00001532 } else {
Ben Murdoch097c5b22016-05-18 11:27:45 +01001533 DropOperands(3);
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00001534 }
1535 break;
1536
1537 case ObjectLiteral::Property::PROTOTYPE:
1538 UNREACHABLE();
1539 break;
1540
1541 case ObjectLiteral::Property::GETTER:
Ben Murdoch097c5b22016-05-18 11:27:45 +01001542 PushOperand(Smi::FromInt(NONE));
1543 CallRuntimeWithOperands(Runtime::kDefineGetterPropertyUnchecked);
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00001544 break;
1545
1546 case ObjectLiteral::Property::SETTER:
Ben Murdoch097c5b22016-05-18 11:27:45 +01001547 PushOperand(Smi::FromInt(NONE));
1548 CallRuntimeWithOperands(Runtime::kDefineSetterPropertyUnchecked);
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00001549 break;
1550 }
1551 }
1552 }
1553
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00001554 if (result_saved) {
1555 context()->PlugTOS();
1556 } else {
1557 context()->Plug(r3);
1558 }
1559}
1560
1561
1562void FullCodeGenerator::VisitArrayLiteral(ArrayLiteral* expr) {
1563 Comment cmnt(masm_, "[ ArrayLiteral");
1564
1565 Handle<FixedArray> constant_elements = expr->constant_elements();
1566 bool has_fast_elements =
1567 IsFastObjectElementsKind(expr->constant_elements_kind());
1568 Handle<FixedArrayBase> constant_elements_values(
1569 FixedArrayBase::cast(constant_elements->get(1)));
1570
1571 AllocationSiteMode allocation_site_mode = TRACK_ALLOCATION_SITE;
1572 if (has_fast_elements && !FLAG_allocation_site_pretenuring) {
1573 // If the only consumer of allocation sites is element-kind transitioning,
1574 // we can turn tracking off when there is nowhere left to transition to.
1575 allocation_site_mode = DONT_TRACK_ALLOCATION_SITE;
1576 }
1577
1578 __ LoadP(r6, MemOperand(fp, JavaScriptFrameConstants::kFunctionOffset));
1579 __ LoadSmiLiteral(r5, Smi::FromInt(expr->literal_index()));
1580 __ mov(r4, Operand(constant_elements));
1581 if (MustCreateArrayLiteralWithRuntime(expr)) {
1582 __ LoadSmiLiteral(r3, Smi::FromInt(expr->ComputeFlags()));
1583 __ Push(r6, r5, r4, r3);
1584 __ CallRuntime(Runtime::kCreateArrayLiteral);
1585 } else {
1586 FastCloneShallowArrayStub stub(isolate(), allocation_site_mode);
1587 __ CallStub(&stub);
1588 }
Ben Murdochc5610432016-08-08 18:44:38 +01001589 PrepareForBailoutForId(expr->CreateLiteralId(), BailoutState::TOS_REGISTER);
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00001590
1591 bool result_saved = false; // Is the result saved to the stack?
1592 ZoneList<Expression*>* subexprs = expr->values();
1593 int length = subexprs->length();
1594
1595 // Emit code to evaluate all the non-constant subexpressions and to store
1596 // them into the newly cloned array.
1597 int array_index = 0;
1598 for (; array_index < length; array_index++) {
1599 Expression* subexpr = subexprs->at(array_index);
Ben Murdoch097c5b22016-05-18 11:27:45 +01001600 DCHECK(!subexpr->IsSpread());
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00001601 // If the subexpression is a literal or a simple materialized literal it
1602 // is already set in the cloned array.
1603 if (CompileTimeValue::IsCompileTimeValue(subexpr)) continue;
1604
1605 if (!result_saved) {
Ben Murdoch097c5b22016-05-18 11:27:45 +01001606 PushOperand(r3);
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00001607 result_saved = true;
1608 }
1609 VisitForAccumulatorValue(subexpr);
1610
1611 __ LoadSmiLiteral(StoreDescriptor::NameRegister(),
1612 Smi::FromInt(array_index));
1613 __ LoadP(StoreDescriptor::ReceiverRegister(), MemOperand(sp, 0));
1614 EmitLoadStoreICSlot(expr->LiteralFeedbackSlot());
1615 Handle<Code> ic =
1616 CodeFactory::KeyedStoreIC(isolate(), language_mode()).code();
1617 CallIC(ic);
1618
Ben Murdochc5610432016-08-08 18:44:38 +01001619 PrepareForBailoutForId(expr->GetIdForElement(array_index),
1620 BailoutState::NO_REGISTERS);
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00001621 }
1622
1623 // If the array literal contains spread expressions, it has two parts. The
1624 // first part is the "static" array, which has a literal index and is handled
1625 // above. The second part starts at the first spread expression (inclusive),
1626 // and its elements are appended to the array one by one. Note that the
1627 // number of elements an iterable produces is unknown ahead of time.
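  // Annotation (added, illustrative): in [a, b, ...it, c] the elements a and
  // b are initialized with the keyed-store IC above (constants are already in
  // the boilerplate), while ...it and the trailing c are appended one at a
  // time via Runtime::kAppendElement below.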
1628 if (array_index < length && result_saved) {
Ben Murdoch097c5b22016-05-18 11:27:45 +01001629 PopOperand(r3);
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00001630 result_saved = false;
1631 }
1632 for (; array_index < length; array_index++) {
1633 Expression* subexpr = subexprs->at(array_index);
1634
Ben Murdoch097c5b22016-05-18 11:27:45 +01001635 PushOperand(r3);
1636 DCHECK(!subexpr->IsSpread());
1637 VisitForStackValue(subexpr);
1638 CallRuntimeWithOperands(Runtime::kAppendElement);
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00001639
Ben Murdochc5610432016-08-08 18:44:38 +01001640 PrepareForBailoutForId(expr->GetIdForElement(array_index),
1641 BailoutState::NO_REGISTERS);
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00001642 }
1643
1644 if (result_saved) {
1645 context()->PlugTOS();
1646 } else {
1647 context()->Plug(r3);
1648 }
1649}
1650
1651
1652void FullCodeGenerator::VisitAssignment(Assignment* expr) {
1653 DCHECK(expr->target()->IsValidReferenceExpressionOrThis());
1654
1655 Comment cmnt(masm_, "[ Assignment");
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00001656
1657 Property* property = expr->target()->AsProperty();
1658 LhsKind assign_type = Property::GetAssignType(property);
1659
1660 // Evaluate LHS expression.
1661 switch (assign_type) {
1662 case VARIABLE:
1663 // Nothing to do here.
1664 break;
1665 case NAMED_PROPERTY:
1666 if (expr->is_compound()) {
1667 // We need the receiver both on the stack and in the register.
1668 VisitForStackValue(property->obj());
1669 __ LoadP(LoadDescriptor::ReceiverRegister(), MemOperand(sp, 0));
1670 } else {
1671 VisitForStackValue(property->obj());
1672 }
1673 break;
1674 case NAMED_SUPER_PROPERTY:
1675 VisitForStackValue(
1676 property->obj()->AsSuperPropertyReference()->this_var());
1677 VisitForAccumulatorValue(
1678 property->obj()->AsSuperPropertyReference()->home_object());
Ben Murdoch097c5b22016-05-18 11:27:45 +01001679 PushOperand(result_register());
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00001680 if (expr->is_compound()) {
1681 const Register scratch = r4;
1682 __ LoadP(scratch, MemOperand(sp, kPointerSize));
Ben Murdoch097c5b22016-05-18 11:27:45 +01001683 PushOperands(scratch, result_register());
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00001684 }
1685 break;
1686 case KEYED_SUPER_PROPERTY: {
1687 const Register scratch = r4;
1688 VisitForStackValue(
1689 property->obj()->AsSuperPropertyReference()->this_var());
1690 VisitForAccumulatorValue(
1691 property->obj()->AsSuperPropertyReference()->home_object());
1692 __ mr(scratch, result_register());
1693 VisitForAccumulatorValue(property->key());
Ben Murdoch097c5b22016-05-18 11:27:45 +01001694 PushOperands(scratch, result_register());
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00001695 if (expr->is_compound()) {
1696 const Register scratch1 = r5;
1697 __ LoadP(scratch1, MemOperand(sp, 2 * kPointerSize));
Ben Murdoch097c5b22016-05-18 11:27:45 +01001698 PushOperands(scratch1, scratch, result_register());
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00001699 }
1700 break;
1701 }
1702 case KEYED_PROPERTY:
1703 if (expr->is_compound()) {
1704 VisitForStackValue(property->obj());
1705 VisitForStackValue(property->key());
1706 __ LoadP(LoadDescriptor::ReceiverRegister(),
1707 MemOperand(sp, 1 * kPointerSize));
1708 __ LoadP(LoadDescriptor::NameRegister(), MemOperand(sp, 0));
1709 } else {
1710 VisitForStackValue(property->obj());
1711 VisitForStackValue(property->key());
1712 }
1713 break;
1714 }
1715
1716 // For compound assignments we need another deoptimization point after the
1717 // variable/property load.
1718 if (expr->is_compound()) {
1719 {
1720 AccumulatorValueContext context(this);
1721 switch (assign_type) {
1722 case VARIABLE:
1723 EmitVariableLoad(expr->target()->AsVariableProxy());
Ben Murdochc5610432016-08-08 18:44:38 +01001724 PrepareForBailout(expr->target(), BailoutState::TOS_REGISTER);
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00001725 break;
1726 case NAMED_PROPERTY:
1727 EmitNamedPropertyLoad(property);
Ben Murdochc5610432016-08-08 18:44:38 +01001728 PrepareForBailoutForId(property->LoadId(),
1729 BailoutState::TOS_REGISTER);
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00001730 break;
1731 case NAMED_SUPER_PROPERTY:
1732 EmitNamedSuperPropertyLoad(property);
Ben Murdochc5610432016-08-08 18:44:38 +01001733 PrepareForBailoutForId(property->LoadId(),
1734 BailoutState::TOS_REGISTER);
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00001735 break;
1736 case KEYED_SUPER_PROPERTY:
1737 EmitKeyedSuperPropertyLoad(property);
Ben Murdochc5610432016-08-08 18:44:38 +01001738 PrepareForBailoutForId(property->LoadId(),
1739 BailoutState::TOS_REGISTER);
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00001740 break;
1741 case KEYED_PROPERTY:
1742 EmitKeyedPropertyLoad(property);
Ben Murdochc5610432016-08-08 18:44:38 +01001743 PrepareForBailoutForId(property->LoadId(),
1744 BailoutState::TOS_REGISTER);
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00001745 break;
1746 }
1747 }
1748
1749 Token::Value op = expr->binary_op();
Ben Murdoch097c5b22016-05-18 11:27:45 +01001750 PushOperand(r3); // Left operand goes on the stack.
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00001751 VisitForAccumulatorValue(expr->value());
1752
1753 AccumulatorValueContext context(this);
1754 if (ShouldInlineSmiCase(op)) {
1755 EmitInlineSmiBinaryOp(expr->binary_operation(), op, expr->target(),
1756 expr->value());
1757 } else {
1758 EmitBinaryOp(expr->binary_operation(), op);
1759 }
1760
1761 // Deoptimization point in case the binary operation may have side effects.
Ben Murdochc5610432016-08-08 18:44:38 +01001762 PrepareForBailout(expr->binary_operation(), BailoutState::TOS_REGISTER);
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00001763 } else {
1764 VisitForAccumulatorValue(expr->value());
1765 }
1766
1767 SetExpressionPosition(expr);
1768
1769 // Store the value.
1770 switch (assign_type) {
1771 case VARIABLE:
1772 EmitVariableAssignment(expr->target()->AsVariableProxy()->var(),
1773 expr->op(), expr->AssignmentSlot());
Ben Murdochc5610432016-08-08 18:44:38 +01001774 PrepareForBailoutForId(expr->AssignmentId(), BailoutState::TOS_REGISTER);
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00001775 context()->Plug(r3);
1776 break;
1777 case NAMED_PROPERTY:
1778 EmitNamedPropertyAssignment(expr);
1779 break;
1780 case NAMED_SUPER_PROPERTY:
1781 EmitNamedSuperPropertyStore(property);
1782 context()->Plug(r3);
1783 break;
1784 case KEYED_SUPER_PROPERTY:
1785 EmitKeyedSuperPropertyStore(property);
1786 context()->Plug(r3);
1787 break;
1788 case KEYED_PROPERTY:
1789 EmitKeyedPropertyAssignment(expr);
1790 break;
1791 }
1792}
1793
1794
1795void FullCodeGenerator::VisitYield(Yield* expr) {
1796 Comment cmnt(masm_, "[ Yield");
1797 SetExpressionPosition(expr);
1798
1799 // Evaluate yielded value first; the initial iterator definition depends on
1800 // this. It stays on the stack while we update the iterator.
1801 VisitForStackValue(expr->expression());
1802
Ben Murdochc5610432016-08-08 18:44:38 +01001803 Label suspend, continuation, post_runtime, resume, exception;
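  // Control-flow sketch (annotation added for clarity):
  //   suspend: store the continuation position and the context in the
  //     generator object, call Runtime::kSuspendJSGeneratorObject unless the
  //     operand stack is empty, and return to the caller.
  //   continuation: re-entered on resumption with the generator object in
  //     r3; kNext (< kReturn) branches to 'resume', kReturn builds a done
  //     iterator result and unwinds, kThrow (> kReturn) rethrows the input.
  //   resume: the input value becomes the value of the yield expression.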
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00001804
Ben Murdochda12d292016-06-02 14:46:10 +01001805 __ b(&suspend);
1806 __ bind(&continuation);
Ben Murdochc5610432016-08-08 18:44:38 +01001807 // When we arrive here, r3 holds the generator object.
Ben Murdochda12d292016-06-02 14:46:10 +01001808 __ RecordGeneratorContinuation();
Ben Murdochc5610432016-08-08 18:44:38 +01001809 __ LoadP(r4, FieldMemOperand(r3, JSGeneratorObject::kResumeModeOffset));
1810 __ LoadP(r3, FieldMemOperand(r3, JSGeneratorObject::kInputOffset));
1811 STATIC_ASSERT(JSGeneratorObject::kNext < JSGeneratorObject::kReturn);
1812 STATIC_ASSERT(JSGeneratorObject::kThrow > JSGeneratorObject::kReturn);
1813 __ CmpSmiLiteral(r4, Smi::FromInt(JSGeneratorObject::kReturn), r0);
1814 __ blt(&resume);
1815 __ Push(result_register());
1816 __ bgt(&exception);
Ben Murdochda12d292016-06-02 14:46:10 +01001817 EmitCreateIteratorResult(true);
1818 EmitUnwindAndReturn();
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00001819
Ben Murdochc5610432016-08-08 18:44:38 +01001820 __ bind(&exception);
1821 __ CallRuntime(Runtime::kThrow);
1822
Ben Murdochda12d292016-06-02 14:46:10 +01001823 __ bind(&suspend);
1824 OperandStackDepthIncrement(1); // Not popped on this path.
1825 VisitForAccumulatorValue(expr->generator_object());
1826 DCHECK(continuation.pos() > 0 && Smi::IsValid(continuation.pos()));
1827 __ LoadSmiLiteral(r4, Smi::FromInt(continuation.pos()));
1828 __ StoreP(r4, FieldMemOperand(r3, JSGeneratorObject::kContinuationOffset),
1829 r0);
1830 __ StoreP(cp, FieldMemOperand(r3, JSGeneratorObject::kContextOffset), r0);
1831 __ mr(r4, cp);
1832 __ RecordWriteField(r3, JSGeneratorObject::kContextOffset, r4, r5,
1833 kLRHasBeenSaved, kDontSaveFPRegs);
1834 __ addi(r4, fp, Operand(StandardFrameConstants::kExpressionsOffset));
1835 __ cmp(sp, r4);
1836 __ beq(&post_runtime);
1837 __ push(r3); // generator object
1838 __ CallRuntime(Runtime::kSuspendJSGeneratorObject, 1);
Ben Murdochc5610432016-08-08 18:44:38 +01001839 RestoreContext();
Ben Murdochda12d292016-06-02 14:46:10 +01001840 __ bind(&post_runtime);
1841 PopOperand(result_register());
1842 EmitReturnSequence();
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00001843
Ben Murdochda12d292016-06-02 14:46:10 +01001844 __ bind(&resume);
1845 context()->Plug(result_register());
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00001846}
1847
Ben Murdoch097c5b22016-05-18 11:27:45 +01001848void FullCodeGenerator::PushOperands(Register reg1, Register reg2) {
1849 OperandStackDepthIncrement(2);
1850 __ Push(reg1, reg2);
1851}
1852
1853void FullCodeGenerator::PushOperands(Register reg1, Register reg2,
1854 Register reg3) {
1855 OperandStackDepthIncrement(3);
1856 __ Push(reg1, reg2, reg3);
1857}
1858
1859void FullCodeGenerator::PushOperands(Register reg1, Register reg2,
1860 Register reg3, Register reg4) {
1861 OperandStackDepthIncrement(4);
1862 __ Push(reg1, reg2, reg3, reg4);
1863}
1864
1865void FullCodeGenerator::PopOperands(Register reg1, Register reg2) {
1866 OperandStackDepthDecrement(2);
1867 __ Pop(reg1, reg2);
1868}
1869
1870void FullCodeGenerator::EmitOperandStackDepthCheck() {
1871 if (FLAG_debug_code) {
1872 int expected_diff = StandardFrameConstants::kFixedFrameSizeFromFp +
1873 operand_stack_depth_ * kPointerSize;
1874 __ sub(r3, fp, sp);
1875 __ cmpi(r3, Operand(expected_diff));
1876 __ Assert(eq, kUnexpectedStackDepth);
1877 }
1878}
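// Annotation (added): full-codegen tracks the operand stack depth statically
// in operand_stack_depth_; the check above verifies that bookkeeping by
// comparing fp - sp against the fixed frame size plus the tracked depth in
// pointers.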
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00001879
1880void FullCodeGenerator::EmitCreateIteratorResult(bool done) {
1881 Label allocate, done_allocate;
1882
Ben Murdochc5610432016-08-08 18:44:38 +01001883 __ Allocate(JSIteratorResult::kSize, r3, r5, r6, &allocate,
1884 NO_ALLOCATION_FLAGS);
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00001885 __ b(&done_allocate);
1886
1887 __ bind(&allocate);
1888 __ Push(Smi::FromInt(JSIteratorResult::kSize));
1889 __ CallRuntime(Runtime::kAllocateInNewSpace);
1890
1891 __ bind(&done_allocate);
1892 __ LoadNativeContextSlot(Context::ITERATOR_RESULT_MAP_INDEX, r4);
Ben Murdochda12d292016-06-02 14:46:10 +01001893 PopOperand(r5);
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00001894 __ LoadRoot(r6,
1895 done ? Heap::kTrueValueRootIndex : Heap::kFalseValueRootIndex);
1896 __ LoadRoot(r7, Heap::kEmptyFixedArrayRootIndex);
1897 __ StoreP(r4, FieldMemOperand(r3, HeapObject::kMapOffset), r0);
1898 __ StoreP(r7, FieldMemOperand(r3, JSObject::kPropertiesOffset), r0);
1899 __ StoreP(r7, FieldMemOperand(r3, JSObject::kElementsOffset), r0);
1900 __ StoreP(r5, FieldMemOperand(r3, JSIteratorResult::kValueOffset), r0);
1901 __ StoreP(r6, FieldMemOperand(r3, JSIteratorResult::kDoneOffset), r0);
1902}
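// Annotation (added): the object built above is the JS value
// { value: <popped operand>, done: <the 'done' parameter> }, using the
// iterator-result map from the native context; allocation falls back to
// Runtime::kAllocateInNewSpace when new space is exhausted.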
1903
1904
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00001905void FullCodeGenerator::EmitInlineSmiBinaryOp(BinaryOperation* expr,
1906 Token::Value op,
1907 Expression* left_expr,
1908 Expression* right_expr) {
1909 Label done, smi_case, stub_call;
1910
1911 Register scratch1 = r5;
1912 Register scratch2 = r6;
1913
1914 // Get the arguments.
1915 Register left = r4;
1916 Register right = r3;
Ben Murdoch097c5b22016-05-18 11:27:45 +01001917 PopOperand(left);
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00001918
1919 // Perform combined smi check on both operands.
1920 __ orx(scratch1, left, right);
1921 STATIC_ASSERT(kSmiTag == 0);
1922 JumpPatchSite patch_site(masm_);
1923 patch_site.EmitJumpIfSmi(scratch1, &smi_case);
1924
1925 __ bind(&stub_call);
Ben Murdoch097c5b22016-05-18 11:27:45 +01001926 Handle<Code> code = CodeFactory::BinaryOpIC(isolate(), op).code();
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00001927 CallIC(code, expr->BinaryOperationFeedbackId());
1928 patch_site.EmitPatchInfo();
1929 __ b(&done);
1930
1931 __ bind(&smi_case);
1932 // Smi case. This code works the same way as the smi-smi case in the type
1933 // recording binary operation stub.
1934 switch (op) {
1935 case Token::SAR:
1936 __ GetLeastBitsFromSmi(scratch1, right, 5);
1937 __ ShiftRightArith(right, left, scratch1);
1938 __ ClearRightImm(right, right, Operand(kSmiTagSize + kSmiShiftSize));
1939 break;
1940 case Token::SHL: {
1941 __ GetLeastBitsFromSmi(scratch2, right, 5);
1942#if V8_TARGET_ARCH_PPC64
1943 __ ShiftLeft_(right, left, scratch2);
1944#else
1945 __ SmiUntag(scratch1, left);
1946 __ ShiftLeft_(scratch1, scratch1, scratch2);
1947 // Check that the *signed* result fits in a smi
1948 __ JumpIfNotSmiCandidate(scratch1, scratch2, &stub_call);
1949 __ SmiTag(right, scratch1);
1950#endif
1951 break;
1952 }
1953 case Token::SHR: {
1954 __ SmiUntag(scratch1, left);
1955 __ GetLeastBitsFromSmi(scratch2, right, 5);
1956 __ srw(scratch1, scratch1, scratch2);
1957 // Unsigned shift is not allowed to produce a negative number.
1958 __ JumpIfNotUnsignedSmiCandidate(scratch1, r0, &stub_call);
1959 __ SmiTag(right, scratch1);
1960 break;
1961 }
1962 case Token::ADD: {
1963 __ AddAndCheckForOverflow(scratch1, left, right, scratch2, r0);
1964 __ BranchOnOverflow(&stub_call);
1965 __ mr(right, scratch1);
1966 break;
1967 }
1968 case Token::SUB: {
1969 __ SubAndCheckForOverflow(scratch1, left, right, scratch2, r0);
1970 __ BranchOnOverflow(&stub_call);
1971 __ mr(right, scratch1);
1972 break;
1973 }
1974 case Token::MUL: {
1975 Label mul_zero;
1976#if V8_TARGET_ARCH_PPC64
1977 // Remove tag from both operands.
1978 __ SmiUntag(ip, right);
1979 __ SmiUntag(r0, left);
1980 __ Mul(scratch1, r0, ip);
1981 // Check for overflowing the smi range - no overflow if higher 33 bits of
1982 // the result are identical.
1983 __ TestIfInt32(scratch1, r0);
1984 __ bne(&stub_call);
1985#else
1986 __ SmiUntag(ip, right);
1987 __ mullw(scratch1, left, ip);
1988 __ mulhw(scratch2, left, ip);
1989 // Check for overflowing the smi range - no overflow if higher 33 bits of
1990 // the result are identical.
1991 __ TestIfInt32(scratch2, scratch1, ip);
1992 __ bne(&stub_call);
1993#endif
1994 // Go slow on zero result to handle -0.
1995 __ cmpi(scratch1, Operand::Zero());
1996 __ beq(&mul_zero);
1997#if V8_TARGET_ARCH_PPC64
1998 __ SmiTag(right, scratch1);
1999#else
2000 __ mr(right, scratch1);
2001#endif
2002 __ b(&done);
2003 // The product is zero, but it must be -0 if exactly one of the operands
2004 // was negative. We know at least one of them was zero.
2005 __ bind(&mul_zero);
2006 __ add(scratch2, right, left);
2007 __ cmpi(scratch2, Operand::Zero());
2008 __ blt(&stub_call);
2009 __ LoadSmiLiteral(right, Smi::FromInt(0));
2010 break;
2011 }
2012 case Token::BIT_OR:
2013 __ orx(right, left, right);
2014 break;
2015 case Token::BIT_AND:
2016 __ and_(right, left, right);
2017 break;
2018 case Token::BIT_XOR:
2019 __ xor_(right, left, right);
2020 break;
2021 default:
2022 UNREACHABLE();
2023 }
2024
2025 __ bind(&done);
2026 context()->Plug(r3);
2027}
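// Annotation (added): the JumpPatchSite above initially emits a never-taken
// branch, so both operands first flow to the BinaryOpIC stub call; once type
// feedback sees smi inputs, PatchInlinedSmiCode rewrites the site into a real
// smi test and the inline fast path above is taken instead.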
2028
2029
2030void FullCodeGenerator::EmitClassDefineProperties(ClassLiteral* lit) {
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00002031 for (int i = 0; i < lit->properties()->length(); i++) {
2032 ObjectLiteral::Property* property = lit->properties()->at(i);
2033 Expression* value = property->value();
2034
Ben Murdoch097c5b22016-05-18 11:27:45 +01002035 Register scratch = r4;
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00002036 if (property->is_static()) {
2037 __ LoadP(scratch, MemOperand(sp, kPointerSize)); // constructor
2038 } else {
2039 __ LoadP(scratch, MemOperand(sp, 0)); // prototype
2040 }
Ben Murdoch097c5b22016-05-18 11:27:45 +01002041 PushOperand(scratch);
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00002042 EmitPropertyKey(property, lit->GetIdForProperty(i));
2043
2044 // The static "prototype" property is read-only. We handle the non-computed
2045 // property name case in the parser. Since this is the only case where we
2046 // need to check for an own read-only property, we special-case it so we do
2047 // not need to perform the check for every property.
2048 if (property->is_static() && property->is_computed_name()) {
2049 __ CallRuntime(Runtime::kThrowIfStaticPrototype);
2050 __ push(r3);
2051 }
2052
2053 VisitForStackValue(value);
2054 if (NeedsHomeObject(value)) {
2055 EmitSetHomeObject(value, 2, property->GetSlot());
2056 }
2057
2058 switch (property->kind()) {
2059 case ObjectLiteral::Property::CONSTANT:
2060 case ObjectLiteral::Property::MATERIALIZED_LITERAL:
2061 case ObjectLiteral::Property::PROTOTYPE:
2062 UNREACHABLE();
2063 case ObjectLiteral::Property::COMPUTED:
Ben Murdoch097c5b22016-05-18 11:27:45 +01002064 PushOperand(Smi::FromInt(DONT_ENUM));
2065 PushOperand(Smi::FromInt(property->NeedsSetFunctionName()));
2066 CallRuntimeWithOperands(Runtime::kDefineDataPropertyInLiteral);
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00002067 break;
2068
2069 case ObjectLiteral::Property::GETTER:
Ben Murdoch097c5b22016-05-18 11:27:45 +01002070 PushOperand(Smi::FromInt(DONT_ENUM));
2071 CallRuntimeWithOperands(Runtime::kDefineGetterPropertyUnchecked);
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00002072 break;
2073
2074 case ObjectLiteral::Property::SETTER:
Ben Murdoch097c5b22016-05-18 11:27:45 +01002075 PushOperand(Smi::FromInt(DONT_ENUM));
2076 CallRuntimeWithOperands(Runtime::kDefineSetterPropertyUnchecked);
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00002077 break;
2078
2079 default:
2080 UNREACHABLE();
2081 }
2082 }
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00002083}
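// Annotation (added): while this runs, the class literal keeps the
// constructor and its prototype on the operand stack; each iteration picks
// the right receiver (the constructor for static members, the prototype
// otherwise) and defines the member with DONT_ENUM attributes.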
2084
2085
2086void FullCodeGenerator::EmitBinaryOp(BinaryOperation* expr, Token::Value op) {
Ben Murdoch097c5b22016-05-18 11:27:45 +01002087 PopOperand(r4);
2088 Handle<Code> code = CodeFactory::BinaryOpIC(isolate(), op).code();
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00002089 JumpPatchSite patch_site(masm_); // unbound, signals no inlined smi code.
2090 CallIC(code, expr->BinaryOperationFeedbackId());
2091 patch_site.EmitPatchInfo();
2092 context()->Plug(r3);
2093}
2094
2095
2096void FullCodeGenerator::EmitAssignment(Expression* expr,
2097 FeedbackVectorSlot slot) {
2098 DCHECK(expr->IsValidReferenceExpressionOrThis());
2099
2100 Property* prop = expr->AsProperty();
2101 LhsKind assign_type = Property::GetAssignType(prop);
2102
2103 switch (assign_type) {
2104 case VARIABLE: {
2105 Variable* var = expr->AsVariableProxy()->var();
2106 EffectContext context(this);
2107 EmitVariableAssignment(var, Token::ASSIGN, slot);
2108 break;
2109 }
2110 case NAMED_PROPERTY: {
Ben Murdoch097c5b22016-05-18 11:27:45 +01002111 PushOperand(r3); // Preserve value.
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00002112 VisitForAccumulatorValue(prop->obj());
2113 __ Move(StoreDescriptor::ReceiverRegister(), r3);
Ben Murdoch097c5b22016-05-18 11:27:45 +01002114 PopOperand(StoreDescriptor::ValueRegister()); // Restore value.
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00002115 __ mov(StoreDescriptor::NameRegister(),
2116 Operand(prop->key()->AsLiteral()->value()));
2117 EmitLoadStoreICSlot(slot);
2118 CallStoreIC();
2119 break;
2120 }
2121 case NAMED_SUPER_PROPERTY: {
Ben Murdoch097c5b22016-05-18 11:27:45 +01002122 PushOperand(r3);
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00002123 VisitForStackValue(prop->obj()->AsSuperPropertyReference()->this_var());
2124 VisitForAccumulatorValue(
2125 prop->obj()->AsSuperPropertyReference()->home_object());
2126 // stack: value, this; r3: home_object
2127 Register scratch = r5;
2128 Register scratch2 = r6;
2129 __ mr(scratch, result_register()); // home_object
2130 __ LoadP(r3, MemOperand(sp, kPointerSize)); // value
2131 __ LoadP(scratch2, MemOperand(sp, 0)); // this
2132 __ StoreP(scratch2, MemOperand(sp, kPointerSize)); // this
2133 __ StoreP(scratch, MemOperand(sp, 0)); // home_object
2134 // stack: this, home_object; r3: value
2135 EmitNamedSuperPropertyStore(prop);
2136 break;
2137 }
2138 case KEYED_SUPER_PROPERTY: {
Ben Murdoch097c5b22016-05-18 11:27:45 +01002139 PushOperand(r3);
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00002140 VisitForStackValue(prop->obj()->AsSuperPropertyReference()->this_var());
2141 VisitForStackValue(
2142 prop->obj()->AsSuperPropertyReference()->home_object());
2143 VisitForAccumulatorValue(prop->key());
2144 Register scratch = r5;
2145 Register scratch2 = r6;
2146 __ LoadP(scratch2, MemOperand(sp, 2 * kPointerSize)); // value
2147 // stack: value, this, home_object; r3: key, r6: value
2148 __ LoadP(scratch, MemOperand(sp, kPointerSize)); // this
2149 __ StoreP(scratch, MemOperand(sp, 2 * kPointerSize));
2150 __ LoadP(scratch, MemOperand(sp, 0)); // home_object
2151 __ StoreP(scratch, MemOperand(sp, kPointerSize));
2152 __ StoreP(r3, MemOperand(sp, 0));
2153 __ Move(r3, scratch2);
2154 // stack: this, home_object, key; r3: value.
2155 EmitKeyedSuperPropertyStore(prop);
2156 break;
2157 }
2158 case KEYED_PROPERTY: {
Ben Murdoch097c5b22016-05-18 11:27:45 +01002159 PushOperand(r3); // Preserve value.
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00002160 VisitForStackValue(prop->obj());
2161 VisitForAccumulatorValue(prop->key());
2162 __ Move(StoreDescriptor::NameRegister(), r3);
Ben Murdoch097c5b22016-05-18 11:27:45 +01002163 PopOperands(StoreDescriptor::ValueRegister(),
2164 StoreDescriptor::ReceiverRegister());
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00002165 EmitLoadStoreICSlot(slot);
2166 Handle<Code> ic =
2167 CodeFactory::KeyedStoreIC(isolate(), language_mode()).code();
2168 CallIC(ic);
2169 break;
2170 }
2171 }
2172 context()->Plug(r3);
2173}
2174
2175
2176void FullCodeGenerator::EmitStoreToStackLocalOrContextSlot(
2177 Variable* var, MemOperand location) {
2178 __ StoreP(result_register(), location, r0);
2179 if (var->IsContextSlot()) {
2180 // RecordWrite may destroy all its register arguments.
2181 __ mr(r6, result_register());
2182 int offset = Context::SlotOffset(var->index());
2183 __ RecordWriteContextSlot(r4, offset, r6, r5, kLRHasBeenSaved,
2184 kDontSaveFPRegs);
2185 }
2186}
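// Annotation (added): the write barrier is needed only for context slots
// because the context is a heap object; stores into stack slots are always
// found by the GC when it scans the frame.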
2187
2188
2189void FullCodeGenerator::EmitVariableAssignment(Variable* var, Token::Value op,
2190 FeedbackVectorSlot slot) {
2191 if (var->IsUnallocated()) {
2192 // Global var, const, or let.
2193 __ mov(StoreDescriptor::NameRegister(), Operand(var->name()));
2194 __ LoadGlobalObject(StoreDescriptor::ReceiverRegister());
2195 EmitLoadStoreICSlot(slot);
2196 CallStoreIC();
2197
2198 } else if (var->mode() == LET && op != Token::INIT) {
2199 // Non-initializing assignment to let variable needs a write barrier.
2200 DCHECK(!var->IsLookupSlot());
2201 DCHECK(var->IsStackAllocated() || var->IsContextSlot());
2202 Label assign;
2203 MemOperand location = VarOperand(var, r4);
2204 __ LoadP(r6, location);
2205 __ CompareRoot(r6, Heap::kTheHoleValueRootIndex);
2206 __ bne(&assign);
2207 __ mov(r6, Operand(var->name()));
2208 __ push(r6);
2209 __ CallRuntime(Runtime::kThrowReferenceError);
2210 // Perform the assignment.
2211 __ bind(&assign);
2212 EmitStoreToStackLocalOrContextSlot(var, location);
2213
2214 } else if (var->mode() == CONST && op != Token::INIT) {
2215 // Assignment to const variable needs a write barrier.
2216 DCHECK(!var->IsLookupSlot());
2217 DCHECK(var->IsStackAllocated() || var->IsContextSlot());
2218 Label const_error;
2219 MemOperand location = VarOperand(var, r4);
2220 __ LoadP(r6, location);
2221 __ CompareRoot(r6, Heap::kTheHoleValueRootIndex);
2222 __ bne(&const_error);
2223 __ mov(r6, Operand(var->name()));
2224 __ push(r6);
2225 __ CallRuntime(Runtime::kThrowReferenceError);
2226 __ bind(&const_error);
2227 __ CallRuntime(Runtime::kThrowConstAssignError);
2228
2229 } else if (var->is_this() && var->mode() == CONST && op == Token::INIT) {
2230 // Initializing assignment to const {this} needs a write barrier.
2231 DCHECK(var->IsStackAllocated() || var->IsContextSlot());
2232 Label uninitialized_this;
2233 MemOperand location = VarOperand(var, r4);
2234 __ LoadP(r6, location);
2235 __ CompareRoot(r6, Heap::kTheHoleValueRootIndex);
2236 __ beq(&uninitialized_this);
2237 __ mov(r4, Operand(var->name()));
2238 __ push(r4);
2239 __ CallRuntime(Runtime::kThrowReferenceError);
2240 __ bind(&uninitialized_this);
2241 EmitStoreToStackLocalOrContextSlot(var, location);
2242
Ben Murdochc5610432016-08-08 18:44:38 +01002243 } else if (!var->is_const_mode() || op == Token::INIT) {
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00002244 if (var->IsLookupSlot()) {
2245 // Assignment to var.
Ben Murdoch097c5b22016-05-18 11:27:45 +01002246 __ Push(var->name());
2247 __ Push(r3);
2248 __ CallRuntime(is_strict(language_mode())
2249 ? Runtime::kStoreLookupSlot_Strict
2250 : Runtime::kStoreLookupSlot_Sloppy);
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00002251 } else {
2252 // Assignment to var or initializing assignment to let/const in harmony
2253 // mode.
2254 DCHECK((var->IsStackAllocated() || var->IsContextSlot()));
2255 MemOperand location = VarOperand(var, r4);
Ben Murdoch097c5b22016-05-18 11:27:45 +01002256 if (FLAG_debug_code && var->mode() == LET && op == Token::INIT) {
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00002257 // Check for an uninitialized let binding.
2258 __ LoadP(r5, location);
2259 __ CompareRoot(r5, Heap::kTheHoleValueRootIndex);
2260 __ Check(eq, kLetBindingReInitialization);
2261 }
2262 EmitStoreToStackLocalOrContextSlot(var, location);
2263 }
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00002264 } else {
2265 DCHECK(var->mode() == CONST_LEGACY && op != Token::INIT);
2266 if (is_strict(language_mode())) {
2267 __ CallRuntime(Runtime::kThrowConstAssignError);
2268 }
2269 // Silently ignore store in sloppy mode.
2270 }
2271}
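// Annotation (added, illustrative JS for the cases above; assumes the usual
// full-codegen lowering):
//   'x = 1' where x is 'let'   -> hole check, then store (LET, non-INIT).
//   'y = 2' where y is 'const' -> throws: ReferenceError while y is still
//                                 the hole, ConstAssignError otherwise.
//   the const 'this' binding is only storable at INIT (e.g. by super() in a
//   derived constructor).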
2272
2273
2274void FullCodeGenerator::EmitNamedPropertyAssignment(Assignment* expr) {
2275 // Assignment to a property, using a named store IC.
2276 Property* prop = expr->target()->AsProperty();
2277 DCHECK(prop != NULL);
2278 DCHECK(prop->key()->IsLiteral());
2279
2280 __ mov(StoreDescriptor::NameRegister(),
2281 Operand(prop->key()->AsLiteral()->value()));
Ben Murdoch097c5b22016-05-18 11:27:45 +01002282 PopOperand(StoreDescriptor::ReceiverRegister());
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00002283 EmitLoadStoreICSlot(expr->AssignmentSlot());
2284 CallStoreIC();
2285
Ben Murdochc5610432016-08-08 18:44:38 +01002286 PrepareForBailoutForId(expr->AssignmentId(), BailoutState::TOS_REGISTER);
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00002287 context()->Plug(r3);
2288}
2289
2290
2291void FullCodeGenerator::EmitNamedSuperPropertyStore(Property* prop) {
2292 // Assignment to named property of super.
2293 // r3 : value
2294 // stack : receiver ('this'), home_object
2295 DCHECK(prop != NULL);
2296 Literal* key = prop->key()->AsLiteral();
2297 DCHECK(key != NULL);
2298
Ben Murdoch097c5b22016-05-18 11:27:45 +01002299 PushOperand(key->value());
2300 PushOperand(r3);
2301 CallRuntimeWithOperands((is_strict(language_mode())
2302 ? Runtime::kStoreToSuper_Strict
2303 : Runtime::kStoreToSuper_Sloppy));
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00002304}
2305
2306
2307void FullCodeGenerator::EmitKeyedSuperPropertyStore(Property* prop) {
2308 // Assignment to keyed property of super.
2309 // r3 : value
2310 // stack : receiver ('this'), home_object, key
2311 DCHECK(prop != NULL);
2312
Ben Murdoch097c5b22016-05-18 11:27:45 +01002313 PushOperand(r3);
2314 CallRuntimeWithOperands((is_strict(language_mode())
2315 ? Runtime::kStoreKeyedToSuper_Strict
2316 : Runtime::kStoreKeyedToSuper_Sloppy));
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00002317}
2318
2319
2320void FullCodeGenerator::EmitKeyedPropertyAssignment(Assignment* expr) {
2321 // Assignment to a property, using a keyed store IC.
Ben Murdoch097c5b22016-05-18 11:27:45 +01002322 PopOperands(StoreDescriptor::ReceiverRegister(),
2323 StoreDescriptor::NameRegister());
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00002324 DCHECK(StoreDescriptor::ValueRegister().is(r3));
2325
2326 Handle<Code> ic =
2327 CodeFactory::KeyedStoreIC(isolate(), language_mode()).code();
2328 EmitLoadStoreICSlot(expr->AssignmentSlot());
2329 CallIC(ic);
2330
Ben Murdochc5610432016-08-08 18:44:38 +01002331 PrepareForBailoutForId(expr->AssignmentId(), BailoutState::TOS_REGISTER);
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00002332 context()->Plug(r3);
2333}
2334
2335
2336void FullCodeGenerator::CallIC(Handle<Code> code, TypeFeedbackId ast_id) {
2337 ic_total_count_++;
2338 __ Call(code, RelocInfo::CODE_TARGET, ast_id);
2339}
2340
2341
2342// Code common for calls using the IC.
2343void FullCodeGenerator::EmitCallWithLoadIC(Call* expr) {
2344 Expression* callee = expr->expression();
2345
2346 // Get the target function.
2347 ConvertReceiverMode convert_mode;
2348 if (callee->IsVariableProxy()) {
2349 {
2350 StackValueContext context(this);
2351 EmitVariableLoad(callee->AsVariableProxy());
Ben Murdochc5610432016-08-08 18:44:38 +01002352 PrepareForBailout(callee, BailoutState::NO_REGISTERS);
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00002353 }
2354 // Push undefined as receiver. This is patched in the method prologue if it
2355 // is a sloppy mode method.
2356 __ LoadRoot(r0, Heap::kUndefinedValueRootIndex);
Ben Murdoch097c5b22016-05-18 11:27:45 +01002357 PushOperand(r0);
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00002358 convert_mode = ConvertReceiverMode::kNullOrUndefined;
2359 } else {
2360 // Load the function from the receiver.
2361 DCHECK(callee->IsProperty());
2362 DCHECK(!callee->AsProperty()->IsSuperAccess());
2363 __ LoadP(LoadDescriptor::ReceiverRegister(), MemOperand(sp, 0));
2364 EmitNamedPropertyLoad(callee->AsProperty());
Ben Murdochc5610432016-08-08 18:44:38 +01002365 PrepareForBailoutForId(callee->AsProperty()->LoadId(),
2366 BailoutState::TOS_REGISTER);
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00002367 // Push the target function under the receiver.
2368 __ LoadP(r0, MemOperand(sp, 0));
Ben Murdoch097c5b22016-05-18 11:27:45 +01002369 PushOperand(r0);
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00002370 __ StoreP(r3, MemOperand(sp, kPointerSize));
2371 convert_mode = ConvertReceiverMode::kNotNullOrUndefined;
2372 }
2373
2374 EmitCall(expr, convert_mode);
2375}
2376
2377
2378void FullCodeGenerator::EmitSuperCallWithLoadIC(Call* expr) {
2379 Expression* callee = expr->expression();
2380 DCHECK(callee->IsProperty());
2381 Property* prop = callee->AsProperty();
2382 DCHECK(prop->IsSuperAccess());
2383 SetExpressionPosition(prop);
2384
2385 Literal* key = prop->key()->AsLiteral();
2386 DCHECK(!key->value()->IsSmi());
2387 // Load the function from the receiver.
2388 const Register scratch = r4;
2389 SuperPropertyReference* super_ref = prop->obj()->AsSuperPropertyReference();
2390 VisitForAccumulatorValue(super_ref->home_object());
2391 __ mr(scratch, r3);
2392 VisitForAccumulatorValue(super_ref->this_var());
Ben Murdoch097c5b22016-05-18 11:27:45 +01002393 PushOperands(scratch, r3, r3, scratch);
2394 PushOperand(key->value());
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00002395
2396 // Stack here:
2397 // - home_object
2398 // - this (receiver)
2399 // - this (receiver) <-- LoadFromSuper will pop here and below.
2400 // - home_object
2401 // - key
Ben Murdoch097c5b22016-05-18 11:27:45 +01002402 CallRuntimeWithOperands(Runtime::kLoadFromSuper);
Ben Murdochc5610432016-08-08 18:44:38 +01002403 PrepareForBailoutForId(prop->LoadId(), BailoutState::TOS_REGISTER);
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00002404
2405 // Replace home_object with target function.
2406 __ StoreP(r3, MemOperand(sp, kPointerSize));
2407
2408 // Stack here:
2409 // - target function
2410 // - this (receiver)
2411 EmitCall(expr);
2412}
2413
2414
2415// Code common for calls using the IC.
2416void FullCodeGenerator::EmitKeyedCallWithLoadIC(Call* expr, Expression* key) {
2417 // Load the key.
2418 VisitForAccumulatorValue(key);
2419
2420 Expression* callee = expr->expression();
2421
2422 // Load the function from the receiver.
2423 DCHECK(callee->IsProperty());
2424 __ LoadP(LoadDescriptor::ReceiverRegister(), MemOperand(sp, 0));
2425 __ Move(LoadDescriptor::NameRegister(), r3);
2426 EmitKeyedPropertyLoad(callee->AsProperty());
Ben Murdochc5610432016-08-08 18:44:38 +01002427 PrepareForBailoutForId(callee->AsProperty()->LoadId(),
2428 BailoutState::TOS_REGISTER);
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00002429
2430 // Push the target function under the receiver.
2431 __ LoadP(ip, MemOperand(sp, 0));
Ben Murdoch097c5b22016-05-18 11:27:45 +01002432 PushOperand(ip);
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00002433 __ StoreP(r3, MemOperand(sp, kPointerSize));
2434
2435 EmitCall(expr, ConvertReceiverMode::kNotNullOrUndefined);
2436}
2437
2438
2439void FullCodeGenerator::EmitKeyedSuperCallWithLoadIC(Call* expr) {
2440 Expression* callee = expr->expression();
2441 DCHECK(callee->IsProperty());
2442 Property* prop = callee->AsProperty();
2443 DCHECK(prop->IsSuperAccess());
2444
2445 SetExpressionPosition(prop);
2446 // Load the function from the receiver.
2447 const Register scratch = r4;
2448 SuperPropertyReference* super_ref = prop->obj()->AsSuperPropertyReference();
2449 VisitForAccumulatorValue(super_ref->home_object());
2450 __ mr(scratch, r3);
2451 VisitForAccumulatorValue(super_ref->this_var());
Ben Murdoch097c5b22016-05-18 11:27:45 +01002452 PushOperands(scratch, r3, r3, scratch);
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00002453 VisitForStackValue(prop->key());
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00002454
2455 // Stack here:
2456 // - home_object
2457 // - this (receiver)
2458 // - this (receiver) <-- LoadKeyedFromSuper will pop here and below.
2459 // - home_object
2460 // - key
Ben Murdoch097c5b22016-05-18 11:27:45 +01002461 CallRuntimeWithOperands(Runtime::kLoadKeyedFromSuper);
Ben Murdochc5610432016-08-08 18:44:38 +01002462 PrepareForBailoutForId(prop->LoadId(), BailoutState::TOS_REGISTER);
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00002463
2464 // Replace home_object with target function.
2465 __ StoreP(r3, MemOperand(sp, kPointerSize));
2466
2467 // Stack here:
2468 // - target function
2469 // - this (receiver)
2470 EmitCall(expr);
2471}
2472
2473
2474void FullCodeGenerator::EmitCall(Call* expr, ConvertReceiverMode mode) {
2475 // Load the arguments.
2476 ZoneList<Expression*>* args = expr->arguments();
2477 int arg_count = args->length();
2478 for (int i = 0; i < arg_count; i++) {
2479 VisitForStackValue(args->at(i));
2480 }
2481
Ben Murdochc5610432016-08-08 18:44:38 +01002482 PrepareForBailoutForId(expr->CallId(), BailoutState::NO_REGISTERS);
Ben Murdochda12d292016-06-02 14:46:10 +01002483 SetCallPosition(expr, expr->tail_call_mode());
Ben Murdoch097c5b22016-05-18 11:27:45 +01002484 if (expr->tail_call_mode() == TailCallMode::kAllow) {
2485 if (FLAG_trace) {
2486 __ CallRuntime(Runtime::kTraceTailCall);
2487 }
2488 // Update profiling counters before the tail call since we will
2489 // not return to this function.
2490 EmitProfilingCounterHandlingForReturnSequence(true);
2491 }
2492 Handle<Code> ic =
2493 CodeFactory::CallIC(isolate(), arg_count, mode, expr->tail_call_mode())
2494 .code();
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00002495 __ LoadSmiLiteral(r6, SmiFromSlot(expr->CallFeedbackICSlot()));
2496 __ LoadP(r4, MemOperand(sp, (arg_count + 1) * kPointerSize), r0);
2497 // Don't assign a type feedback id to the IC, since type feedback is provided
2498 // by the vector above.
2499 CallIC(ic);
Ben Murdoch097c5b22016-05-18 11:27:45 +01002500 OperandStackDepthDecrement(arg_count + 1);
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00002501
2502 RecordJSReturnSite(expr);
Ben Murdochc5610432016-08-08 18:44:38 +01002503 RestoreContext();
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00002504 context()->DropAndPlug(1, r3);
2505}
2506
2507
Ben Murdochc5610432016-08-08 18:44:38 +01002508void FullCodeGenerator::EmitResolvePossiblyDirectEval(Call* expr) {
2509 int arg_count = expr->arguments()->length();
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00002510 // r7: copy of the first argument or undefined if it doesn't exist.
2511 if (arg_count > 0) {
2512 __ LoadP(r7, MemOperand(sp, arg_count * kPointerSize), r0);
2513 } else {
2514 __ LoadRoot(r7, Heap::kUndefinedValueRootIndex);
2515 }
2516
2517 // r6: the receiver of the enclosing function.
2518 __ LoadP(r6, MemOperand(fp, JavaScriptFrameConstants::kFunctionOffset));
2519
2520 // r5: language mode.
2521 __ LoadSmiLiteral(r5, Smi::FromInt(language_mode()));
2522
2523 // r4: the start position of the scope the call resides in.
2524 __ LoadSmiLiteral(r4, Smi::FromInt(scope()->start_position()));
2525
Ben Murdochc5610432016-08-08 18:44:38 +01002526 // r3: the source position of the eval call.
2527 __ LoadSmiLiteral(r3, Smi::FromInt(expr->position()));
2528
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00002529 // Do the runtime call.
Ben Murdochc5610432016-08-08 18:44:38 +01002530 __ Push(r7, r6, r5, r4, r3);
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00002531 __ CallRuntime(Runtime::kResolvePossiblyDirectEval);
2532}
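// Annotation (added): together with the function copy pushed by the caller,
// Runtime::kResolvePossiblyDirectEval thus receives the callee, the first
// argument (or undefined), the enclosing function, the language mode, the
// scope start position and the eval call position.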
2533
2534
2535// See http://www.ecma-international.org/ecma-262/6.0/#sec-function-calls.
2536void FullCodeGenerator::PushCalleeAndWithBaseObject(Call* expr) {
2537 VariableProxy* callee = expr->expression()->AsVariableProxy();
2538 if (callee->var()->IsLookupSlot()) {
2539 Label slow, done;
2540 SetExpressionPosition(callee);
2541 // Generate code for loading from variables potentially shadowed by
2542 // eval-introduced variables.
2543 EmitDynamicLookupFastCase(callee, NOT_INSIDE_TYPEOF, &slow, &done);
2544
2545 __ bind(&slow);
2546 // Call the runtime to find the function to call (returned in r3) and
2547 // the object holding it (returned in r4).
Ben Murdoch097c5b22016-05-18 11:27:45 +01002548 __ Push(callee->name());
2549 __ CallRuntime(Runtime::kLoadLookupSlotForCall);
2550 PushOperands(r3, r4); // Function, receiver.
Ben Murdochc5610432016-08-08 18:44:38 +01002551 PrepareForBailoutForId(expr->LookupId(), BailoutState::NO_REGISTERS);
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00002552
2553 // If fast case code has been generated, emit code to push the function
2554 // and receiver and have the slow path jump around this code.
2555 if (done.is_linked()) {
2556 Label call;
2557 __ b(&call);
2558 __ bind(&done);
2559 // Push function.
2560 __ push(r3);
2561 // Pass undefined as the receiver, which is the WithBaseObject of a
2562 // non-object environment record. If the callee is sloppy, it will patch
2563 // it up to be the global receiver.
2564 __ LoadRoot(r4, Heap::kUndefinedValueRootIndex);
2565 __ push(r4);
2566 __ bind(&call);
2567 }
2568 } else {
2569 VisitForStackValue(callee);
2570 // refEnv.WithBaseObject()
2571 __ LoadRoot(r5, Heap::kUndefinedValueRootIndex);
Ben Murdoch097c5b22016-05-18 11:27:45 +01002572 PushOperand(r5); // Reserved receiver slot.
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00002573 }
2574}
2575
2576
2577void FullCodeGenerator::EmitPossiblyEvalCall(Call* expr) {
Ben Murdochc5610432016-08-08 18:44:38 +01002578 // In a call to eval, we first call
2579 // Runtime_ResolvePossiblyDirectEval to resolve the function we need
2580 // to call. Then we call the resolved function using the given arguments.
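  // Annotation (added, illustrative): for a direct 'eval("1 + 1")' with one
  // argument, the stack below the resolver's own operands is
  // [callee, receiver, "1 + 1", callee copy]; the resolved function comes
  // back in r3 and is written over the original callee slot below.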
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00002581 ZoneList<Expression*>* args = expr->arguments();
2582 int arg_count = args->length();
2583
2584 PushCalleeAndWithBaseObject(expr);
2585
2586 // Push the arguments.
2587 for (int i = 0; i < arg_count; i++) {
2588 VisitForStackValue(args->at(i));
2589 }
2590
2591 // Push a copy of the function (found below the arguments) and
2592 // resolve eval.
2593 __ LoadP(r4, MemOperand(sp, (arg_count + 1) * kPointerSize), r0);
2594 __ push(r4);
Ben Murdochc5610432016-08-08 18:44:38 +01002595 EmitResolvePossiblyDirectEval(expr);
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00002596
2597 // Touch up the stack with the resolved function.
2598 __ StoreP(r3, MemOperand(sp, (arg_count + 1) * kPointerSize), r0);
2599
Ben Murdochc5610432016-08-08 18:44:38 +01002600 PrepareForBailoutForId(expr->EvalId(), BailoutState::NO_REGISTERS);
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00002601
2602 // Record source position for debugger.
2603 SetCallPosition(expr);
2604 __ LoadP(r4, MemOperand(sp, (arg_count + 1) * kPointerSize), r0);
2605 __ mov(r3, Operand(arg_count));
Ben Murdoch097c5b22016-05-18 11:27:45 +01002606 __ Call(isolate()->builtins()->Call(ConvertReceiverMode::kAny,
2607 expr->tail_call_mode()),
2608 RelocInfo::CODE_TARGET);
2609 OperandStackDepthDecrement(arg_count + 1);
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00002610 RecordJSReturnSite(expr);
Ben Murdochc5610432016-08-08 18:44:38 +01002611 RestoreContext();
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00002612 context()->DropAndPlug(1, r3);
2613}
2614
2615
2616void FullCodeGenerator::VisitCallNew(CallNew* expr) {
2617 Comment cmnt(masm_, "[ CallNew");
2618 // According to ECMA-262, section 11.2.2, page 44, the function
2619 // expression in new calls must be evaluated before the
2620 // arguments.
2621
2622 // Push constructor on the stack. If it's not a function it's used as
2623 // receiver for CALL_NON_FUNCTION, otherwise the value on the stack is
2624 // ignored.
2625 DCHECK(!expr->expression()->IsSuperPropertyReference());
2626 VisitForStackValue(expr->expression());
2627
2628 // Push the arguments ("left-to-right") on the stack.
2629 ZoneList<Expression*>* args = expr->arguments();
2630 int arg_count = args->length();
2631 for (int i = 0; i < arg_count; i++) {
2632 VisitForStackValue(args->at(i));
2633 }
2634
2635 // Call the construct call builtin that handles allocation and
2636 // constructor invocation.
2637 SetConstructCallPosition(expr);
2638
2639 // Load function and argument count into r4 and r3.
2640 __ mov(r3, Operand(arg_count));
2641 __ LoadP(r4, MemOperand(sp, arg_count * kPointerSize), r0);
2642
2643 // Record call targets in unoptimized code.
2644 __ EmitLoadTypeFeedbackVector(r5);
2645 __ LoadSmiLiteral(r6, SmiFromSlot(expr->CallNewFeedbackSlot()));
2646
2647 CallConstructStub stub(isolate());
2648 __ Call(stub.GetCode(), RelocInfo::CODE_TARGET);
Ben Murdoch097c5b22016-05-18 11:27:45 +01002649 OperandStackDepthDecrement(arg_count + 1);
Ben Murdochc5610432016-08-08 18:44:38 +01002650 PrepareForBailoutForId(expr->ReturnId(), BailoutState::TOS_REGISTER);
2651 RestoreContext();
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00002652 context()->Plug(r3);
2653}
2654
2655
2656void FullCodeGenerator::EmitSuperConstructorCall(Call* expr) {
2657 SuperCallReference* super_call_ref =
2658 expr->expression()->AsSuperCallReference();
2659 DCHECK_NOT_NULL(super_call_ref);
2660
2661 // Push the super constructor target on the stack (may be null,
2662 // but the Construct builtin can deal with that properly).
2663 VisitForAccumulatorValue(super_call_ref->this_function_var());
2664 __ AssertFunction(result_register());
2665 __ LoadP(result_register(),
2666 FieldMemOperand(result_register(), HeapObject::kMapOffset));
2667 __ LoadP(result_register(),
2668 FieldMemOperand(result_register(), Map::kPrototypeOffset));
Ben Murdoch097c5b22016-05-18 11:27:45 +01002669 PushOperand(result_register());
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00002670
2671 // Push the arguments ("left-to-right") on the stack.
2672 ZoneList<Expression*>* args = expr->arguments();
2673 int arg_count = args->length();
2674 for (int i = 0; i < arg_count; i++) {
2675 VisitForStackValue(args->at(i));
2676 }
2677
2678 // Call the construct call builtin that handles allocation and
2679 // constructor invocation.
2680 SetConstructCallPosition(expr);
2681
2682 // Load new target into r6.
2683 VisitForAccumulatorValue(super_call_ref->new_target_var());
2684 __ mr(r6, result_register());
2685
2686 // Load function and argument count into r4 and r3.
2687 __ mov(r3, Operand(arg_count));
2688 __ LoadP(r4, MemOperand(sp, arg_count * kPointerSize));
2689
2690 __ Call(isolate()->builtins()->Construct(), RelocInfo::CODE_TARGET);
Ben Murdoch097c5b22016-05-18 11:27:45 +01002691 OperandStackDepthDecrement(arg_count + 1);
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00002692
2693 RecordJSReturnSite(expr);
Ben Murdochc5610432016-08-08 18:44:38 +01002694 RestoreContext();
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00002695 context()->Plug(r3);
2696}
2697
2698
2699void FullCodeGenerator::EmitIsSmi(CallRuntime* expr) {
2700 ZoneList<Expression*>* args = expr->arguments();
2701 DCHECK(args->length() == 1);
2702
2703 VisitForAccumulatorValue(args->at(0));
2704
2705 Label materialize_true, materialize_false;
2706 Label* if_true = NULL;
2707 Label* if_false = NULL;
2708 Label* fall_through = NULL;
2709 context()->PrepareTest(&materialize_true, &materialize_false, &if_true,
2710 &if_false, &fall_through);
2711
2712 PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
2713 __ TestIfSmi(r3, r0);
2714 Split(eq, if_true, if_false, fall_through, cr0);
2715
2716 context()->Plug(if_true, if_false);
2717}
2718
2719
2720void FullCodeGenerator::EmitIsJSReceiver(CallRuntime* expr) {
2721 ZoneList<Expression*>* args = expr->arguments();
2722 DCHECK(args->length() == 1);
2723
2724 VisitForAccumulatorValue(args->at(0));
2725
2726 Label materialize_true, materialize_false;
2727 Label* if_true = NULL;
2728 Label* if_false = NULL;
2729 Label* fall_through = NULL;
2730 context()->PrepareTest(&materialize_true, &materialize_false, &if_true,
2731 &if_false, &fall_through);
2732
2733 __ JumpIfSmi(r3, if_false);
2734 __ CompareObjectType(r3, r4, r4, FIRST_JS_RECEIVER_TYPE);
2735 PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
2736 Split(ge, if_true, if_false, fall_through);
2737
2738 context()->Plug(if_true, if_false);
2739}
2740
2741
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00002742void FullCodeGenerator::EmitIsArray(CallRuntime* expr) {
2743 ZoneList<Expression*>* args = expr->arguments();
2744 DCHECK(args->length() == 1);
2745
2746 VisitForAccumulatorValue(args->at(0));
2747
2748 Label materialize_true, materialize_false;
2749 Label* if_true = NULL;
2750 Label* if_false = NULL;
2751 Label* fall_through = NULL;
2752 context()->PrepareTest(&materialize_true, &materialize_false, &if_true,
2753 &if_false, &fall_through);
2754
2755 __ JumpIfSmi(r3, if_false);
2756 __ CompareObjectType(r3, r4, r4, JS_ARRAY_TYPE);
2757 PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
2758 Split(eq, if_true, if_false, fall_through);
2759
2760 context()->Plug(if_true, if_false);
2761}
2762
2763
2764void FullCodeGenerator::EmitIsTypedArray(CallRuntime* expr) {
2765 ZoneList<Expression*>* args = expr->arguments();
2766 DCHECK(args->length() == 1);
2767
2768 VisitForAccumulatorValue(args->at(0));
2769
2770 Label materialize_true, materialize_false;
2771 Label* if_true = NULL;
2772 Label* if_false = NULL;
2773 Label* fall_through = NULL;
2774 context()->PrepareTest(&materialize_true, &materialize_false, &if_true,
2775 &if_false, &fall_through);
2776
2777 __ JumpIfSmi(r3, if_false);
2778 __ CompareObjectType(r3, r4, r4, JS_TYPED_ARRAY_TYPE);
2779 PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
2780 Split(eq, if_true, if_false, fall_through);
2781
2782 context()->Plug(if_true, if_false);
2783}
2784
2785
2786void FullCodeGenerator::EmitIsRegExp(CallRuntime* expr) {
2787 ZoneList<Expression*>* args = expr->arguments();
2788 DCHECK(args->length() == 1);
2789
2790 VisitForAccumulatorValue(args->at(0));
2791
2792 Label materialize_true, materialize_false;
2793 Label* if_true = NULL;
2794 Label* if_false = NULL;
2795 Label* fall_through = NULL;
2796 context()->PrepareTest(&materialize_true, &materialize_false, &if_true,
2797 &if_false, &fall_through);
2798
2799 __ JumpIfSmi(r3, if_false);
2800 __ CompareObjectType(r3, r4, r4, JS_REGEXP_TYPE);
2801 PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
2802 Split(eq, if_true, if_false, fall_through);
2803
2804 context()->Plug(if_true, if_false);
2805}
2806
2807
2808void FullCodeGenerator::EmitIsJSProxy(CallRuntime* expr) {
2809 ZoneList<Expression*>* args = expr->arguments();
2810 DCHECK(args->length() == 1);
2811
2812 VisitForAccumulatorValue(args->at(0));
2813
2814 Label materialize_true, materialize_false;
2815 Label* if_true = NULL;
2816 Label* if_false = NULL;
2817 Label* fall_through = NULL;
2818 context()->PrepareTest(&materialize_true, &materialize_false, &if_true,
2819 &if_false, &fall_through);
2820
2821 __ JumpIfSmi(r3, if_false);
2822 __ CompareObjectType(r3, r4, r4, JS_PROXY_TYPE);
2823 PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
2824 Split(eq, if_true, if_false, fall_through);
2825
2826 context()->Plug(if_true, if_false);
2827}
2828
2829
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00002830void FullCodeGenerator::EmitClassOf(CallRuntime* expr) {
2831 ZoneList<Expression*>* args = expr->arguments();
2832 DCHECK(args->length() == 1);
2833 Label done, null, function, non_function_constructor;
2834
2835 VisitForAccumulatorValue(args->at(0));
2836
2837 // If the object is not a JSReceiver, we return null.
2838 __ JumpIfSmi(r3, &null);
2839 STATIC_ASSERT(LAST_JS_RECEIVER_TYPE == LAST_TYPE);
2840 __ CompareObjectType(r3, r3, r4, FIRST_JS_RECEIVER_TYPE);
2841 // Map is now in r3.
2842 __ blt(&null);
2843
2844  // Return 'Function' for JSFunction and JSBoundFunction objects.
2845  __ cmpli(r4, Operand(FIRST_FUNCTION_TYPE));
2846  STATIC_ASSERT(LAST_FUNCTION_TYPE == LAST_TYPE);
2847  __ bge(&function);
2848
2849 // Check if the constructor in the map is a JS function.
2850 Register instance_type = r5;
2851 __ GetMapConstructor(r3, r3, r4, instance_type);
2852 __ cmpi(instance_type, Operand(JS_FUNCTION_TYPE));
2853 __ bne(&non_function_constructor);
2854
2855 // r3 now contains the constructor function. Grab the
2856 // instance class name from there.
2857 __ LoadP(r3, FieldMemOperand(r3, JSFunction::kSharedFunctionInfoOffset));
2858 __ LoadP(r3,
2859 FieldMemOperand(r3, SharedFunctionInfo::kInstanceClassNameOffset));
2860 __ b(&done);
2861
2862 // Functions have class 'Function'.
2863 __ bind(&function);
2864 __ LoadRoot(r3, Heap::kFunction_stringRootIndex);
2865 __ b(&done);
2866
2867 // Objects with a non-function constructor have class 'Object'.
2868 __ bind(&non_function_constructor);
2869 __ LoadRoot(r3, Heap::kObject_stringRootIndex);
2870 __ b(&done);
2871
2872 // Non-JS objects have class null.
2873 __ bind(&null);
2874 __ LoadRoot(r3, Heap::kNullValueRootIndex);
2875
2876 // All done.
2877 __ bind(&done);
2878
2879 context()->Plug(r3);
2880}
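// Illustrative summary of the cases above (roughly): a smi or other
// non-receiver yields null; functions, including bound functions, yield
// "Function"; an ordinary object yields the instance class name of its
// constructor (e.g. "Object" for an object literal); and objects whose map
// constructor is not a JS function also yield "Object".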
2881
2882
2883void FullCodeGenerator::EmitValueOf(CallRuntime* expr) {
2884 ZoneList<Expression*>* args = expr->arguments();
2885 DCHECK(args->length() == 1);
2886 VisitForAccumulatorValue(args->at(0)); // Load the object.
2887
2888 Label done;
2889  // If the object is a smi, return the object.
2890 __ JumpIfSmi(r3, &done);
2891 // If the object is not a value type, return the object.
2892 __ CompareObjectType(r3, r4, r4, JS_VALUE_TYPE);
2893 __ bne(&done);
2894 __ LoadP(r3, FieldMemOperand(r3, JSValue::kValueOffset));
2895
2896 __ bind(&done);
2897 context()->Plug(r3);
2898}
2899
2900
2901void FullCodeGenerator::EmitOneByteSeqStringSetChar(CallRuntime* expr) {
2902 ZoneList<Expression*>* args = expr->arguments();
2903 DCHECK_EQ(3, args->length());
2904
2905 Register string = r3;
2906 Register index = r4;
2907 Register value = r5;
2908
2909 VisitForStackValue(args->at(0)); // index
2910 VisitForStackValue(args->at(1)); // value
2911 VisitForAccumulatorValue(args->at(2)); // string
2912  PopOperands(index, value);
2913
2914 if (FLAG_debug_code) {
2915 __ TestIfSmi(value, r0);
2916 __ Check(eq, kNonSmiValue, cr0);
2917 __ TestIfSmi(index, r0);
2918 __ Check(eq, kNonSmiIndex, cr0);
2919 __ SmiUntag(index, index);
2920 static const uint32_t one_byte_seq_type = kSeqStringTag | kOneByteStringTag;
2921 __ EmitSeqStringSetCharCheck(string, index, value, one_byte_seq_type);
2922 __ SmiTag(index, index);
2923 }
2924
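  // Store the character: untag the value, point ip at the first character
  // (header size minus the heap-object tag), and index by the byte offset
  // derived from the smi index.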
2925 __ SmiUntag(value);
2926 __ addi(ip, string, Operand(SeqOneByteString::kHeaderSize - kHeapObjectTag));
2927 __ SmiToByteArrayOffset(r0, index);
2928 __ stbx(value, MemOperand(ip, r0));
2929 context()->Plug(string);
2930}
2931
2932
2933void FullCodeGenerator::EmitTwoByteSeqStringSetChar(CallRuntime* expr) {
2934 ZoneList<Expression*>* args = expr->arguments();
2935 DCHECK_EQ(3, args->length());
2936
2937 Register string = r3;
2938 Register index = r4;
2939 Register value = r5;
2940
2941 VisitForStackValue(args->at(0)); // index
2942 VisitForStackValue(args->at(1)); // value
2943 VisitForAccumulatorValue(args->at(2)); // string
2944  PopOperands(index, value);
2945
2946 if (FLAG_debug_code) {
2947 __ TestIfSmi(value, r0);
2948 __ Check(eq, kNonSmiValue, cr0);
2949 __ TestIfSmi(index, r0);
2950 __ Check(eq, kNonSmiIndex, cr0);
2951 __ SmiUntag(index, index);
2952 static const uint32_t two_byte_seq_type = kSeqStringTag | kTwoByteStringTag;
2953 __ EmitSeqStringSetCharCheck(string, index, value, two_byte_seq_type);
2954 __ SmiTag(index, index);
2955 }
2956
2957 __ SmiUntag(value);
2958 __ addi(ip, string, Operand(SeqTwoByteString::kHeaderSize - kHeapObjectTag));
2959 __ SmiToShortArrayOffset(r0, index);
2960 __ sthx(value, MemOperand(ip, r0));
2961 context()->Plug(string);
2962}
2963
2964
2965void FullCodeGenerator::EmitStringCharFromCode(CallRuntime* expr) {
2966 ZoneList<Expression*>* args = expr->arguments();
2967 DCHECK(args->length() == 1);
2968 VisitForAccumulatorValue(args->at(0));
2969
2970 Label done;
2971 StringCharFromCodeGenerator generator(r3, r4);
2972 generator.GenerateFast(masm_);
2973 __ b(&done);
2974
2975 NopRuntimeCallHelper call_helper;
2976 generator.GenerateSlow(masm_, call_helper);
2977
2978 __ bind(&done);
2979 context()->Plug(r4);
2980}
2981
2982
2983void FullCodeGenerator::EmitStringCharCodeAt(CallRuntime* expr) {
2984 ZoneList<Expression*>* args = expr->arguments();
2985 DCHECK(args->length() == 2);
2986 VisitForStackValue(args->at(0));
2987 VisitForAccumulatorValue(args->at(1));
2988
2989 Register object = r4;
2990 Register index = r3;
2991 Register result = r6;
2992
2993  PopOperand(object);
2994
2995 Label need_conversion;
2996 Label index_out_of_range;
2997 Label done;
2998 StringCharCodeAtGenerator generator(object, index, result, &need_conversion,
2999 &need_conversion, &index_out_of_range,
3000 STRING_INDEX_IS_NUMBER);
3001 generator.GenerateFast(masm_);
3002 __ b(&done);
3003
3004 __ bind(&index_out_of_range);
3005 // When the index is out of range, the spec requires us to return
3006 // NaN.
3007 __ LoadRoot(result, Heap::kNanValueRootIndex);
3008 __ b(&done);
3009
3010 __ bind(&need_conversion);
3011 // Load the undefined value into the result register, which will
3012 // trigger conversion.
3013 __ LoadRoot(result, Heap::kUndefinedValueRootIndex);
3014 __ b(&done);
3015
3016 NopRuntimeCallHelper call_helper;
3017 generator.GenerateSlow(masm_, NOT_PART_OF_IC_HANDLER, call_helper);
3018
3019 __ bind(&done);
3020 context()->Plug(result);
3021}
3022
3023
3024void FullCodeGenerator::EmitStringCharAt(CallRuntime* expr) {
3025 ZoneList<Expression*>* args = expr->arguments();
3026 DCHECK(args->length() == 2);
3027 VisitForStackValue(args->at(0));
3028 VisitForAccumulatorValue(args->at(1));
3029
3030 Register object = r4;
3031 Register index = r3;
3032 Register scratch = r6;
3033 Register result = r3;
3034
3035  PopOperand(object);
3036
3037 Label need_conversion;
3038 Label index_out_of_range;
3039 Label done;
3040 StringCharAtGenerator generator(object, index, scratch, result,
3041 &need_conversion, &need_conversion,
3042 &index_out_of_range, STRING_INDEX_IS_NUMBER);
3043 generator.GenerateFast(masm_);
3044 __ b(&done);
3045
3046 __ bind(&index_out_of_range);
3047 // When the index is out of range, the spec requires us to return
3048 // the empty string.
3049 __ LoadRoot(result, Heap::kempty_stringRootIndex);
3050 __ b(&done);
3051
3052 __ bind(&need_conversion);
3053 // Move smi zero into the result register, which will trigger
3054 // conversion.
3055 __ LoadSmiLiteral(result, Smi::FromInt(0));
3056 __ b(&done);
3057
3058 NopRuntimeCallHelper call_helper;
3059 generator.GenerateSlow(masm_, NOT_PART_OF_IC_HANDLER, call_helper);
3060
3061 __ bind(&done);
3062 context()->Plug(result);
3063}
3064
3065
3066void FullCodeGenerator::EmitCall(CallRuntime* expr) {
3067 ZoneList<Expression*>* args = expr->arguments();
3068 DCHECK_LE(2, args->length());
3069 // Push target, receiver and arguments onto the stack.
3070 for (Expression* const arg : *args) {
3071 VisitForStackValue(arg);
3072 }
3073  PrepareForBailoutForId(expr->CallId(), BailoutState::NO_REGISTERS);
3074  // Move target to r4.
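  // Note: the first two values pushed above are the target and the
  // receiver, so the argument count below excludes them.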
3075 int const argc = args->length() - 2;
3076 __ LoadP(r4, MemOperand(sp, (argc + 1) * kPointerSize));
3077 // Call the target.
3078 __ mov(r3, Operand(argc));
3079 __ Call(isolate()->builtins()->Call(), RelocInfo::CODE_TARGET);
3080  OperandStackDepthDecrement(argc + 1);
3081  RestoreContext();
3082  // Discard the function left on TOS.
3083 context()->DropAndPlug(1, r3);
3084}
3085
3086
3087void FullCodeGenerator::EmitHasCachedArrayIndex(CallRuntime* expr) {
3088 ZoneList<Expression*>* args = expr->arguments();
3089 VisitForAccumulatorValue(args->at(0));
3090
3091 Label materialize_true, materialize_false;
3092 Label* if_true = NULL;
3093 Label* if_false = NULL;
3094 Label* fall_through = NULL;
3095 context()->PrepareTest(&materialize_true, &materialize_false, &if_true,
3096 &if_false, &fall_through);
3097
3098 __ lwz(r3, FieldMemOperand(r3, String::kHashFieldOffset));
3099  // On PPC, ip is assumed to be free for use as a scratch register here.
3100 __ mov(ip, Operand(String::kContainsCachedArrayIndexMask));
3101 __ and_(r0, r3, ip, SetRC);
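  // The string has a cached array index iff the masked hash-field bits are
  // zero, hence the eq split below.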
3102 PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
3103 Split(eq, if_true, if_false, fall_through, cr0);
3104
3105 context()->Plug(if_true, if_false);
3106}
3107
3108
3109void FullCodeGenerator::EmitGetCachedArrayIndex(CallRuntime* expr) {
3110 ZoneList<Expression*>* args = expr->arguments();
3111 DCHECK(args->length() == 1);
3112 VisitForAccumulatorValue(args->at(0));
3113
3114 __ AssertString(r3);
3115
3116 __ lwz(r3, FieldMemOperand(r3, String::kHashFieldOffset));
3117 __ IndexFromHash(r3, r3);
3118
3119 context()->Plug(r3);
3120}
3121
3122
3123void FullCodeGenerator::EmitGetSuperConstructor(CallRuntime* expr) {
3124 ZoneList<Expression*>* args = expr->arguments();
3125 DCHECK_EQ(1, args->length());
3126 VisitForAccumulatorValue(args->at(0));
3127 __ AssertFunction(r3);
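  // Roughly: a derived constructor's super constructor is the function's
  // [[Prototype]], reached below via the function's map.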
3128 __ LoadP(r3, FieldMemOperand(r3, HeapObject::kMapOffset));
3129 __ LoadP(r3, FieldMemOperand(r3, Map::kPrototypeOffset));
3130 context()->Plug(r3);
3131}
3132
3133void FullCodeGenerator::EmitDebugIsActive(CallRuntime* expr) {
3134 DCHECK(expr->arguments()->length() == 0);
3135 ExternalReference debug_is_active =
3136 ExternalReference::debug_is_active_address(isolate());
3137 __ mov(ip, Operand(debug_is_active));
3138 __ lbz(r3, MemOperand(ip));
3139 __ SmiTag(r3);
3140 context()->Plug(r3);
3141}
3142
3143
3144void FullCodeGenerator::EmitCreateIterResultObject(CallRuntime* expr) {
3145 ZoneList<Expression*>* args = expr->arguments();
3146 DCHECK_EQ(2, args->length());
3147 VisitForStackValue(args->at(0));
3148 VisitForStackValue(args->at(1));
3149
3150 Label runtime, done;
3151
3152  __ Allocate(JSIteratorResult::kSize, r3, r5, r6, &runtime,
3153              NO_ALLOCATION_FLAGS);
3154  __ LoadNativeContextSlot(Context::ITERATOR_RESULT_MAP_INDEX, r4);
3155 __ Pop(r5, r6);
3156 __ LoadRoot(r7, Heap::kEmptyFixedArrayRootIndex);
3157 __ StoreP(r4, FieldMemOperand(r3, HeapObject::kMapOffset), r0);
3158 __ StoreP(r7, FieldMemOperand(r3, JSObject::kPropertiesOffset), r0);
3159 __ StoreP(r7, FieldMemOperand(r3, JSObject::kElementsOffset), r0);
3160 __ StoreP(r5, FieldMemOperand(r3, JSIteratorResult::kValueOffset), r0);
3161 __ StoreP(r6, FieldMemOperand(r3, JSIteratorResult::kDoneOffset), r0);
3162 STATIC_ASSERT(JSIteratorResult::kSize == 5 * kPointerSize);
3163 __ b(&done);
3164
3165 __ bind(&runtime);
3166  CallRuntimeWithOperands(Runtime::kCreateIterResultObject);
3167
3168 __ bind(&done);
3169 context()->Plug(r3);
3170}
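// Roughly, the object materialized above has the standard iterator result
// shape, as if created by:
//
//   { value: <popped r5>, done: <popped r6> }
//
// with the empty fixed array installed for both properties and elements.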
3171
3172
3173void FullCodeGenerator::EmitLoadJSRuntimeFunction(CallRuntime* expr) {
3174  // Push function.
3175  __ LoadNativeContextSlot(expr->context_index(), r3);
3176  PushOperand(r3);
3177
3178  // Push undefined as the receiver.
3179  __ LoadRoot(r3, Heap::kUndefinedValueRootIndex);
3180  PushOperand(r3);
3181}
3182
3183
3184void FullCodeGenerator::EmitCallJSRuntimeFunction(CallRuntime* expr) {
3185 ZoneList<Expression*>* args = expr->arguments();
3186 int arg_count = args->length();
3187
3188 SetCallPosition(expr);
3189 __ LoadP(r4, MemOperand(sp, (arg_count + 1) * kPointerSize), r0);
3190 __ mov(r3, Operand(arg_count));
3191 __ Call(isolate()->builtins()->Call(ConvertReceiverMode::kNullOrUndefined),
3192 RelocInfo::CODE_TARGET);
3193  OperandStackDepthDecrement(arg_count + 1);
3194  RestoreContext();
3195}
3196
3197
3198void FullCodeGenerator::VisitUnaryOperation(UnaryOperation* expr) {
3199 switch (expr->op()) {
3200 case Token::DELETE: {
3201 Comment cmnt(masm_, "[ UnaryOperation (DELETE)");
3202 Property* property = expr->expression()->AsProperty();
3203 VariableProxy* proxy = expr->expression()->AsVariableProxy();
3204
3205 if (property != NULL) {
3206 VisitForStackValue(property->obj());
3207 VisitForStackValue(property->key());
3208        CallRuntimeWithOperands(is_strict(language_mode())
3209                                    ? Runtime::kDeleteProperty_Strict
3210                                    : Runtime::kDeleteProperty_Sloppy);
3211        context()->Plug(r3);
3212 } else if (proxy != NULL) {
3213 Variable* var = proxy->var();
3214 // Delete of an unqualified identifier is disallowed in strict mode but
3215 // "delete this" is allowed.
3216 bool is_this = var->HasThisName(isolate());
3217 DCHECK(is_sloppy(language_mode()) || is_this);
3218 if (var->IsUnallocatedOrGlobalSlot()) {
3219 __ LoadGlobalObject(r5);
3220 __ mov(r4, Operand(var->name()));
3221 __ Push(r5, r4);
3222 __ CallRuntime(Runtime::kDeleteProperty_Sloppy);
3223 context()->Plug(r3);
3224 } else if (var->IsStackAllocated() || var->IsContextSlot()) {
3225 // Result of deleting non-global, non-dynamic variables is false.
3226 // The subexpression does not have side effects.
3227 context()->Plug(is_this);
3228 } else {
3229 // Non-global variable. Call the runtime to try to delete from the
3230 // context where the variable was introduced.
3231        __ Push(var->name());
3232        __ CallRuntime(Runtime::kDeleteLookupSlot);
3233 context()->Plug(r3);
3234 }
3235 } else {
3236 // Result of deleting non-property, non-variable reference is true.
3237 // The subexpression may have side effects.
3238 VisitForEffect(expr->expression());
3239 context()->Plug(true);
3240 }
3241 break;
3242 }
3243
3244 case Token::VOID: {
3245 Comment cmnt(masm_, "[ UnaryOperation (VOID)");
3246 VisitForEffect(expr->expression());
3247 context()->Plug(Heap::kUndefinedValueRootIndex);
3248 break;
3249 }
3250
3251 case Token::NOT: {
3252 Comment cmnt(masm_, "[ UnaryOperation (NOT)");
3253 if (context()->IsEffect()) {
3254 // Unary NOT has no side effects so it's only necessary to visit the
3255 // subexpression. Match the optimizing compiler by not branching.
3256 VisitForEffect(expr->expression());
3257 } else if (context()->IsTest()) {
3258 const TestContext* test = TestContext::cast(context());
3259 // The labels are swapped for the recursive call.
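        // e.g. for "if (!x) A; else B;" the subexpression x branches to B's
        // label when true and to A's label when false.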
3260 VisitForControl(expr->expression(), test->false_label(),
3261 test->true_label(), test->fall_through());
3262 context()->Plug(test->true_label(), test->false_label());
3263 } else {
3264 // We handle value contexts explicitly rather than simply visiting
3265 // for control and plugging the control flow into the context,
3266 // because we need to prepare a pair of extra administrative AST ids
3267 // for the optimizing compiler.
3268 DCHECK(context()->IsAccumulatorValue() || context()->IsStackValue());
3269 Label materialize_true, materialize_false, done;
3270 VisitForControl(expr->expression(), &materialize_false,
3271 &materialize_true, &materialize_true);
3272        if (!context()->IsAccumulatorValue()) OperandStackDepthIncrement(1);
3273        __ bind(&materialize_true);
3274        PrepareForBailoutForId(expr->MaterializeTrueId(),
3275                               BailoutState::NO_REGISTERS);
3276        __ LoadRoot(r3, Heap::kTrueValueRootIndex);
3277 if (context()->IsStackValue()) __ push(r3);
3278 __ b(&done);
3279 __ bind(&materialize_false);
3280        PrepareForBailoutForId(expr->MaterializeFalseId(),
3281                               BailoutState::NO_REGISTERS);
3282        __ LoadRoot(r3, Heap::kFalseValueRootIndex);
3283 if (context()->IsStackValue()) __ push(r3);
3284 __ bind(&done);
3285 }
3286 break;
3287 }
3288
3289 case Token::TYPEOF: {
3290 Comment cmnt(masm_, "[ UnaryOperation (TYPEOF)");
3291 {
3292 AccumulatorValueContext context(this);
3293 VisitForTypeofValue(expr->expression());
3294 }
3295 __ mr(r6, r3);
3296 TypeofStub typeof_stub(isolate());
3297 __ CallStub(&typeof_stub);
3298 context()->Plug(r3);
3299 break;
3300 }
3301
3302 default:
3303 UNREACHABLE();
3304 }
3305}
3306
3307
3308void FullCodeGenerator::VisitCountOperation(CountOperation* expr) {
3309 DCHECK(expr->expression()->IsValidReferenceExpressionOrThis());
3310
3311 Comment cmnt(masm_, "[ CountOperation");
3312
3313 Property* prop = expr->expression()->AsProperty();
3314 LhsKind assign_type = Property::GetAssignType(prop);
3315
3316 // Evaluate expression and get value.
3317 if (assign_type == VARIABLE) {
3318 DCHECK(expr->expression()->AsVariableProxy()->var() != NULL);
3319 AccumulatorValueContext context(this);
3320 EmitVariableLoad(expr->expression()->AsVariableProxy());
3321 } else {
3322 // Reserve space for result of postfix operation.
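    // e.g. for "o.x++" in a value context, a dummy smi is pushed here and
    // later overwritten with the old value, so it ends up below the receiver.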
3323 if (expr->is_postfix() && !context()->IsEffect()) {
3324 __ LoadSmiLiteral(ip, Smi::FromInt(0));
3325      PushOperand(ip);
3326    }
3327 switch (assign_type) {
3328 case NAMED_PROPERTY: {
3329 // Put the object both on the stack and in the register.
3330 VisitForStackValue(prop->obj());
3331 __ LoadP(LoadDescriptor::ReceiverRegister(), MemOperand(sp, 0));
3332 EmitNamedPropertyLoad(prop);
3333 break;
3334 }
3335
3336 case NAMED_SUPER_PROPERTY: {
3337 VisitForStackValue(prop->obj()->AsSuperPropertyReference()->this_var());
3338 VisitForAccumulatorValue(
3339 prop->obj()->AsSuperPropertyReference()->home_object());
3340        PushOperand(result_register());
3341        const Register scratch = r4;
3342 __ LoadP(scratch, MemOperand(sp, kPointerSize));
3343        PushOperands(scratch, result_register());
3344        EmitNamedSuperPropertyLoad(prop);
3345 break;
3346 }
3347
3348 case KEYED_SUPER_PROPERTY: {
3349 VisitForStackValue(prop->obj()->AsSuperPropertyReference()->this_var());
3350 VisitForAccumulatorValue(
3351 prop->obj()->AsSuperPropertyReference()->home_object());
3352 const Register scratch = r4;
3353 const Register scratch1 = r5;
3354 __ mr(scratch, result_register());
3355 VisitForAccumulatorValue(prop->key());
3356        PushOperands(scratch, result_register());
3357        __ LoadP(scratch1, MemOperand(sp, 2 * kPointerSize));
3358        PushOperands(scratch1, scratch, result_register());
3359        EmitKeyedSuperPropertyLoad(prop);
3360 break;
3361 }
3362
3363 case KEYED_PROPERTY: {
3364 VisitForStackValue(prop->obj());
3365 VisitForStackValue(prop->key());
3366 __ LoadP(LoadDescriptor::ReceiverRegister(),
3367 MemOperand(sp, 1 * kPointerSize));
3368 __ LoadP(LoadDescriptor::NameRegister(), MemOperand(sp, 0));
3369 EmitKeyedPropertyLoad(prop);
3370 break;
3371 }
3372
3373 case VARIABLE:
3374 UNREACHABLE();
3375 }
3376 }
3377
3378  // We need a second deoptimization point after loading the value
3379  // in case evaluating the property load may have a side effect.
3380  if (assign_type == VARIABLE) {
3381    PrepareForBailout(expr->expression(), BailoutState::TOS_REGISTER);
3382  } else {
3383    PrepareForBailoutForId(prop->LoadId(), BailoutState::TOS_REGISTER);
3384  }
3385
3386 // Inline smi case if we are in a loop.
3387 Label stub_call, done;
3388 JumpPatchSite patch_site(masm_);
3389
3390 int count_value = expr->op() == Token::INC ? 1 : -1;
3391 if (ShouldInlineSmiCase(expr->op())) {
3392 Label slow;
3393 patch_site.EmitJumpIfNotSmi(r3, &slow);
3394
3395 // Save result for postfix expressions.
3396 if (expr->is_postfix()) {
3397 if (!context()->IsEffect()) {
3398 // Save the result on the stack. If we have a named or keyed property
3399 // we store the result under the receiver that is currently on top
3400 // of the stack.
3401 switch (assign_type) {
3402 case VARIABLE:
3403 __ push(r3);
3404 break;
3405 case NAMED_PROPERTY:
3406 __ StoreP(r3, MemOperand(sp, kPointerSize));
3407 break;
3408 case NAMED_SUPER_PROPERTY:
3409 __ StoreP(r3, MemOperand(sp, 2 * kPointerSize));
3410 break;
3411 case KEYED_PROPERTY:
3412 __ StoreP(r3, MemOperand(sp, 2 * kPointerSize));
3413 break;
3414 case KEYED_SUPER_PROPERTY:
3415 __ StoreP(r3, MemOperand(sp, 3 * kPointerSize));
3416 break;
3417 }
3418 }
3419 }
3420
3421 Register scratch1 = r4;
3422 Register scratch2 = r5;
3423 __ LoadSmiLiteral(scratch1, Smi::FromInt(count_value));
3424 __ AddAndCheckForOverflow(r3, r3, scratch1, scratch2, r0);
3425 __ BranchOnNoOverflow(&done);
3426 // Call stub. Undo operation first.
3427 __ sub(r3, r3, scratch1);
3428 __ b(&stub_call);
3429 __ bind(&slow);
3430 }
3431
3432 // Convert old value into a number.
3433 ToNumberStub convert_stub(isolate());
3434 __ CallStub(&convert_stub);
3435  PrepareForBailoutForId(expr->ToNumberId(), BailoutState::TOS_REGISTER);
3436
3437 // Save result for postfix expressions.
3438 if (expr->is_postfix()) {
3439 if (!context()->IsEffect()) {
3440 // Save the result on the stack. If we have a named or keyed property
3441 // we store the result under the receiver that is currently on top
3442 // of the stack.
3443 switch (assign_type) {
3444 case VARIABLE:
3445          PushOperand(r3);
3446          break;
3447 case NAMED_PROPERTY:
3448 __ StoreP(r3, MemOperand(sp, kPointerSize));
3449 break;
3450 case NAMED_SUPER_PROPERTY:
3451 __ StoreP(r3, MemOperand(sp, 2 * kPointerSize));
3452 break;
3453 case KEYED_PROPERTY:
3454 __ StoreP(r3, MemOperand(sp, 2 * kPointerSize));
3455 break;
3456 case KEYED_SUPER_PROPERTY:
3457 __ StoreP(r3, MemOperand(sp, 3 * kPointerSize));
3458 break;
3459 }
3460 }
3461 }
3462
3463 __ bind(&stub_call);
3464 __ mr(r4, r3);
3465 __ LoadSmiLiteral(r3, Smi::FromInt(count_value));
3466
3467 SetExpressionPosition(expr);
3468
3469  Handle<Code> code = CodeFactory::BinaryOpIC(isolate(), Token::ADD).code();
3470  CallIC(code, expr->CountBinOpFeedbackId());
3471 patch_site.EmitPatchInfo();
3472 __ bind(&done);
3473
3474  // Store the value returned in r3.
3475 switch (assign_type) {
3476 case VARIABLE:
3477 if (expr->is_postfix()) {
3478 {
3479 EffectContext context(this);
3480 EmitVariableAssignment(expr->expression()->AsVariableProxy()->var(),
3481 Token::ASSIGN, expr->CountSlot());
3482          PrepareForBailoutForId(expr->AssignmentId(),
3483                                 BailoutState::TOS_REGISTER);
3484          context.Plug(r3);
3485 }
3486        // For all contexts except EffectContext, we have the result on
3487        // top of the stack.
3488 if (!context()->IsEffect()) {
3489 context()->PlugTOS();
3490 }
3491 } else {
3492 EmitVariableAssignment(expr->expression()->AsVariableProxy()->var(),
3493 Token::ASSIGN, expr->CountSlot());
3494        PrepareForBailoutForId(expr->AssignmentId(),
3495                               BailoutState::TOS_REGISTER);
3496        context()->Plug(r3);
3497 }
3498 break;
3499 case NAMED_PROPERTY: {
3500 __ mov(StoreDescriptor::NameRegister(),
3501 Operand(prop->key()->AsLiteral()->value()));
3502      PopOperand(StoreDescriptor::ReceiverRegister());
3503      EmitLoadStoreICSlot(expr->CountSlot());
3504      CallStoreIC();
3505      PrepareForBailoutForId(expr->AssignmentId(), BailoutState::TOS_REGISTER);
3506      if (expr->is_postfix()) {
3507 if (!context()->IsEffect()) {
3508 context()->PlugTOS();
3509 }
3510 } else {
3511 context()->Plug(r3);
3512 }
3513 break;
3514 }
3515 case NAMED_SUPER_PROPERTY: {
3516 EmitNamedSuperPropertyStore(prop);
3517 if (expr->is_postfix()) {
3518 if (!context()->IsEffect()) {
3519 context()->PlugTOS();
3520 }
3521 } else {
3522 context()->Plug(r3);
3523 }
3524 break;
3525 }
3526 case KEYED_SUPER_PROPERTY: {
3527 EmitKeyedSuperPropertyStore(prop);
3528 if (expr->is_postfix()) {
3529 if (!context()->IsEffect()) {
3530 context()->PlugTOS();
3531 }
3532 } else {
3533 context()->Plug(r3);
3534 }
3535 break;
3536 }
3537 case KEYED_PROPERTY: {
3538      PopOperands(StoreDescriptor::ReceiverRegister(),
3539                  StoreDescriptor::NameRegister());
3540      Handle<Code> ic =
3541 CodeFactory::KeyedStoreIC(isolate(), language_mode()).code();
3542 EmitLoadStoreICSlot(expr->CountSlot());
3543 CallIC(ic);
3544      PrepareForBailoutForId(expr->AssignmentId(), BailoutState::TOS_REGISTER);
3545      if (expr->is_postfix()) {
3546 if (!context()->IsEffect()) {
3547 context()->PlugTOS();
3548 }
3549 } else {
3550 context()->Plug(r3);
3551 }
3552 break;
3553 }
3554 }
3555}
3556
3557
3558void FullCodeGenerator::EmitLiteralCompareTypeof(Expression* expr,
3559 Expression* sub_expr,
3560 Handle<String> check) {
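  // Fast path for literal comparisons of the form `typeof x == "number"`;
  // each branch below tests the requested type directly instead of
  // materializing the typeof string.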
3561 Label materialize_true, materialize_false;
3562 Label* if_true = NULL;
3563 Label* if_false = NULL;
3564 Label* fall_through = NULL;
3565 context()->PrepareTest(&materialize_true, &materialize_false, &if_true,
3566 &if_false, &fall_through);
3567
3568 {
3569 AccumulatorValueContext context(this);
3570 VisitForTypeofValue(sub_expr);
3571 }
3572 PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
3573
3574 Factory* factory = isolate()->factory();
3575 if (String::Equals(check, factory->number_string())) {
3576 __ JumpIfSmi(r3, if_true);
3577 __ LoadP(r3, FieldMemOperand(r3, HeapObject::kMapOffset));
3578 __ LoadRoot(ip, Heap::kHeapNumberMapRootIndex);
3579 __ cmp(r3, ip);
3580 Split(eq, if_true, if_false, fall_through);
3581 } else if (String::Equals(check, factory->string_string())) {
3582 __ JumpIfSmi(r3, if_false);
3583 __ CompareObjectType(r3, r3, r4, FIRST_NONSTRING_TYPE);
3584 Split(lt, if_true, if_false, fall_through);
3585 } else if (String::Equals(check, factory->symbol_string())) {
3586 __ JumpIfSmi(r3, if_false);
3587 __ CompareObjectType(r3, r3, r4, SYMBOL_TYPE);
3588 Split(eq, if_true, if_false, fall_through);
3589 } else if (String::Equals(check, factory->boolean_string())) {
3590 __ CompareRoot(r3, Heap::kTrueValueRootIndex);
3591 __ beq(if_true);
3592 __ CompareRoot(r3, Heap::kFalseValueRootIndex);
3593 Split(eq, if_true, if_false, fall_through);
3594 } else if (String::Equals(check, factory->undefined_string())) {
3595    __ CompareRoot(r3, Heap::kNullValueRootIndex);
3596    __ beq(if_false);
3597    __ JumpIfSmi(r3, if_false);
3598 // Check for undetectable objects => true.
3599 __ LoadP(r3, FieldMemOperand(r3, HeapObject::kMapOffset));
3600 __ lbz(r4, FieldMemOperand(r3, Map::kBitFieldOffset));
3601 __ andi(r0, r4, Operand(1 << Map::kIsUndetectable));
3602 Split(ne, if_true, if_false, fall_through, cr0);
3603
3604 } else if (String::Equals(check, factory->function_string())) {
3605 __ JumpIfSmi(r3, if_false);
3606 __ LoadP(r3, FieldMemOperand(r3, HeapObject::kMapOffset));
3607 __ lbz(r4, FieldMemOperand(r3, Map::kBitFieldOffset));
3608 __ andi(r4, r4,
3609 Operand((1 << Map::kIsCallable) | (1 << Map::kIsUndetectable)));
3610 __ cmpi(r4, Operand(1 << Map::kIsCallable));
3611 Split(eq, if_true, if_false, fall_through);
3612 } else if (String::Equals(check, factory->object_string())) {
3613 __ JumpIfSmi(r3, if_false);
3614 __ CompareRoot(r3, Heap::kNullValueRootIndex);
3615 __ beq(if_true);
3616 STATIC_ASSERT(LAST_JS_RECEIVER_TYPE == LAST_TYPE);
3617 __ CompareObjectType(r3, r3, r4, FIRST_JS_RECEIVER_TYPE);
3618 __ blt(if_false);
3619 // Check for callable or undetectable objects => false.
3620 __ lbz(r4, FieldMemOperand(r3, Map::kBitFieldOffset));
3621 __ andi(r0, r4,
3622 Operand((1 << Map::kIsCallable) | (1 << Map::kIsUndetectable)));
3623 Split(eq, if_true, if_false, fall_through, cr0);
3624// clang-format off
3625#define SIMD128_TYPE(TYPE, Type, type, lane_count, lane_type) \
3626 } else if (String::Equals(check, factory->type##_string())) { \
3627 __ JumpIfSmi(r3, if_false); \
3628 __ LoadP(r3, FieldMemOperand(r3, HeapObject::kMapOffset)); \
3629 __ CompareRoot(r3, Heap::k##Type##MapRootIndex); \
3630 Split(eq, if_true, if_false, fall_through);
3631 SIMD128_TYPES(SIMD128_TYPE)
3632#undef SIMD128_TYPE
3633 // clang-format on
3634 } else {
3635 if (if_false != fall_through) __ b(if_false);
3636 }
3637 context()->Plug(if_true, if_false);
3638}
3639
3640
3641void FullCodeGenerator::VisitCompareOperation(CompareOperation* expr) {
3642 Comment cmnt(masm_, "[ CompareOperation");
3643
3644 // First we try a fast inlined version of the compare when one of
3645 // the operands is a literal.
3646 if (TryLiteralCompare(expr)) return;
3647
3648 // Always perform the comparison for its control flow. Pack the result
3649 // into the expression's context after the comparison is performed.
3650 Label materialize_true, materialize_false;
3651 Label* if_true = NULL;
3652 Label* if_false = NULL;
3653 Label* fall_through = NULL;
3654 context()->PrepareTest(&materialize_true, &materialize_false, &if_true,
3655 &if_false, &fall_through);
3656
3657 Token::Value op = expr->op();
3658 VisitForStackValue(expr->left());
3659 switch (op) {
3660 case Token::IN:
3661 VisitForStackValue(expr->right());
3662      SetExpressionPosition(expr);
3663      EmitHasProperty();
3664      PrepareForBailoutBeforeSplit(expr, false, NULL, NULL);
3665 __ CompareRoot(r3, Heap::kTrueValueRootIndex);
3666 Split(eq, if_true, if_false, fall_through);
3667 break;
3668
3669 case Token::INSTANCEOF: {
3670 VisitForAccumulatorValue(expr->right());
3671      SetExpressionPosition(expr);
3672      PopOperand(r4);
3673      InstanceOfStub stub(isolate());
3674 __ CallStub(&stub);
3675 PrepareForBailoutBeforeSplit(expr, false, NULL, NULL);
3676 __ CompareRoot(r3, Heap::kTrueValueRootIndex);
3677 Split(eq, if_true, if_false, fall_through);
3678 break;
3679 }
3680
3681 default: {
3682 VisitForAccumulatorValue(expr->right());
3683      SetExpressionPosition(expr);
3684      Condition cond = CompareIC::ComputeCondition(op);
3685      PopOperand(r4);
3686
3687 bool inline_smi_code = ShouldInlineSmiCase(op);
3688 JumpPatchSite patch_site(masm_);
3689 if (inline_smi_code) {
3690 Label slow_case;
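        // Or-ing the two operands leaves the smi tag bit clear only if both
        // are smis, so a single check covers both.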
3691 __ orx(r5, r3, r4);
3692 patch_site.EmitJumpIfNotSmi(r5, &slow_case);
3693 __ cmp(r4, r3);
3694 Split(cond, if_true, if_false, NULL);
3695 __ bind(&slow_case);
3696 }
3697
3698      Handle<Code> ic = CodeFactory::CompareIC(isolate(), op).code();
3699      CallIC(ic, expr->CompareOperationFeedbackId());
3700 patch_site.EmitPatchInfo();
3701 PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
3702 __ cmpi(r3, Operand::Zero());
3703 Split(cond, if_true, if_false, fall_through);
3704 }
3705 }
3706
3707 // Convert the result of the comparison into one expected for this
3708 // expression's context.
3709 context()->Plug(if_true, if_false);
3710}
3711
3712
3713void FullCodeGenerator::EmitLiteralCompareNil(CompareOperation* expr,
3714 Expression* sub_expr,
3715 NilValue nil) {
3716 Label materialize_true, materialize_false;
3717 Label* if_true = NULL;
3718 Label* if_false = NULL;
3719 Label* fall_through = NULL;
3720 context()->PrepareTest(&materialize_true, &materialize_false, &if_true,
3721 &if_false, &fall_through);
3722
3723 VisitForAccumulatorValue(sub_expr);
3724 PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
3725 if (expr->op() == Token::EQ_STRICT) {
3726 Heap::RootListIndex nil_value = nil == kNullValue
3727 ? Heap::kNullValueRootIndex
3728 : Heap::kUndefinedValueRootIndex;
3729 __ LoadRoot(r4, nil_value);
3730 __ cmp(r3, r4);
3731 Split(eq, if_true, if_false, fall_through);
3732 } else {
3733    __ JumpIfSmi(r3, if_false);
3734    __ LoadP(r3, FieldMemOperand(r3, HeapObject::kMapOffset));
3735    __ lbz(r4, FieldMemOperand(r3, Map::kBitFieldOffset));
3736    __ andi(r0, r4, Operand(1 << Map::kIsUndetectable));
3737    Split(ne, if_true, if_false, fall_through, cr0);
3738  }
3739 context()->Plug(if_true, if_false);
3740}
3741
3742
3743Register FullCodeGenerator::result_register() { return r3; }
3744
3745
3746Register FullCodeGenerator::context_register() { return cp; }
3747
3748void FullCodeGenerator::LoadFromFrameField(int frame_offset, Register value) {
3749  DCHECK_EQ(static_cast<int>(POINTER_SIZE_ALIGN(frame_offset)), frame_offset);
3750  __ LoadP(value, MemOperand(fp, frame_offset), r0);
3751}
3752
3753void FullCodeGenerator::StoreToFrameField(int frame_offset, Register value) {
3754 DCHECK_EQ(static_cast<int>(POINTER_SIZE_ALIGN(frame_offset)), frame_offset);
3755 __ StoreP(value, MemOperand(fp, frame_offset), r0);
3756}
3757
3758
3759void FullCodeGenerator::LoadContextField(Register dst, int context_index) {
3760 __ LoadP(dst, ContextMemOperand(cp, context_index), r0);
3761}
3762
3763
3764void FullCodeGenerator::PushFunctionArgumentForContextAllocation() {
3765 Scope* closure_scope = scope()->ClosureScope();
3766 if (closure_scope->is_script_scope() ||
3767 closure_scope->is_module_scope()) {
3768 // Contexts nested in the native context have a canonical empty function
3769 // as their closure, not the anonymous closure containing the global
3770 // code.
3771 __ LoadNativeContextSlot(Context::CLOSURE_INDEX, ip);
3772 } else if (closure_scope->is_eval_scope()) {
3773 // Contexts created by a call to eval have the same closure as the
3774 // context calling eval, not the anonymous closure containing the eval
3775 // code. Fetch it from the context.
3776 __ LoadP(ip, ContextMemOperand(cp, Context::CLOSURE_INDEX));
3777 } else {
3778 DCHECK(closure_scope->is_function_scope());
3779 __ LoadP(ip, MemOperand(fp, JavaScriptFrameConstants::kFunctionOffset));
3780 }
3781  PushOperand(ip);
3782}
3783
3784
3785// ----------------------------------------------------------------------------
3786// Non-local control flow support.
3787
3788void FullCodeGenerator::EnterFinallyBlock() {
3789 DCHECK(!result_register().is(r4));
3790  // Store pending message while executing finally block.
3791 ExternalReference pending_message_obj =
3792 ExternalReference::address_of_pending_message_obj(isolate());
3793 __ mov(ip, Operand(pending_message_obj));
3794 __ LoadP(r4, MemOperand(ip));
3795  PushOperand(r4);
3796
3797 ClearPendingMessage();
3798}
3799
3800
3801void FullCodeGenerator::ExitFinallyBlock() {
3802 DCHECK(!result_register().is(r4));
3803 // Restore pending message from stack.
3804  PopOperand(r4);
3805  ExternalReference pending_message_obj =
3806 ExternalReference::address_of_pending_message_obj(isolate());
3807 __ mov(ip, Operand(pending_message_obj));
3808 __ StoreP(r4, MemOperand(ip));
3809}
3810
3811
3812void FullCodeGenerator::ClearPendingMessage() {
3813 DCHECK(!result_register().is(r4));
3814 ExternalReference pending_message_obj =
3815 ExternalReference::address_of_pending_message_obj(isolate());
3816 __ LoadRoot(r4, Heap::kTheHoleValueRootIndex);
3817 __ mov(ip, Operand(pending_message_obj));
3818 __ StoreP(r4, MemOperand(ip));
3819}
3820
3821
3822void FullCodeGenerator::DeferredCommands::EmitCommands() {
3823 DCHECK(!result_register().is(r4));
3824 // Restore the accumulator (r3) and token (r4).
3825 __ Pop(r4, result_register());
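  // Each deferred command was recorded with a unique smi token; compare the
  // popped token against each command and perform the matching non-local
  // control transfer.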
3826 for (DeferredCommand cmd : commands_) {
3827 Label skip;
3828 __ CmpSmiLiteral(r4, Smi::FromInt(cmd.token), r0);
3829 __ bne(&skip);
3830 switch (cmd.command) {
3831 case kReturn:
3832 codegen_->EmitUnwindAndReturn();
3833 break;
3834 case kThrow:
3835 __ Push(result_register());
3836 __ CallRuntime(Runtime::kReThrow);
3837 break;
3838 case kContinue:
3839 codegen_->EmitContinue(cmd.target);
3840 break;
3841 case kBreak:
3842 codegen_->EmitBreak(cmd.target);
3843 break;
3844 }
3845 __ bind(&skip);
3846 }
3847}
3848
3849#undef __
3850
3851
3852void BackEdgeTable::PatchAt(Code* unoptimized_code, Address pc,
3853 BackEdgeState target_state,
3854 Code* replacement_code) {
3855 Address mov_address = Assembler::target_address_from_return_address(pc);
3856 Address cmp_address = mov_address - 2 * Assembler::kInstrSize;
3857 Isolate* isolate = unoptimized_code->GetIsolate();
3858 CodePatcher patcher(isolate, cmp_address, 1);
3859
3860 switch (target_state) {
3861 case INTERRUPT: {
3862 // <decrement profiling counter>
3863 // cmpi r6, 0
3864 // bge <ok> ;; not changed
3865 // mov r12, <interrupt stub address>
3866 // mtlr r12
3867 // blrl
3868 // <reset profiling counter>
3869 // ok-label
3870 patcher.masm()->cmpi(r6, Operand::Zero());
3871 break;
3872 }
3873 case ON_STACK_REPLACEMENT:
3874      // <decrement profiling counter>
3875 // crset
3876 // bge <ok> ;; not changed
3877 // mov r12, <on-stack replacement address>
3878 // mtlr r12
3879 // blrl
3880 // <reset profiling counter>
3881 // ok-label ----- pc_after points here
3882
3883 // Set the LT bit such that bge is a NOP
3884 patcher.masm()->crset(Assembler::encode_crbit(cr7, CR_LT));
3885 break;
3886 }
3887
3888 // Replace the stack check address in the mov sequence with the
3889 // entry address of the replacement code.
3890 Assembler::set_target_address_at(isolate, mov_address, unoptimized_code,
3891 replacement_code->entry());
3892
3893 unoptimized_code->GetHeap()->incremental_marking()->RecordCodeTargetPatch(
3894 unoptimized_code, mov_address, replacement_code);
3895}
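// Illustrative round trip (roughly): patching back to INTERRUPT restores the
// cmpi, which GetBackEdgeState below recognizes via IsCmpImmediate; patching
// to ON_STACK_REPLACEMENT installs crset, recognized via IsCrSet.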
3896
3897
3898BackEdgeTable::BackEdgeState BackEdgeTable::GetBackEdgeState(
3899 Isolate* isolate, Code* unoptimized_code, Address pc) {
3900 Address mov_address = Assembler::target_address_from_return_address(pc);
3901 Address cmp_address = mov_address - 2 * Assembler::kInstrSize;
3902#ifdef DEBUG
3903  Address interrupt_address =
3904      Assembler::target_address_at(mov_address, unoptimized_code);
3905#endif
3906
3907 if (Assembler::IsCmpImmediate(Assembler::instr_at(cmp_address))) {
3908 DCHECK(interrupt_address == isolate->builtins()->InterruptCheck()->entry());
3909 return INTERRUPT;
3910 }
3911
3912 DCHECK(Assembler::IsCrSet(Assembler::instr_at(cmp_address)));
3913
3914  DCHECK(interrupt_address ==
3915         isolate->builtins()->OnStackReplacement()->entry());
3916  return ON_STACK_REPLACEMENT;
3917}
3918} // namespace internal
3919} // namespace v8
3920#endif // V8_TARGET_ARCH_PPC