// Copyright 2014 the V8 project authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.

#if V8_TARGET_ARCH_PPC

#include "src/ast/scopes.h"
#include "src/code-factory.h"
#include "src/code-stubs.h"
#include "src/codegen.h"
#include "src/debug/debug.h"
#include "src/full-codegen/full-codegen.h"
#include "src/ic/ic.h"
#include "src/parsing/parser.h"

#include "src/ppc/code-stubs-ppc.h"
#include "src/ppc/macro-assembler-ppc.h"

namespace v8 {
namespace internal {

#define __ ACCESS_MASM(masm())

// A patch site is a location in the code that can be patched. This class has
// a number of methods to emit the patchable code and the method
// EmitPatchInfo to record a marker back to the patchable code. This marker
// is a cmpi rx, #yyy instruction, and x * 0x0000ffff + yyy (the raw 16-bit
// immediate value) is the delta from the pc to the first instruction of the
// patchable code.
// See PatchInlinedSmiCode in ic-ppc.cc for the code that patches it.
class JumpPatchSite BASE_EMBEDDED {
 public:
  explicit JumpPatchSite(MacroAssembler* masm) : masm_(masm) {
#ifdef DEBUG
    info_emitted_ = false;
#endif
  }

  ~JumpPatchSite() { DCHECK(patch_site_.is_bound() == info_emitted_); }

  // When initially emitting this, ensure that a jump is always generated to
  // skip the inlined smi code.
  void EmitJumpIfNotSmi(Register reg, Label* target) {
    DCHECK(!patch_site_.is_bound() && !info_emitted_);
    Assembler::BlockTrampolinePoolScope block_trampoline_pool(masm_);
    __ bind(&patch_site_);
    __ cmp(reg, reg, cr0);
    __ beq(target, cr0);  // Always taken before patched.
  }

  // When initially emitting this, ensure that a jump is never generated to
  // skip the inlined smi code.
  void EmitJumpIfSmi(Register reg, Label* target) {
    Assembler::BlockTrampolinePoolScope block_trampoline_pool(masm_);
    DCHECK(!patch_site_.is_bound() && !info_emitted_);
    __ bind(&patch_site_);
    __ cmp(reg, reg, cr0);
    __ bne(target, cr0);  // Never taken before patched.
  }

  void EmitPatchInfo() {
    if (patch_site_.is_bound()) {
      int delta_to_patch_site = masm_->InstructionsGeneratedSince(&patch_site_);
      Register reg;
      // I believe this is using reg as the high bits of the offset.
      reg.set_code(delta_to_patch_site / kOff16Mask);
      __ cmpi(reg, Operand(delta_to_patch_site % kOff16Mask));
#ifdef DEBUG
      info_emitted_ = true;
#endif
    } else {
      __ nop();  // Signals no inlined code.
    }
  }

 private:
  MacroAssembler* masm() { return masm_; }
  MacroAssembler* masm_;
  Label patch_site_;
#ifdef DEBUG
  bool info_emitted_;
#endif
};
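
// Illustrative note (not in the original source): with kOff16Mask == 0xffff,
// a delta of 70000 instructions would be recorded as register code
// 70000 / 0xffff == 1 and immediate 70000 % 0xffff == 4465, i.e. a
// "cmpi r1, 4465" marker; PatchInlinedSmiCode can then recover the delta as
// 1 * 0xffff + 4465 == 70000.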


// Generate code for a JS function. On entry to the function the receiver
// and arguments have been pushed on the stack left to right. The actual
// argument count matches the formal parameter count expected by the
// function.
//
// The live registers are:
//   o r4: the JS function object being called (i.e., ourselves)
//   o r6: the new target value
//   o cp: our context
//   o fp: our caller's frame pointer (aka r31)
//   o sp: stack pointer
//   o lr: return address
//   o ip: our own function entry (required by the prologue)
//
// The function builds a JS frame. Please see JavaScriptFrameConstants in
// frames-ppc.h for its layout.
void FullCodeGenerator::Generate() {
  CompilationInfo* info = info_;
  profiling_counter_ = isolate()->factory()->NewCell(
      Handle<Smi>(Smi::FromInt(FLAG_interrupt_budget), isolate()));
  SetFunctionPosition(literal());
  Comment cmnt(masm_, "[ function compiled by full code generator");

  ProfileEntryHookStub::MaybeCallEntryHook(masm_);

  if (FLAG_debug_code && info->ExpectsJSReceiverAsReceiver()) {
    int receiver_offset = info->scope()->num_parameters() * kPointerSize;
    __ LoadP(r5, MemOperand(sp, receiver_offset), r0);
    __ AssertNotSmi(r5);
    __ CompareObjectType(r5, r5, no_reg, FIRST_JS_RECEIVER_TYPE);
    __ Assert(ge, kSloppyFunctionExpectsJSReceiverReceiver);
  }

  // Open a frame scope to indicate that there is a frame on the stack. The
  // MANUAL indicates that the scope shouldn't actually generate code to set up
  // the frame (that is done below).
  FrameScope frame_scope(masm_, StackFrame::MANUAL);
  int prologue_offset = masm_->pc_offset();

  if (prologue_offset) {
    // Prologue logic requires its starting address in ip and the
    // corresponding offset from the function entry.
    prologue_offset += Instruction::kInstrSize;
    __ addi(ip, ip, Operand(prologue_offset));
  }
  info->set_prologue_offset(prologue_offset);
  __ Prologue(info->GeneratePreagedPrologue(), ip, prologue_offset);
133
134 {
135 Comment cmnt(masm_, "[ Allocate locals");
136 int locals_count = info->scope()->num_stack_slots();
137 // Generators allocate locals, if any, in context slots.
138 DCHECK(!IsGeneratorFunction(info->literal()->kind()) || locals_count == 0);
Ben Murdoch097c5b22016-05-18 11:27:45 +0100139 OperandStackDepthIncrement(locals_count);
Ben Murdoch4a90d5f2016-03-22 12:00:34 +0000140 if (locals_count > 0) {
141 if (locals_count >= 128) {
142 Label ok;
143 __ Add(ip, sp, -(locals_count * kPointerSize), r0);
144 __ LoadRoot(r5, Heap::kRealStackLimitRootIndex);
145 __ cmpl(ip, r5);
146 __ bc_short(ge, &ok);
147 __ CallRuntime(Runtime::kThrowStackOverflow);
148 __ bind(&ok);
149 }
150 __ LoadRoot(ip, Heap::kUndefinedValueRootIndex);
151 int kMaxPushes = FLAG_optimize_for_size ? 4 : 32;
152 if (locals_count >= kMaxPushes) {
153 int loop_iterations = locals_count / kMaxPushes;
154 __ mov(r5, Operand(loop_iterations));
155 __ mtctr(r5);
156 Label loop_header;
157 __ bind(&loop_header);
158 // Do pushes.
159 for (int i = 0; i < kMaxPushes; i++) {
160 __ push(ip);
161 }
162 // Continue loop if not done.
163 __ bdnz(&loop_header);
164 }
165 int remaining = locals_count % kMaxPushes;
166 // Emit the remaining pushes.
167 for (int i = 0; i < remaining; i++) {
168 __ push(ip);
169 }
170 }
171 }
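
  // Illustrative note (not in the original source): for, say, 100 local
  // slots with kMaxPushes == 32, the code above runs the CTR loop
  // 100 / 32 == 3 times pushing 32 undefined values each, then emits the
  // remaining 100 % 32 == 4 pushes straight-line, for 100 slots total.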

  bool function_in_register_r4 = true;

  // Possibly allocate a local context.
  if (info->scope()->num_heap_slots() > 0) {
    // Argument to NewContext is the function, which is still in r4.
    Comment cmnt(masm_, "[ Allocate context");
    bool need_write_barrier = true;
    int slots = info->scope()->num_heap_slots() - Context::MIN_CONTEXT_SLOTS;
    if (info->scope()->is_script_scope()) {
      __ push(r4);
      __ Push(info->scope()->GetScopeInfo(info->isolate()));
      __ CallRuntime(Runtime::kNewScriptContext);
      PrepareForBailoutForId(BailoutId::ScriptContext(),
                             BailoutState::TOS_REGISTER);
      // The new target value is not used, clobbering is safe.
      DCHECK_NULL(info->scope()->new_target_var());
    } else {
      if (info->scope()->new_target_var() != nullptr) {
        __ push(r6);  // Preserve new target.
      }
      if (slots <= FastNewContextStub::kMaximumSlots) {
        FastNewContextStub stub(isolate(), slots);
        __ CallStub(&stub);
        // Result of FastNewContextStub is always in new space.
        need_write_barrier = false;
      } else {
        __ push(r4);
        __ CallRuntime(Runtime::kNewFunctionContext);
      }
      if (info->scope()->new_target_var() != nullptr) {
        __ pop(r6);  // Restore new target.
      }
    }
    function_in_register_r4 = false;
    // Context is returned in r3. It replaces the context passed to us.
    // It's saved in the stack and kept live in cp.
    __ mr(cp, r3);
    __ StoreP(r3, MemOperand(fp, StandardFrameConstants::kContextOffset));
    // Copy any necessary parameters into the context.
    int num_parameters = info->scope()->num_parameters();
    int first_parameter = info->scope()->has_this_declaration() ? -1 : 0;
    for (int i = first_parameter; i < num_parameters; i++) {
      Variable* var = (i == -1) ? scope()->receiver() : scope()->parameter(i);
      if (var->IsContextSlot()) {
        int parameter_offset = StandardFrameConstants::kCallerSPOffset +
                               (num_parameters - 1 - i) * kPointerSize;
        // Load parameter from stack.
        __ LoadP(r3, MemOperand(fp, parameter_offset), r0);
        // Store it in the context.
        MemOperand target = ContextMemOperand(cp, var->index());
        __ StoreP(r3, target, r0);

        // Update the write barrier.
        if (need_write_barrier) {
          __ RecordWriteContextSlot(cp, target.offset(), r3, r5,
                                    kLRHasBeenSaved, kDontSaveFPRegs);
        } else if (FLAG_debug_code) {
          Label done;
          __ JumpIfInNewSpace(cp, r3, &done);
          __ Abort(kExpectedNewSpaceObject);
          __ bind(&done);
        }
      }
    }
  }
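
  // Illustrative note (not in the original source): parameters were pushed
  // left to right above the caller's SP, so with num_parameters == 3 the
  // last parameter (i == 2) is at kCallerSPOffset and the receiver (i == -1)
  // is at kCallerSPOffset + 3 * kPointerSize.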

  // The registers holding the function and new target are both trashed if we
  // bail out here. But since that can happen only when new target is not used
  // and we allocate a context, the value of |function_in_register_r4| is
  // correct.
  PrepareForBailoutForId(BailoutId::FunctionContext(),
                         BailoutState::NO_REGISTERS);


  // Possibly set up a local binding to the 'this function' variable, which
  // is used in derived constructors with super calls.
  Variable* this_function_var = scope()->this_function_var();
  if (this_function_var != nullptr) {
    Comment cmnt(masm_, "[ This function");
    if (!function_in_register_r4) {
      __ LoadP(r4, MemOperand(fp, JavaScriptFrameConstants::kFunctionOffset));
      // The write barrier clobbers the register again; keep it marked as such.
    }
    SetVar(this_function_var, r4, r3, r5);
  }

  // Possibly set up a local binding to the new target value.
  Variable* new_target_var = scope()->new_target_var();
  if (new_target_var != nullptr) {
    Comment cmnt(masm_, "[ new.target");
    SetVar(new_target_var, r6, r3, r5);
  }

  // Possibly allocate rest parameters.
  int rest_index;
  Variable* rest_param = scope()->rest_parameter(&rest_index);
  if (rest_param) {
    Comment cmnt(masm_, "[ Allocate rest parameter array");
    if (!function_in_register_r4) {
      __ LoadP(r4, MemOperand(fp, JavaScriptFrameConstants::kFunctionOffset));
    }
    FastNewRestParameterStub stub(isolate());
    __ CallStub(&stub);
    function_in_register_r4 = false;
    SetVar(rest_param, r3, r4, r5);
  }

  Variable* arguments = scope()->arguments();
  if (arguments != NULL) {
    // Function uses arguments object.
    Comment cmnt(masm_, "[ Allocate arguments object");
    if (!function_in_register_r4) {
      // Load this again, if it's used by the local context below.
      __ LoadP(r4, MemOperand(fp, JavaScriptFrameConstants::kFunctionOffset));
    }
    if (is_strict(language_mode()) || !has_simple_parameters()) {
      FastNewStrictArgumentsStub stub(isolate());
      __ CallStub(&stub);
    } else if (literal()->has_duplicate_parameters()) {
      __ Push(r4);
      __ CallRuntime(Runtime::kNewSloppyArguments_Generic);
    } else {
      FastNewSloppyArgumentsStub stub(isolate());
      __ CallStub(&stub);
    }

    SetVar(arguments, r3, r4, r5);
  }

  if (FLAG_trace) {
    __ CallRuntime(Runtime::kTraceEnter);
  }

  // Visit the declarations and body.
  PrepareForBailoutForId(BailoutId::FunctionEntry(),
                         BailoutState::NO_REGISTERS);
  {
    Comment cmnt(masm_, "[ Declarations");
    VisitDeclarations(scope()->declarations());
  }

  // Assert that the declarations do not use ICs. Otherwise the debugger
  // won't be able to redirect a PC at an IC to the correct IC in newly
  // recompiled code.
  DCHECK_EQ(0, ic_total_count_);

  {
    Comment cmnt(masm_, "[ Stack check");
    PrepareForBailoutForId(BailoutId::Declarations(),
                           BailoutState::NO_REGISTERS);
    Label ok;
    __ LoadRoot(ip, Heap::kStackLimitRootIndex);
    __ cmpl(sp, ip);
    __ bc_short(ge, &ok);
    __ Call(isolate()->builtins()->StackCheck(), RelocInfo::CODE_TARGET);
    __ bind(&ok);
  }

  {
    Comment cmnt(masm_, "[ Body");
    DCHECK(loop_depth() == 0);
    VisitStatements(literal()->body());
    DCHECK(loop_depth() == 0);
  }

  // Always emit a 'return undefined' in case control fell off the end of
  // the body.
  {
    Comment cmnt(masm_, "[ return <undefined>;");
    __ LoadRoot(r3, Heap::kUndefinedValueRootIndex);
  }
  EmitReturnSequence();

  if (HasStackOverflow()) {
    masm_->AbortConstantPoolBuilding();
  }
}


void FullCodeGenerator::ClearAccumulator() {
  __ LoadSmiLiteral(r3, Smi::FromInt(0));
}


void FullCodeGenerator::EmitProfilingCounterDecrement(int delta) {
  __ mov(r5, Operand(profiling_counter_));
  __ LoadP(r6, FieldMemOperand(r5, Cell::kValueOffset));
  __ SubSmiLiteral(r6, r6, Smi::FromInt(delta), r0);
  __ StoreP(r6, FieldMemOperand(r5, Cell::kValueOffset), r0);
}


void FullCodeGenerator::EmitProfilingCounterReset() {
  int reset_value = FLAG_interrupt_budget;
  __ mov(r5, Operand(profiling_counter_));
  __ LoadSmiLiteral(r6, Smi::FromInt(reset_value));
  __ StoreP(r6, FieldMemOperand(r5, Cell::kValueOffset), r0);
}


void FullCodeGenerator::EmitBackEdgeBookkeeping(IterationStatement* stmt,
                                                Label* back_edge_target) {
  Comment cmnt(masm_, "[ Back edge bookkeeping");
  Label ok;

  DCHECK(back_edge_target->is_bound());
  int distance = masm_->SizeOfCodeGeneratedSince(back_edge_target) +
                 kCodeSizeMultiplier / 2;
  int weight = Min(kMaxBackEdgeWeight, Max(1, distance / kCodeSizeMultiplier));
  EmitProfilingCounterDecrement(weight);
  {
    Assembler::BlockTrampolinePoolScope block_trampoline_pool(masm_);
    Assembler::BlockConstantPoolEntrySharingScope prevent_entry_sharing(masm_);
    // BackEdgeTable::PatchAt manipulates this sequence.
    __ cmpi(r6, Operand::Zero());
    __ bc_short(ge, &ok);
    __ Call(isolate()->builtins()->InterruptCheck(), RelocInfo::CODE_TARGET);

    // Record a mapping of this PC offset to the OSR id. This is used to find
    // the AST id from the unoptimized code in order to use it as a key into
    // the deoptimization input data found in the optimized code.
    RecordBackEdge(stmt->OsrEntryId());
  }
  EmitProfilingCounterReset();

  __ bind(&ok);
  PrepareForBailoutForId(stmt->EntryId(), BailoutState::NO_REGISTERS);
  // Record a mapping of the OSR id to this PC. This is used if the OSR
  // entry becomes the target of a bailout. We don't expect it to be, but
  // we want it to work if it is.
  PrepareForBailoutForId(stmt->OsrEntryId(), BailoutState::NO_REGISTERS);
}
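
// Illustrative note (not in the original source): the weight scales the
// interrupt-counter decrement with loop body size. Assuming, say,
// kCodeSizeMultiplier == 200, a back edge 900 bytes after its target gives
// distance == 900 + 100 == 1000 and weight == Min(kMaxBackEdgeWeight,
// Max(1, 1000 / 200)) == 5, so larger loop bodies drain the profiling
// counter faster.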

void FullCodeGenerator::EmitProfilingCounterHandlingForReturnSequence(
    bool is_tail_call) {
  // Pretend that the exit is a backwards jump to the entry.
  int weight = 1;
  if (info_->ShouldSelfOptimize()) {
    weight = FLAG_interrupt_budget / FLAG_self_opt_count;
  } else {
    int distance = masm_->pc_offset() + kCodeSizeMultiplier / 2;
    weight = Min(kMaxBackEdgeWeight, Max(1, distance / kCodeSizeMultiplier));
  }
  EmitProfilingCounterDecrement(weight);
  Label ok;
  __ cmpi(r6, Operand::Zero());
  __ bge(&ok);
  // Don't need to save result register if we are going to do a tail call.
  if (!is_tail_call) {
    __ push(r3);
  }
  __ Call(isolate()->builtins()->InterruptCheck(), RelocInfo::CODE_TARGET);
  if (!is_tail_call) {
    __ pop(r3);
  }
  EmitProfilingCounterReset();
  __ bind(&ok);
}

void FullCodeGenerator::EmitReturnSequence() {
  Comment cmnt(masm_, "[ Return sequence");
  if (return_label_.is_bound()) {
    __ b(&return_label_);
  } else {
    __ bind(&return_label_);
    if (FLAG_trace) {
      // Push the return value on the stack as the parameter.
      // Runtime::TraceExit returns its parameter in r3.
      __ push(r3);
      __ CallRuntime(Runtime::kTraceExit);
    }
    EmitProfilingCounterHandlingForReturnSequence(false);

    // Make sure that the constant pool is not emitted inside the return
    // sequence.
    {
      Assembler::BlockTrampolinePoolScope block_trampoline_pool(masm_);
      int32_t arg_count = info_->scope()->num_parameters() + 1;
      int32_t sp_delta = arg_count * kPointerSize;
      SetReturnPosition(literal());
      __ LeaveFrame(StackFrame::JAVA_SCRIPT, sp_delta);
      __ blr();
    }
  }
}
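
// Illustrative note (not in the original source): arg_count includes the
// receiver, so a function declared with two parameters computes
// sp_delta == (2 + 1) * kPointerSize and pops the receiver plus arguments
// along with the frame before the blr.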

void FullCodeGenerator::RestoreContext() {
  __ LoadP(cp, MemOperand(fp, StandardFrameConstants::kContextOffset));
}

void FullCodeGenerator::StackValueContext::Plug(Variable* var) const {
  DCHECK(var->IsStackAllocated() || var->IsContextSlot());
  codegen()->GetVar(result_register(), var);
  codegen()->PushOperand(result_register());
}


void FullCodeGenerator::EffectContext::Plug(Heap::RootListIndex index) const {}


void FullCodeGenerator::AccumulatorValueContext::Plug(
    Heap::RootListIndex index) const {
  __ LoadRoot(result_register(), index);
}


void FullCodeGenerator::StackValueContext::Plug(
    Heap::RootListIndex index) const {
  __ LoadRoot(result_register(), index);
  codegen()->PushOperand(result_register());
}


void FullCodeGenerator::TestContext::Plug(Heap::RootListIndex index) const {
  codegen()->PrepareForBailoutBeforeSplit(condition(), true, true_label_,
                                          false_label_);
  if (index == Heap::kUndefinedValueRootIndex ||
      index == Heap::kNullValueRootIndex ||
      index == Heap::kFalseValueRootIndex) {
    if (false_label_ != fall_through_) __ b(false_label_);
  } else if (index == Heap::kTrueValueRootIndex) {
    if (true_label_ != fall_through_) __ b(true_label_);
  } else {
    __ LoadRoot(result_register(), index);
    codegen()->DoTest(this);
  }
}


void FullCodeGenerator::EffectContext::Plug(Handle<Object> lit) const {}


void FullCodeGenerator::AccumulatorValueContext::Plug(
    Handle<Object> lit) const {
  __ mov(result_register(), Operand(lit));
}


void FullCodeGenerator::StackValueContext::Plug(Handle<Object> lit) const {
  // Immediates cannot be pushed directly.
  __ mov(result_register(), Operand(lit));
  codegen()->PushOperand(result_register());
}


void FullCodeGenerator::TestContext::Plug(Handle<Object> lit) const {
  codegen()->PrepareForBailoutBeforeSplit(condition(), true, true_label_,
                                          false_label_);
  DCHECK(lit->IsNull(isolate()) || lit->IsUndefined(isolate()) ||
         !lit->IsUndetectable());
  if (lit->IsUndefined(isolate()) || lit->IsNull(isolate()) ||
      lit->IsFalse(isolate())) {
    if (false_label_ != fall_through_) __ b(false_label_);
  } else if (lit->IsTrue(isolate()) || lit->IsJSObject()) {
    if (true_label_ != fall_through_) __ b(true_label_);
  } else if (lit->IsString()) {
    if (String::cast(*lit)->length() == 0) {
      if (false_label_ != fall_through_) __ b(false_label_);
    } else {
      if (true_label_ != fall_through_) __ b(true_label_);
    }
  } else if (lit->IsSmi()) {
    if (Smi::cast(*lit)->value() == 0) {
      if (false_label_ != fall_through_) __ b(false_label_);
    } else {
      if (true_label_ != fall_through_) __ b(true_label_);
    }
  } else {
    // For simplicity we always test the accumulator register.
    __ mov(result_register(), Operand(lit));
    codegen()->DoTest(this);
  }
}

void FullCodeGenerator::StackValueContext::DropAndPlug(int count,
                                                       Register reg) const {
  DCHECK(count > 0);
  if (count > 1) codegen()->DropOperands(count - 1);
  __ StoreP(reg, MemOperand(sp, 0));
}

void FullCodeGenerator::EffectContext::Plug(Label* materialize_true,
                                            Label* materialize_false) const {
  DCHECK(materialize_true == materialize_false);
  __ bind(materialize_true);
}


void FullCodeGenerator::AccumulatorValueContext::Plug(
    Label* materialize_true, Label* materialize_false) const {
  Label done;
  __ bind(materialize_true);
  __ LoadRoot(result_register(), Heap::kTrueValueRootIndex);
  __ b(&done);
  __ bind(materialize_false);
  __ LoadRoot(result_register(), Heap::kFalseValueRootIndex);
  __ bind(&done);
}


void FullCodeGenerator::StackValueContext::Plug(
    Label* materialize_true, Label* materialize_false) const {
  Label done;
  __ bind(materialize_true);
  __ LoadRoot(ip, Heap::kTrueValueRootIndex);
  __ b(&done);
  __ bind(materialize_false);
  __ LoadRoot(ip, Heap::kFalseValueRootIndex);
  __ bind(&done);
  codegen()->PushOperand(ip);
}


void FullCodeGenerator::TestContext::Plug(Label* materialize_true,
                                          Label* materialize_false) const {
  DCHECK(materialize_true == true_label_);
  DCHECK(materialize_false == false_label_);
}


void FullCodeGenerator::AccumulatorValueContext::Plug(bool flag) const {
  Heap::RootListIndex value_root_index =
      flag ? Heap::kTrueValueRootIndex : Heap::kFalseValueRootIndex;
  __ LoadRoot(result_register(), value_root_index);
}


void FullCodeGenerator::StackValueContext::Plug(bool flag) const {
  Heap::RootListIndex value_root_index =
      flag ? Heap::kTrueValueRootIndex : Heap::kFalseValueRootIndex;
  __ LoadRoot(ip, value_root_index);
  codegen()->PushOperand(ip);
}


void FullCodeGenerator::TestContext::Plug(bool flag) const {
  codegen()->PrepareForBailoutBeforeSplit(condition(), true, true_label_,
                                          false_label_);
  if (flag) {
    if (true_label_ != fall_through_) __ b(true_label_);
  } else {
    if (false_label_ != fall_through_) __ b(false_label_);
  }
}

void FullCodeGenerator::DoTest(Expression* condition, Label* if_true,
                               Label* if_false, Label* fall_through) {
  Handle<Code> ic = ToBooleanICStub::GetUninitialized(isolate());
  CallIC(ic, condition->test_id());
  __ CompareRoot(result_register(), Heap::kTrueValueRootIndex);
  Split(eq, if_true, if_false, fall_through);
}


void FullCodeGenerator::Split(Condition cond, Label* if_true, Label* if_false,
                              Label* fall_through, CRegister cr) {
  if (if_false == fall_through) {
    __ b(cond, if_true, cr);
  } else if (if_true == fall_through) {
    __ b(NegateCondition(cond), if_false, cr);
  } else {
    __ b(cond, if_true, cr);
    __ b(if_false);
  }
}
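
// Illustrative note (not in the original source): Split emits the minimum
// number of branches. If if_false is the fall-through block, only the
// conditional branch to if_true is emitted; if if_true falls through, the
// condition is negated and only the branch to if_false is emitted; only when
// neither label falls through are both a conditional and an unconditional
// branch needed.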


MemOperand FullCodeGenerator::StackOperand(Variable* var) {
  DCHECK(var->IsStackAllocated());
  // Offset is negative because higher indexes are at lower addresses.
  int offset = -var->index() * kPointerSize;
  // Adjust by a (parameter or local) base offset.
  if (var->IsParameter()) {
    offset += (info_->scope()->num_parameters() + 1) * kPointerSize;
  } else {
    offset += JavaScriptFrameConstants::kLocal0Offset;
  }
  return MemOperand(fp, offset);
}
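
// Illustrative note (not in the original source): assuming 64-bit pointers
// and a function with two parameters, parameter 0 resolves to
// MemOperand(fp, (2 + 1) * 8) and parameter 1 to MemOperand(fp, 2 * 8),
// while local 0 resolves to MemOperand(fp, kLocal0Offset) and local 1 to one
// pointer below it.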


MemOperand FullCodeGenerator::VarOperand(Variable* var, Register scratch) {
  DCHECK(var->IsContextSlot() || var->IsStackAllocated());
  if (var->IsContextSlot()) {
    int context_chain_length = scope()->ContextChainLength(var->scope());
    __ LoadContext(scratch, context_chain_length);
    return ContextMemOperand(scratch, var->index());
  } else {
    return StackOperand(var);
  }
}


void FullCodeGenerator::GetVar(Register dest, Variable* var) {
  // Use destination as scratch.
  MemOperand location = VarOperand(var, dest);
  __ LoadP(dest, location, r0);
}


void FullCodeGenerator::SetVar(Variable* var, Register src, Register scratch0,
                               Register scratch1) {
  DCHECK(var->IsContextSlot() || var->IsStackAllocated());
  DCHECK(!scratch0.is(src));
  DCHECK(!scratch0.is(scratch1));
  DCHECK(!scratch1.is(src));
  MemOperand location = VarOperand(var, scratch0);
  __ StoreP(src, location, r0);

  // Emit the write barrier code if the location is in the heap.
  if (var->IsContextSlot()) {
    __ RecordWriteContextSlot(scratch0, location.offset(), src, scratch1,
                              kLRHasBeenSaved, kDontSaveFPRegs);
  }
}


void FullCodeGenerator::PrepareForBailoutBeforeSplit(Expression* expr,
                                                     bool should_normalize,
                                                     Label* if_true,
                                                     Label* if_false) {
  // Only prepare for bailouts before splits if we're in a test
  // context. Otherwise, we let the Visit function deal with the
  // preparation to avoid preparing with the same AST id twice.
  if (!context()->IsTest()) return;

  Label skip;
  if (should_normalize) __ b(&skip);
  PrepareForBailout(expr, BailoutState::TOS_REGISTER);
  if (should_normalize) {
    __ LoadRoot(ip, Heap::kTrueValueRootIndex);
    __ cmp(r3, ip);
    Split(eq, if_true, if_false, NULL);
    __ bind(&skip);
  }
}


void FullCodeGenerator::EmitDebugCheckDeclarationContext(Variable* variable) {
  // The variable in the declaration always resides in the current function
  // context.
  DCHECK_EQ(0, scope()->ContextChainLength(variable->scope()));
  if (FLAG_debug_code) {
    // Check that we're not inside a with or catch context.
    __ LoadP(r4, FieldMemOperand(cp, HeapObject::kMapOffset));
    __ CompareRoot(r4, Heap::kWithContextMapRootIndex);
    __ Check(ne, kDeclarationInWithContext);
    __ CompareRoot(r4, Heap::kCatchContextMapRootIndex);
    __ Check(ne, kDeclarationInCatchContext);
  }
}


void FullCodeGenerator::VisitVariableDeclaration(
    VariableDeclaration* declaration) {
  // If it was not possible to allocate the variable at compile time, we
  // need to "declare" it at runtime to make sure it actually exists in the
  // local context.
  VariableProxy* proxy = declaration->proxy();
  VariableMode mode = declaration->mode();
  Variable* variable = proxy->var();
  bool hole_init = mode == LET || mode == CONST;
  switch (variable->location()) {
    case VariableLocation::GLOBAL:
    case VariableLocation::UNALLOCATED:
      DCHECK(!variable->binding_needs_init());
      globals_->Add(variable->name(), zone());
      globals_->Add(isolate()->factory()->undefined_value(), zone());
      break;

    case VariableLocation::PARAMETER:
    case VariableLocation::LOCAL:
      if (hole_init) {
        Comment cmnt(masm_, "[ VariableDeclaration");
        __ LoadRoot(ip, Heap::kTheHoleValueRootIndex);
        __ StoreP(ip, StackOperand(variable));
      }
      break;

    case VariableLocation::CONTEXT:
      if (hole_init) {
        Comment cmnt(masm_, "[ VariableDeclaration");
        EmitDebugCheckDeclarationContext(variable);
        __ LoadRoot(ip, Heap::kTheHoleValueRootIndex);
        __ StoreP(ip, ContextMemOperand(cp, variable->index()), r0);
        // No write barrier since the_hole_value is in old space.
        PrepareForBailoutForId(proxy->id(), BailoutState::NO_REGISTERS);
      }
      break;

    case VariableLocation::LOOKUP: {
      Comment cmnt(masm_, "[ VariableDeclaration");
      DCHECK_EQ(VAR, mode);
      DCHECK(!hole_init);
      __ mov(r5, Operand(variable->name()));
      __ Push(r5);
      __ CallRuntime(Runtime::kDeclareEvalVar);
      PrepareForBailoutForId(proxy->id(), BailoutState::NO_REGISTERS);
      break;
    }
  }
}
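
// Illustrative note (not in the original source): hole_init means lexical
// bindings such as "let x" are pre-initialized with the_hole; a read that
// executes before the declaration then fails the hole check emitted in
// EmitVariableLoad and throws a ReferenceError (the temporal dead zone).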


void FullCodeGenerator::VisitFunctionDeclaration(
    FunctionDeclaration* declaration) {
  VariableProxy* proxy = declaration->proxy();
  Variable* variable = proxy->var();
  switch (variable->location()) {
    case VariableLocation::GLOBAL:
    case VariableLocation::UNALLOCATED: {
      globals_->Add(variable->name(), zone());
      Handle<SharedFunctionInfo> function =
          Compiler::GetSharedFunctionInfo(declaration->fun(), script(), info_);
      // Check for stack-overflow exception.
      if (function.is_null()) return SetStackOverflow();
      globals_->Add(function, zone());
      break;
    }

    case VariableLocation::PARAMETER:
    case VariableLocation::LOCAL: {
      Comment cmnt(masm_, "[ FunctionDeclaration");
      VisitForAccumulatorValue(declaration->fun());
      __ StoreP(result_register(), StackOperand(variable));
      break;
    }

    case VariableLocation::CONTEXT: {
      Comment cmnt(masm_, "[ FunctionDeclaration");
      EmitDebugCheckDeclarationContext(variable);
      VisitForAccumulatorValue(declaration->fun());
      __ StoreP(result_register(), ContextMemOperand(cp, variable->index()),
                r0);
      int offset = Context::SlotOffset(variable->index());
      // We know that we have written a function, which is not a smi.
      __ RecordWriteContextSlot(cp, offset, result_register(), r5,
                                kLRHasBeenSaved, kDontSaveFPRegs,
                                EMIT_REMEMBERED_SET, OMIT_SMI_CHECK);
      PrepareForBailoutForId(proxy->id(), BailoutState::NO_REGISTERS);
      break;
    }

    case VariableLocation::LOOKUP: {
      Comment cmnt(masm_, "[ FunctionDeclaration");
      __ mov(r5, Operand(variable->name()));
      PushOperand(r5);
      // Push initial value for function declaration.
      VisitForStackValue(declaration->fun());
      CallRuntimeWithOperands(Runtime::kDeclareEvalFunction);
      PrepareForBailoutForId(proxy->id(), BailoutState::NO_REGISTERS);
      break;
    }
  }
}


void FullCodeGenerator::DeclareGlobals(Handle<FixedArray> pairs) {
  // Call the runtime to declare the globals.
  __ mov(r4, Operand(pairs));
  __ LoadSmiLiteral(r3, Smi::FromInt(DeclareGlobalsFlags()));
  __ Push(r4, r3);
  __ CallRuntime(Runtime::kDeclareGlobals);
  // Return value is ignored.
}


void FullCodeGenerator::DeclareModules(Handle<FixedArray> descriptions) {
  // Call the runtime to declare the modules.
  __ Push(descriptions);
  __ CallRuntime(Runtime::kDeclareModules);
  // Return value is ignored.
}


void FullCodeGenerator::VisitSwitchStatement(SwitchStatement* stmt) {
  Comment cmnt(masm_, "[ SwitchStatement");
  Breakable nested_statement(this, stmt);
  SetStatementPosition(stmt);

  // Keep the switch value on the stack until a case matches.
  VisitForStackValue(stmt->tag());
  PrepareForBailoutForId(stmt->EntryId(), BailoutState::NO_REGISTERS);

  ZoneList<CaseClause*>* clauses = stmt->cases();
  CaseClause* default_clause = NULL;  // Can occur anywhere in the list.

  Label next_test;  // Recycled for each test.
  // Compile all the tests with branches to their bodies.
  for (int i = 0; i < clauses->length(); i++) {
    CaseClause* clause = clauses->at(i);
    clause->body_target()->Unuse();

    // The default is not a test, but remember it as final fall through.
    if (clause->is_default()) {
      default_clause = clause;
      continue;
    }

    Comment cmnt(masm_, "[ Case comparison");
    __ bind(&next_test);
    next_test.Unuse();

    // Compile the label expression.
    VisitForAccumulatorValue(clause->label());

    // Perform the comparison as if via '==='.
    __ LoadP(r4, MemOperand(sp, 0));  // Switch value.
    bool inline_smi_code = ShouldInlineSmiCase(Token::EQ_STRICT);
    JumpPatchSite patch_site(masm_);
    if (inline_smi_code) {
      Label slow_case;
      __ orx(r5, r4, r3);
      patch_site.EmitJumpIfNotSmi(r5, &slow_case);

      __ cmp(r4, r3);
      __ bne(&next_test);
      __ Drop(1);  // Switch value is no longer needed.
      __ b(clause->body_target());
      __ bind(&slow_case);
    }

    // Record position before stub call for type feedback.
    SetExpressionPosition(clause);
    Handle<Code> ic =
        CodeFactory::CompareIC(isolate(), Token::EQ_STRICT).code();
    CallIC(ic, clause->CompareId());
    patch_site.EmitPatchInfo();

    Label skip;
    __ b(&skip);
    PrepareForBailout(clause, BailoutState::TOS_REGISTER);
    __ LoadRoot(ip, Heap::kTrueValueRootIndex);
    __ cmp(r3, ip);
    __ bne(&next_test);
    __ Drop(1);
    __ b(clause->body_target());
    __ bind(&skip);

    __ cmpi(r3, Operand::Zero());
    __ bne(&next_test);
    __ Drop(1);  // Switch value is no longer needed.
    __ b(clause->body_target());
  }

  // Discard the test value and jump to the default if present, otherwise to
  // the end of the statement.
  __ bind(&next_test);
  DropOperands(1);  // Switch value is no longer needed.
  if (default_clause == NULL) {
    __ b(nested_statement.break_label());
  } else {
    __ b(default_clause->body_target());
  }

  // Compile all the case bodies.
  for (int i = 0; i < clauses->length(); i++) {
    Comment cmnt(masm_, "[ Case body");
    CaseClause* clause = clauses->at(i);
    __ bind(clause->body_target());
    PrepareForBailoutForId(clause->EntryId(), BailoutState::NO_REGISTERS);
    VisitStatements(clause->statements());
  }

  __ bind(nested_statement.break_label());
  PrepareForBailoutForId(stmt->ExitId(), BailoutState::NO_REGISTERS);
}


void FullCodeGenerator::VisitForInStatement(ForInStatement* stmt) {
  Comment cmnt(masm_, "[ ForInStatement");
  SetStatementPosition(stmt, SKIP_BREAK);

  FeedbackVectorSlot slot = stmt->ForInFeedbackSlot();

  // Get the object to enumerate over.
  SetExpressionAsStatementPosition(stmt->enumerable());
  VisitForAccumulatorValue(stmt->enumerable());
  OperandStackDepthIncrement(5);

  Label loop, exit;
  Iteration loop_statement(this, stmt);
  increment_loop_depth();

  // If the object is null or undefined, skip over the loop, otherwise convert
  // it to a JS receiver. See ECMA-262 version 5, section 12.6.4.
  Label convert, done_convert;
  __ JumpIfSmi(r3, &convert);
  __ CompareObjectType(r3, r4, r4, FIRST_JS_RECEIVER_TYPE);
  __ bge(&done_convert);
  __ CompareRoot(r3, Heap::kNullValueRootIndex);
  __ beq(&exit);
  __ CompareRoot(r3, Heap::kUndefinedValueRootIndex);
  __ beq(&exit);
  __ bind(&convert);
  ToObjectStub stub(isolate());
  __ CallStub(&stub);
  __ bind(&done_convert);
  PrepareForBailoutForId(stmt->ToObjectId(), BailoutState::TOS_REGISTER);
  __ push(r3);

  // Check cache validity in generated code. If we cannot guarantee cache
  // validity, call the runtime system to check cache validity or get the
  // property names in a fixed array. Note: Proxies never have an enum cache,
  // so will always take the slow path.
  Label call_runtime;
  __ CheckEnumCache(&call_runtime);

  // The enum cache is valid. Load the map of the object being
  // iterated over and use the cache for the iteration.
  Label use_cache;
  __ LoadP(r3, FieldMemOperand(r3, HeapObject::kMapOffset));
  __ b(&use_cache);

  // Get the set of properties to enumerate.
  __ bind(&call_runtime);
  __ push(r3);  // Duplicate the enumerable object on the stack.
  __ CallRuntime(Runtime::kForInEnumerate);
  PrepareForBailoutForId(stmt->EnumId(), BailoutState::TOS_REGISTER);

  // If we got a map from the runtime call, we can do a fast
  // modification check. Otherwise, we got a fixed array, and we have
  // to do a slow check.
  Label fixed_array;
  __ LoadP(r5, FieldMemOperand(r3, HeapObject::kMapOffset));
  __ LoadRoot(ip, Heap::kMetaMapRootIndex);
  __ cmp(r5, ip);
  __ bne(&fixed_array);

  // We got a map in register r3. Get the enumeration cache from it.
  Label no_descriptors;
  __ bind(&use_cache);

  __ EnumLength(r4, r3);
  __ CmpSmiLiteral(r4, Smi::FromInt(0), r0);
  __ beq(&no_descriptors);

  __ LoadInstanceDescriptors(r3, r5);
  __ LoadP(r5, FieldMemOperand(r5, DescriptorArray::kEnumCacheOffset));
  __ LoadP(r5,
           FieldMemOperand(r5, DescriptorArray::kEnumCacheBridgeCacheOffset));

  // Set up the four remaining stack slots.
  __ push(r3);  // Map.
  __ LoadSmiLiteral(r3, Smi::FromInt(0));
  // Push enumeration cache, enumeration cache length (as smi) and zero.
  __ Push(r5, r4, r3);
  __ b(&loop);

  __ bind(&no_descriptors);
  __ Drop(1);
  __ b(&exit);

  // We got a fixed array in register r3. Iterate through that.
  __ bind(&fixed_array);

  __ LoadSmiLiteral(r4, Smi::FromInt(1));  // Smi(1) indicates slow check.
  __ Push(r4, r3);  // Smi and array
  __ LoadP(r4, FieldMemOperand(r3, FixedArray::kLengthOffset));
  __ Push(r4);  // Fixed array length (as smi).
  PrepareForBailoutForId(stmt->PrepareId(), BailoutState::NO_REGISTERS);
  __ LoadSmiLiteral(r3, Smi::FromInt(0));
  __ Push(r3);  // Initial index.

  // Generate code for doing the condition check.
  __ bind(&loop);
  SetExpressionAsStatementPosition(stmt->each());

  // Load the current count to r3, load the length to r4.
  __ LoadP(r3, MemOperand(sp, 0 * kPointerSize));
  __ LoadP(r4, MemOperand(sp, 1 * kPointerSize));
  __ cmpl(r3, r4);  // Compare to the array length.
  __ bge(loop_statement.break_label());

  // Get the current entry of the array into register r6.
  __ LoadP(r5, MemOperand(sp, 2 * kPointerSize));
  __ addi(r5, r5, Operand(FixedArray::kHeaderSize - kHeapObjectTag));
  __ SmiToPtrArrayOffset(r6, r3);
  __ LoadPX(r6, MemOperand(r6, r5));

  // Get the expected map from the stack or a smi in the
  // permanent slow case into register r5.
  __ LoadP(r5, MemOperand(sp, 3 * kPointerSize));

  // Check if the expected map still matches that of the enumerable.
  // If not, we may have to filter the key.
  Label update_each;
  __ LoadP(r4, MemOperand(sp, 4 * kPointerSize));
  __ LoadP(r7, FieldMemOperand(r4, HeapObject::kMapOffset));
  __ cmp(r7, r5);
  __ beq(&update_each);

  // We need to filter the key, record slow-path here.
  int const vector_index = SmiFromSlot(slot)->value();
  __ EmitLoadTypeFeedbackVector(r3);
  __ mov(r5, Operand(TypeFeedbackVector::MegamorphicSentinel(isolate())));
  __ StoreP(
      r5, FieldMemOperand(r3, FixedArray::OffsetOfElementAt(vector_index)), r0);

  // Convert the entry to a string or (smi) 0 if it isn't a property
  // any more. If the property has been removed while iterating, we
  // just skip it.
  __ Push(r4, r6);  // Enumerable and current entry.
  __ CallRuntime(Runtime::kForInFilter);
  PrepareForBailoutForId(stmt->FilterId(), BailoutState::TOS_REGISTER);
  __ mr(r6, r3);
  __ LoadRoot(r0, Heap::kUndefinedValueRootIndex);
  __ cmp(r3, r0);
  __ beq(loop_statement.continue_label());

  // Update the 'each' property or variable from the possibly filtered
  // entry in register r6.
  __ bind(&update_each);
  __ mr(result_register(), r6);
  // Perform the assignment as if via '='.
  {
    EffectContext context(this);
    EmitAssignment(stmt->each(), stmt->EachFeedbackSlot());
    PrepareForBailoutForId(stmt->AssignmentId(), BailoutState::NO_REGISTERS);
  }

  // Both Crankshaft and Turbofan expect BodyId to be right before stmt->body().
  PrepareForBailoutForId(stmt->BodyId(), BailoutState::NO_REGISTERS);
  // Generate code for the body of the loop.
  Visit(stmt->body());

  // Generate code for going to the next element by incrementing
  // the index (smi) stored on top of the stack.
  __ bind(loop_statement.continue_label());
  __ pop(r3);
  __ AddSmiLiteral(r3, r3, Smi::FromInt(1), r0);
  __ push(r3);

  EmitBackEdgeBookkeeping(stmt, &loop);
  __ b(&loop);

  // Remove the pointers stored on the stack.
  __ bind(loop_statement.break_label());
  DropOperands(5);

  // Exit and decrement the loop depth.
  PrepareForBailoutForId(stmt->ExitId(), BailoutState::NO_REGISTERS);
  __ bind(&exit);
  decrement_loop_depth();
}
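
// Illustrative note (not in the original source): during the for-in loop the
// five slots pushed above live on the operand stack as, from sp upward:
// current index (sp + 0), cache length (sp + 1), enum cache or fixed array
// (sp + 2), expected map or Smi(1) (sp + 3), and the enumerable object
// (sp + 4), matching the MemOperand(sp, n * kPointerSize) accesses in the
// loop header.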


void FullCodeGenerator::EmitSetHomeObject(Expression* initializer, int offset,
                                          FeedbackVectorSlot slot) {
  DCHECK(NeedsHomeObject(initializer));
  __ LoadP(StoreDescriptor::ReceiverRegister(), MemOperand(sp));
  __ mov(StoreDescriptor::NameRegister(),
         Operand(isolate()->factory()->home_object_symbol()));
  __ LoadP(StoreDescriptor::ValueRegister(),
           MemOperand(sp, offset * kPointerSize));
  EmitLoadStoreICSlot(slot);
  CallStoreIC();
}


void FullCodeGenerator::EmitSetHomeObjectAccumulator(Expression* initializer,
                                                     int offset,
                                                     FeedbackVectorSlot slot) {
  DCHECK(NeedsHomeObject(initializer));
  __ Move(StoreDescriptor::ReceiverRegister(), r3);
  __ mov(StoreDescriptor::NameRegister(),
         Operand(isolate()->factory()->home_object_symbol()));
  __ LoadP(StoreDescriptor::ValueRegister(),
           MemOperand(sp, offset * kPointerSize));
  EmitLoadStoreICSlot(slot);
  CallStoreIC();
}

void FullCodeGenerator::EmitLoadGlobalCheckExtensions(VariableProxy* proxy,
                                                      TypeofMode typeof_mode,
                                                      Label* slow) {
  Register current = cp;
  Register next = r4;
  Register temp = r5;

  Scope* s = scope();
  while (s != NULL) {
    if (s->num_heap_slots() > 0) {
      if (s->calls_sloppy_eval()) {
        // Check that extension is "the hole".
        __ LoadP(temp, ContextMemOperand(current, Context::EXTENSION_INDEX));
        __ JumpIfNotRoot(temp, Heap::kTheHoleValueRootIndex, slow);
      }
      // Load next context in chain.
      __ LoadP(next, ContextMemOperand(current, Context::PREVIOUS_INDEX));
      // Walk the rest of the chain without clobbering cp.
      current = next;
    }
    // If no outer scope calls eval, we do not need to check more
    // context extensions.
    if (!s->outer_scope_calls_sloppy_eval() || s->is_eval_scope()) break;
    s = s->outer_scope();
  }

  if (s->is_eval_scope()) {
    Label loop, fast;
    if (!current.is(next)) {
      __ Move(next, current);
    }
    __ bind(&loop);
    // Terminate at native context.
    __ LoadP(temp, FieldMemOperand(next, HeapObject::kMapOffset));
    __ LoadRoot(ip, Heap::kNativeContextMapRootIndex);
    __ cmp(temp, ip);
    __ beq(&fast);
    // Check that extension is "the hole".
    __ LoadP(temp, ContextMemOperand(next, Context::EXTENSION_INDEX));
    __ JumpIfNotRoot(temp, Heap::kTheHoleValueRootIndex, slow);
    // Load next context in chain.
    __ LoadP(next, ContextMemOperand(next, Context::PREVIOUS_INDEX));
    __ b(&loop);
    __ bind(&fast);
  }

  // All extension objects were empty and it is safe to use the normal global
  // load machinery.
  EmitGlobalVariableLoad(proxy, typeof_mode);
}


MemOperand FullCodeGenerator::ContextSlotOperandCheckExtensions(Variable* var,
                                                                Label* slow) {
  DCHECK(var->IsContextSlot());
  Register context = cp;
  Register next = r6;
  Register temp = r7;

  for (Scope* s = scope(); s != var->scope(); s = s->outer_scope()) {
    if (s->num_heap_slots() > 0) {
      if (s->calls_sloppy_eval()) {
        // Check that extension is "the hole".
        __ LoadP(temp, ContextMemOperand(context, Context::EXTENSION_INDEX));
        __ JumpIfNotRoot(temp, Heap::kTheHoleValueRootIndex, slow);
      }
      __ LoadP(next, ContextMemOperand(context, Context::PREVIOUS_INDEX));
      // Walk the rest of the chain without clobbering cp.
      context = next;
    }
  }
  // Check that last extension is "the hole".
  __ LoadP(temp, ContextMemOperand(context, Context::EXTENSION_INDEX));
  __ JumpIfNotRoot(temp, Heap::kTheHoleValueRootIndex, slow);

  // This function is used only for loads, not stores, so it's safe to
  // return a cp-based operand (the write barrier cannot be allowed to
  // destroy the cp register).
  return ContextMemOperand(context, var->index());
}
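
// Illustrative note (not in the original source): both walkers above encode
// the same fast path. They follow the static context chain through
// PREVIOUS_INDEX and check EXTENSION_INDEX for "the hole" at each scope that
// calls sloppy eval; a non-hole extension means an eval could have
// introduced a shadowing binding, so the load bails out to the slow path.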


void FullCodeGenerator::EmitDynamicLookupFastCase(VariableProxy* proxy,
                                                  TypeofMode typeof_mode,
                                                  Label* slow, Label* done) {
  // Generate fast-case code for variables that might be shadowed by
  // eval-introduced variables. Eval is used a lot without
  // introducing variables. In those cases, we do not want to
  // perform a runtime call for all variables in the scope
  // containing the eval.
  Variable* var = proxy->var();
  if (var->mode() == DYNAMIC_GLOBAL) {
    EmitLoadGlobalCheckExtensions(proxy, typeof_mode, slow);
    __ b(done);
  } else if (var->mode() == DYNAMIC_LOCAL) {
    Variable* local = var->local_if_not_shadowed();
    __ LoadP(r3, ContextSlotOperandCheckExtensions(local, slow));
    if (local->mode() == LET || local->mode() == CONST) {
      __ CompareRoot(r3, Heap::kTheHoleValueRootIndex);
      __ bne(done);
      __ mov(r3, Operand(var->name()));
      __ push(r3);
      __ CallRuntime(Runtime::kThrowReferenceError);
    }
    __ b(done);
  }
}


void FullCodeGenerator::EmitGlobalVariableLoad(VariableProxy* proxy,
                                               TypeofMode typeof_mode) {
#ifdef DEBUG
  Variable* var = proxy->var();
  DCHECK(var->IsUnallocatedOrGlobalSlot() ||
         (var->IsLookupSlot() && var->mode() == DYNAMIC_GLOBAL));
#endif
  __ mov(LoadGlobalDescriptor::SlotRegister(),
         Operand(SmiFromSlot(proxy->VariableFeedbackSlot())));
  CallLoadGlobalIC(typeof_mode);
}


void FullCodeGenerator::EmitVariableLoad(VariableProxy* proxy,
                                         TypeofMode typeof_mode) {
  // Record position before possible IC call.
  SetExpressionPosition(proxy);
  PrepareForBailoutForId(proxy->BeforeId(), BailoutState::NO_REGISTERS);
  Variable* var = proxy->var();

  // Three cases: global variables, lookup variables, and all other types of
  // variables.
  switch (var->location()) {
    case VariableLocation::GLOBAL:
    case VariableLocation::UNALLOCATED: {
      Comment cmnt(masm_, "[ Global variable");
      EmitGlobalVariableLoad(proxy, typeof_mode);
      context()->Plug(r3);
      break;
    }

    case VariableLocation::PARAMETER:
    case VariableLocation::LOCAL:
    case VariableLocation::CONTEXT: {
      DCHECK_EQ(NOT_INSIDE_TYPEOF, typeof_mode);
      Comment cmnt(masm_, var->IsContextSlot() ? "[ Context variable"
                                               : "[ Stack variable");
      if (NeedsHoleCheckForLoad(proxy)) {
        Label done;
        // Let and const need a read barrier.
        GetVar(r3, var);
        __ CompareRoot(r3, Heap::kTheHoleValueRootIndex);
        __ bne(&done);
        if (var->mode() == LET || var->mode() == CONST) {
          // Throw a reference error when using an uninitialized let/const
          // binding in harmony mode.
          __ mov(r3, Operand(var->name()));
          __ push(r3);
          __ CallRuntime(Runtime::kThrowReferenceError);
        }
        __ bind(&done);
        context()->Plug(r3);
        break;
      }
      context()->Plug(var);
      break;
    }

    case VariableLocation::LOOKUP: {
      Comment cmnt(masm_, "[ Lookup variable");
      Label done, slow;
      // Generate code for loading from variables potentially shadowed
      // by eval-introduced variables.
      EmitDynamicLookupFastCase(proxy, typeof_mode, &slow, &done);
      __ bind(&slow);
      __ Push(var->name());
      Runtime::FunctionId function_id =
          typeof_mode == NOT_INSIDE_TYPEOF
              ? Runtime::kLoadLookupSlot
              : Runtime::kLoadLookupSlotInsideTypeof;
      __ CallRuntime(function_id);
      __ bind(&done);
      context()->Plug(r3);
    }
  }
}


void FullCodeGenerator::EmitAccessor(ObjectLiteralProperty* property) {
  Expression* expression = (property == NULL) ? NULL : property->value();
  if (expression == NULL) {
    __ LoadRoot(r4, Heap::kNullValueRootIndex);
    PushOperand(r4);
  } else {
    VisitForStackValue(expression);
    if (NeedsHomeObject(expression)) {
      DCHECK(property->kind() == ObjectLiteral::Property::GETTER ||
             property->kind() == ObjectLiteral::Property::SETTER);
      int offset = property->kind() == ObjectLiteral::Property::GETTER ? 2 : 3;
      EmitSetHomeObject(expression, offset, property->GetSlot());
    }
  }
}


void FullCodeGenerator::VisitObjectLiteral(ObjectLiteral* expr) {
  Comment cmnt(masm_, "[ ObjectLiteral");

  Handle<FixedArray> constant_properties = expr->constant_properties();
  __ LoadP(r6, MemOperand(fp, JavaScriptFrameConstants::kFunctionOffset));
  __ LoadSmiLiteral(r5, Smi::FromInt(expr->literal_index()));
  __ mov(r4, Operand(constant_properties));
  int flags = expr->ComputeFlags();
  __ LoadSmiLiteral(r3, Smi::FromInt(flags));
  if (MustCreateObjectLiteralWithRuntime(expr)) {
    __ Push(r6, r5, r4, r3);
    __ CallRuntime(Runtime::kCreateObjectLiteral);
  } else {
    FastCloneShallowObjectStub stub(isolate(), expr->properties_count());
    __ CallStub(&stub);
    RestoreContext();
  }
  PrepareForBailoutForId(expr->CreateLiteralId(), BailoutState::TOS_REGISTER);

  // If result_saved is true the result is on top of the stack. If
  // result_saved is false the result is in r3.
  bool result_saved = false;

  AccessorTable accessor_table(zone());
  int property_index = 0;
  for (; property_index < expr->properties()->length(); property_index++) {
    ObjectLiteral::Property* property = expr->properties()->at(property_index);
    if (property->is_computed_name()) break;
    if (property->IsCompileTimeValue()) continue;

    Literal* key = property->key()->AsLiteral();
    Expression* value = property->value();
    if (!result_saved) {
      PushOperand(r3);  // Save result on stack.
      result_saved = true;
    }
    switch (property->kind()) {
      case ObjectLiteral::Property::CONSTANT:
        UNREACHABLE();
      case ObjectLiteral::Property::MATERIALIZED_LITERAL:
        DCHECK(!CompileTimeValue::IsCompileTimeValue(property->value()));
      // Fall through.
      case ObjectLiteral::Property::COMPUTED:
        // It is safe to use [[Put]] here because the boilerplate already
        // contains computed properties with an uninitialized value.
        if (key->value()->IsInternalizedString()) {
          if (property->emit_store()) {
            VisitForAccumulatorValue(value);
            DCHECK(StoreDescriptor::ValueRegister().is(r3));
            __ mov(StoreDescriptor::NameRegister(), Operand(key->value()));
            __ LoadP(StoreDescriptor::ReceiverRegister(), MemOperand(sp));
            EmitLoadStoreICSlot(property->GetSlot(0));
            CallStoreIC();
            PrepareForBailoutForId(key->id(), BailoutState::NO_REGISTERS);

            if (NeedsHomeObject(value)) {
              EmitSetHomeObjectAccumulator(value, 0, property->GetSlot(1));
            }
          } else {
            VisitForEffect(value);
          }
          break;
        }
        // Duplicate receiver on stack.
        __ LoadP(r3, MemOperand(sp));
        PushOperand(r3);
        VisitForStackValue(key);
        VisitForStackValue(value);
        if (property->emit_store()) {
          if (NeedsHomeObject(value)) {
            EmitSetHomeObject(value, 2, property->GetSlot());
          }
          __ LoadSmiLiteral(r3, Smi::FromInt(SLOPPY));  // PropertyAttributes
          PushOperand(r3);
          CallRuntimeWithOperands(Runtime::kSetProperty);
        } else {
          DropOperands(3);
        }
        break;
      case ObjectLiteral::Property::PROTOTYPE:
        // Duplicate receiver on stack.
        __ LoadP(r3, MemOperand(sp));
        PushOperand(r3);
        VisitForStackValue(value);
        DCHECK(property->emit_store());
        CallRuntimeWithOperands(Runtime::kInternalSetPrototype);
        PrepareForBailoutForId(expr->GetIdForPropertySet(property_index),
                               BailoutState::NO_REGISTERS);
        break;
      case ObjectLiteral::Property::GETTER:
        if (property->emit_store()) {
          AccessorTable::Iterator it = accessor_table.lookup(key);
          it->second->bailout_id = expr->GetIdForPropertySet(property_index);
          it->second->getter = property;
        }
        break;
      case ObjectLiteral::Property::SETTER:
        if (property->emit_store()) {
          AccessorTable::Iterator it = accessor_table.lookup(key);
          it->second->bailout_id = expr->GetIdForPropertySet(property_index);
          it->second->setter = property;
        }
        break;
    }
  }

  // Emit code to define accessors, using only a single call to the runtime for
  // each pair of corresponding getters and setters.
  for (AccessorTable::Iterator it = accessor_table.begin();
       it != accessor_table.end(); ++it) {
    __ LoadP(r3, MemOperand(sp));  // Duplicate receiver.
    PushOperand(r3);
    VisitForStackValue(it->first);
    EmitAccessor(it->second->getter);
    EmitAccessor(it->second->setter);
    __ LoadSmiLiteral(r3, Smi::FromInt(NONE));
    PushOperand(r3);
    CallRuntimeWithOperands(Runtime::kDefineAccessorPropertyUnchecked);
    PrepareForBailoutForId(it->second->bailout_id, BailoutState::NO_REGISTERS);
  }
1472
1473 // Object literals have two parts. The "static" part on the left contains no
1474 // computed property names, and so we can compute its map ahead of time; see
1475 // runtime.cc::CreateObjectLiteralBoilerplate. The second "dynamic" part
1476 // starts with the first computed property name, and continues with all
1477 // properties to its right. All the code from above initializes the static
1478 // component of the object literal, and arranges for the map of the result to
1479 // reflect the static order in which the keys appear. For the dynamic
1480 // properties, we compile them into a series of "SetOwnProperty" runtime
1481 // calls. This will preserve insertion order.
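  // For example (illustrative): in { a: 1, get b() {}, [k]: 2, c: 3 } the
  // properties 'a' and 'b' form the static part handled above, while [k]
  // and 'c' form the dynamic part handled by the loop below.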
  for (; property_index < expr->properties()->length(); property_index++) {
    ObjectLiteral::Property* property = expr->properties()->at(property_index);

    Expression* value = property->value();
    if (!result_saved) {
      PushOperand(r3);  // Save result on the stack
      result_saved = true;
    }

    __ LoadP(r3, MemOperand(sp));  // Duplicate receiver.
    PushOperand(r3);

    if (property->kind() == ObjectLiteral::Property::PROTOTYPE) {
      DCHECK(!property->is_computed_name());
      VisitForStackValue(value);
      DCHECK(property->emit_store());
      CallRuntimeWithOperands(Runtime::kInternalSetPrototype);
      PrepareForBailoutForId(expr->GetIdForPropertySet(property_index),
                             BailoutState::NO_REGISTERS);
    } else {
      EmitPropertyKey(property, expr->GetIdForPropertyName(property_index));
      VisitForStackValue(value);
      if (NeedsHomeObject(value)) {
        EmitSetHomeObject(value, 2, property->GetSlot());
      }

      switch (property->kind()) {
        case ObjectLiteral::Property::CONSTANT:
        case ObjectLiteral::Property::MATERIALIZED_LITERAL:
        case ObjectLiteral::Property::COMPUTED:
          if (property->emit_store()) {
            PushOperand(Smi::FromInt(NONE));
            PushOperand(Smi::FromInt(property->NeedsSetFunctionName()));
            CallRuntimeWithOperands(Runtime::kDefineDataPropertyInLiteral);
            PrepareForBailoutForId(expr->GetIdForPropertySet(property_index),
                                   BailoutState::NO_REGISTERS);
          } else {
            DropOperands(3);
          }
          break;

        case ObjectLiteral::Property::PROTOTYPE:
          UNREACHABLE();
          break;

        case ObjectLiteral::Property::GETTER:
          PushOperand(Smi::FromInt(NONE));
          CallRuntimeWithOperands(Runtime::kDefineGetterPropertyUnchecked);
          break;

        case ObjectLiteral::Property::SETTER:
          PushOperand(Smi::FromInt(NONE));
          CallRuntimeWithOperands(Runtime::kDefineSetterPropertyUnchecked);
          break;
      }
    }
  }

  if (result_saved) {
    context()->PlugTOS();
  } else {
    context()->Plug(r3);
  }
}


void FullCodeGenerator::VisitArrayLiteral(ArrayLiteral* expr) {
  Comment cmnt(masm_, "[ ArrayLiteral");

  Handle<FixedArray> constant_elements = expr->constant_elements();
  bool has_fast_elements =
      IsFastObjectElementsKind(expr->constant_elements_kind());
  Handle<FixedArrayBase> constant_elements_values(
      FixedArrayBase::cast(constant_elements->get(1)));

  AllocationSiteMode allocation_site_mode = TRACK_ALLOCATION_SITE;
  if (has_fast_elements && !FLAG_allocation_site_pretenuring) {
    // If the only customer of allocation sites is transitioning, then
    // we can turn it off if we don't have anywhere else to transition to.
    allocation_site_mode = DONT_TRACK_ALLOCATION_SITE;
  }

  __ LoadP(r6, MemOperand(fp, JavaScriptFrameConstants::kFunctionOffset));
  __ LoadSmiLiteral(r5, Smi::FromInt(expr->literal_index()));
  __ mov(r4, Operand(constant_elements));
  if (MustCreateArrayLiteralWithRuntime(expr)) {
    __ LoadSmiLiteral(r3, Smi::FromInt(expr->ComputeFlags()));
    __ Push(r6, r5, r4, r3);
    __ CallRuntime(Runtime::kCreateArrayLiteral);
  } else {
    FastCloneShallowArrayStub stub(isolate(), allocation_site_mode);
    __ CallStub(&stub);
  }
  PrepareForBailoutForId(expr->CreateLiteralId(), BailoutState::TOS_REGISTER);

  bool result_saved = false;  // Is the result saved to the stack?
  ZoneList<Expression*>* subexprs = expr->values();
  int length = subexprs->length();

  // Emit code to evaluate all the non-constant subexpressions and to store
  // them into the newly cloned array.
  int array_index = 0;
  for (; array_index < length; array_index++) {
    Expression* subexpr = subexprs->at(array_index);
    DCHECK(!subexpr->IsSpread());
    // If the subexpression is a literal or a simple materialized literal it
    // is already set in the cloned array.
    if (CompileTimeValue::IsCompileTimeValue(subexpr)) continue;

    if (!result_saved) {
      PushOperand(r3);
      result_saved = true;
    }
    VisitForAccumulatorValue(subexpr);

    __ LoadSmiLiteral(StoreDescriptor::NameRegister(),
                      Smi::FromInt(array_index));
    __ LoadP(StoreDescriptor::ReceiverRegister(), MemOperand(sp, 0));
    EmitLoadStoreICSlot(expr->LiteralFeedbackSlot());
    Handle<Code> ic =
        CodeFactory::KeyedStoreIC(isolate(), language_mode()).code();
    CallIC(ic);

    PrepareForBailoutForId(expr->GetIdForElement(array_index),
                           BailoutState::NO_REGISTERS);
  }

  // If the array literal contains spread expressions, it has two parts. The
  // first part is the "static" array, which has a literal index and is
  // handled above. The second part starts at the first spread expression
  // (inclusive), and its elements get appended to the array. Note that the
  // number of elements an iterable produces is unknown ahead of time.
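  // E.g. (illustrative): for [1, 2, ...xs, 3], elements 1 and 2 are stored
  // through keyed-store ICs above, while ...xs and 3 are appended below via
  // Runtime::kAppendElement.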
  if (array_index < length && result_saved) {
    PopOperand(r3);
    result_saved = false;
  }
  for (; array_index < length; array_index++) {
    Expression* subexpr = subexprs->at(array_index);

    PushOperand(r3);
    DCHECK(!subexpr->IsSpread());
    VisitForStackValue(subexpr);
    CallRuntimeWithOperands(Runtime::kAppendElement);

    PrepareForBailoutForId(expr->GetIdForElement(array_index),
                           BailoutState::NO_REGISTERS);
  }

  if (result_saved) {
    context()->PlugTOS();
  } else {
    context()->Plug(r3);
  }
}


void FullCodeGenerator::VisitAssignment(Assignment* expr) {
  DCHECK(expr->target()->IsValidReferenceExpressionOrThis());

  Comment cmnt(masm_, "[ Assignment");

  Property* property = expr->target()->AsProperty();
  LhsKind assign_type = Property::GetAssignType(property);

  // Evaluate LHS expression.
  switch (assign_type) {
    case VARIABLE:
      // Nothing to do here.
      break;
    case NAMED_PROPERTY:
      if (expr->is_compound()) {
        // We need the receiver both on the stack and in the register.
        VisitForStackValue(property->obj());
        __ LoadP(LoadDescriptor::ReceiverRegister(), MemOperand(sp, 0));
      } else {
        VisitForStackValue(property->obj());
      }
      break;
    case NAMED_SUPER_PROPERTY:
      VisitForStackValue(
          property->obj()->AsSuperPropertyReference()->this_var());
      VisitForAccumulatorValue(
          property->obj()->AsSuperPropertyReference()->home_object());
      PushOperand(result_register());
      if (expr->is_compound()) {
        const Register scratch = r4;
        __ LoadP(scratch, MemOperand(sp, kPointerSize));
        PushOperands(scratch, result_register());
      }
      break;
    case KEYED_SUPER_PROPERTY: {
      const Register scratch = r4;
      VisitForStackValue(
          property->obj()->AsSuperPropertyReference()->this_var());
      VisitForAccumulatorValue(
          property->obj()->AsSuperPropertyReference()->home_object());
      __ mr(scratch, result_register());
      VisitForAccumulatorValue(property->key());
      PushOperands(scratch, result_register());
      if (expr->is_compound()) {
        const Register scratch1 = r5;
        __ LoadP(scratch1, MemOperand(sp, 2 * kPointerSize));
        PushOperands(scratch1, scratch, result_register());
      }
      break;
    }
    case KEYED_PROPERTY:
      if (expr->is_compound()) {
        VisitForStackValue(property->obj());
        VisitForStackValue(property->key());
        __ LoadP(LoadDescriptor::ReceiverRegister(),
                 MemOperand(sp, 1 * kPointerSize));
        __ LoadP(LoadDescriptor::NameRegister(), MemOperand(sp, 0));
      } else {
        VisitForStackValue(property->obj());
        VisitForStackValue(property->key());
      }
      break;
  }

  // For compound assignments we need another deoptimization point after the
  // variable/property load.
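  // (Illustrative: for o.x += v this block loads o.x, evaluates v and applies
  // the binary op; only the store switch further down writes the result back.)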
  if (expr->is_compound()) {
    {
      AccumulatorValueContext context(this);
      switch (assign_type) {
        case VARIABLE:
          EmitVariableLoad(expr->target()->AsVariableProxy());
          PrepareForBailout(expr->target(), BailoutState::TOS_REGISTER);
          break;
        case NAMED_PROPERTY:
          EmitNamedPropertyLoad(property);
          PrepareForBailoutForId(property->LoadId(),
                                 BailoutState::TOS_REGISTER);
          break;
        case NAMED_SUPER_PROPERTY:
          EmitNamedSuperPropertyLoad(property);
          PrepareForBailoutForId(property->LoadId(),
                                 BailoutState::TOS_REGISTER);
          break;
        case KEYED_SUPER_PROPERTY:
          EmitKeyedSuperPropertyLoad(property);
          PrepareForBailoutForId(property->LoadId(),
                                 BailoutState::TOS_REGISTER);
          break;
        case KEYED_PROPERTY:
          EmitKeyedPropertyLoad(property);
          PrepareForBailoutForId(property->LoadId(),
                                 BailoutState::TOS_REGISTER);
          break;
      }
    }

    Token::Value op = expr->binary_op();
    PushOperand(r3);  // Left operand goes on the stack.
    VisitForAccumulatorValue(expr->value());

    AccumulatorValueContext context(this);
    if (ShouldInlineSmiCase(op)) {
      EmitInlineSmiBinaryOp(expr->binary_operation(), op, expr->target(),
                            expr->value());
    } else {
      EmitBinaryOp(expr->binary_operation(), op);
    }

    // Deoptimization point in case the binary operation may have side effects.
    PrepareForBailout(expr->binary_operation(), BailoutState::TOS_REGISTER);
  } else {
    VisitForAccumulatorValue(expr->value());
  }

  SetExpressionPosition(expr);

  // Store the value.
  switch (assign_type) {
    case VARIABLE:
      EmitVariableAssignment(expr->target()->AsVariableProxy()->var(),
                             expr->op(), expr->AssignmentSlot());
      PrepareForBailoutForId(expr->AssignmentId(), BailoutState::TOS_REGISTER);
      context()->Plug(r3);
      break;
    case NAMED_PROPERTY:
      EmitNamedPropertyAssignment(expr);
      break;
    case NAMED_SUPER_PROPERTY:
      EmitNamedSuperPropertyStore(property);
      context()->Plug(r3);
      break;
    case KEYED_SUPER_PROPERTY:
      EmitKeyedSuperPropertyStore(property);
      context()->Plug(r3);
      break;
    case KEYED_PROPERTY:
      EmitKeyedPropertyAssignment(expr);
      break;
  }
}


void FullCodeGenerator::VisitYield(Yield* expr) {
  Comment cmnt(masm_, "[ Yield");
  SetExpressionPosition(expr);

  // Evaluate yielded value first; the initial iterator definition depends on
  // this. It stays on the stack while we update the iterator.
  VisitForStackValue(expr->expression());

  Label suspend, continuation, post_runtime, resume, exception;

  __ b(&suspend);
  __ bind(&continuation);
  // When we arrive here, r3 holds the generator object.
  __ RecordGeneratorContinuation();
  __ LoadP(r4, FieldMemOperand(r3, JSGeneratorObject::kResumeModeOffset));
  __ LoadP(r3, FieldMemOperand(r3, JSGeneratorObject::kInputOrDebugPosOffset));
  STATIC_ASSERT(JSGeneratorObject::kNext < JSGeneratorObject::kReturn);
  STATIC_ASSERT(JSGeneratorObject::kThrow > JSGeneratorObject::kReturn);
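  // Resume modes are ordered kNext < kReturn < kThrow, so a single compare
  // against kReturn dispatches all three cases: blt resumes with the input
  // value, bgt throws it, and the fall-through (equal) returns it wrapped in
  // an iterator result.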
  __ CmpSmiLiteral(r4, Smi::FromInt(JSGeneratorObject::kReturn), r0);
  __ blt(&resume);
  __ Push(result_register());
  __ bgt(&exception);
  EmitCreateIteratorResult(true);
  EmitUnwindAndReturn();

  __ bind(&exception);
  __ CallRuntime(Runtime::kThrow);

  __ bind(&suspend);
  OperandStackDepthIncrement(1);  // Not popped on this path.
  VisitForAccumulatorValue(expr->generator_object());
  DCHECK(continuation.pos() > 0 && Smi::IsValid(continuation.pos()));
  __ LoadSmiLiteral(r4, Smi::FromInt(continuation.pos()));
  __ StoreP(r4, FieldMemOperand(r3, JSGeneratorObject::kContinuationOffset),
            r0);
  __ StoreP(cp, FieldMemOperand(r3, JSGeneratorObject::kContextOffset), r0);
  __ mr(r4, cp);
  __ RecordWriteField(r3, JSGeneratorObject::kContextOffset, r4, r5,
                      kLRHasBeenSaved, kDontSaveFPRegs);
  __ addi(r4, fp, Operand(StandardFrameConstants::kExpressionsOffset));
  __ cmp(sp, r4);
  __ beq(&post_runtime);
  __ push(r3);  // generator object
  __ CallRuntime(Runtime::kSuspendJSGeneratorObject, 1);
  RestoreContext();
  __ bind(&post_runtime);
  PopOperand(result_register());
  EmitReturnSequence();

  __ bind(&resume);
  context()->Plug(result_register());
}

void FullCodeGenerator::PushOperands(Register reg1, Register reg2) {
  OperandStackDepthIncrement(2);
  __ Push(reg1, reg2);
}

void FullCodeGenerator::PushOperands(Register reg1, Register reg2,
                                     Register reg3) {
  OperandStackDepthIncrement(3);
  __ Push(reg1, reg2, reg3);
}

void FullCodeGenerator::PushOperands(Register reg1, Register reg2,
                                     Register reg3, Register reg4) {
  OperandStackDepthIncrement(4);
  __ Push(reg1, reg2, reg3, reg4);
}

void FullCodeGenerator::PopOperands(Register reg1, Register reg2) {
  OperandStackDepthDecrement(2);
  __ Pop(reg1, reg2);
}

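// Debug-mode sanity check for the tracked operand stack depth: the distance
// from fp to sp must equal the fixed frame size plus one pointer per tracked
// operand.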
void FullCodeGenerator::EmitOperandStackDepthCheck() {
  if (FLAG_debug_code) {
    int expected_diff = StandardFrameConstants::kFixedFrameSizeFromFp +
                        operand_stack_depth_ * kPointerSize;
    __ sub(r3, fp, sp);
    __ cmpi(r3, Operand(expected_diff));
    __ Assert(eq, kUnexpectedStackDepth);
  }
}

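// Allocates and fills in a JSIteratorResult: 'value' comes from the operand
// stack, 'done' from the argument; the result object is left in r3.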
void FullCodeGenerator::EmitCreateIteratorResult(bool done) {
  Label allocate, done_allocate;

  __ Allocate(JSIteratorResult::kSize, r3, r5, r6, &allocate,
              NO_ALLOCATION_FLAGS);
  __ b(&done_allocate);

  __ bind(&allocate);
  __ Push(Smi::FromInt(JSIteratorResult::kSize));
  __ CallRuntime(Runtime::kAllocateInNewSpace);

  __ bind(&done_allocate);
  __ LoadNativeContextSlot(Context::ITERATOR_RESULT_MAP_INDEX, r4);
  PopOperand(r5);
  __ LoadRoot(r6,
              done ? Heap::kTrueValueRootIndex : Heap::kFalseValueRootIndex);
  __ LoadRoot(r7, Heap::kEmptyFixedArrayRootIndex);
  __ StoreP(r4, FieldMemOperand(r3, HeapObject::kMapOffset), r0);
  __ StoreP(r7, FieldMemOperand(r3, JSObject::kPropertiesOffset), r0);
  __ StoreP(r7, FieldMemOperand(r3, JSObject::kElementsOffset), r0);
  __ StoreP(r5, FieldMemOperand(r3, JSIteratorResult::kValueOffset), r0);
  __ StoreP(r6, FieldMemOperand(r3, JSIteratorResult::kDoneOffset), r0);
}


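// Inline fast path for binary operations when both operands turn out to be
// smis; anything else (or an overflow) falls back to the BinaryOpIC stub via
// the patchable smi check recorded in the JumpPatchSite.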
void FullCodeGenerator::EmitInlineSmiBinaryOp(BinaryOperation* expr,
                                              Token::Value op,
                                              Expression* left_expr,
                                              Expression* right_expr) {
  Label done, smi_case, stub_call;

  Register scratch1 = r5;
  Register scratch2 = r6;

  // Get the arguments.
  Register left = r4;
  Register right = r3;
  PopOperand(left);

  // Perform combined smi check on both operands.
  __ orx(scratch1, left, right);
  STATIC_ASSERT(kSmiTag == 0);
  JumpPatchSite patch_site(masm_);
  patch_site.EmitJumpIfSmi(scratch1, &smi_case);

  __ bind(&stub_call);
  Handle<Code> code = CodeFactory::BinaryOpIC(isolate(), op).code();
  CallIC(code, expr->BinaryOperationFeedbackId());
  patch_site.EmitPatchInfo();
  __ b(&done);

  __ bind(&smi_case);
  // Smi case. This code works the same way as the smi-smi case in the type
  // recording binary operation stub.
  switch (op) {
    case Token::SAR:
      __ GetLeastBitsFromSmi(scratch1, right, 5);
      __ ShiftRightArith(right, left, scratch1);
      __ ClearRightImm(right, right, Operand(kSmiTagSize + kSmiShiftSize));
      break;
    case Token::SHL: {
      __ GetLeastBitsFromSmi(scratch2, right, 5);
#if V8_TARGET_ARCH_PPC64
      __ ShiftLeft_(right, left, scratch2);
#else
      __ SmiUntag(scratch1, left);
      __ ShiftLeft_(scratch1, scratch1, scratch2);
      // Check that the *signed* result fits in a smi.
      __ JumpIfNotSmiCandidate(scratch1, scratch2, &stub_call);
      __ SmiTag(right, scratch1);
#endif
      break;
    }
    case Token::SHR: {
      __ SmiUntag(scratch1, left);
      __ GetLeastBitsFromSmi(scratch2, right, 5);
      __ srw(scratch1, scratch1, scratch2);
      // Unsigned shift is not allowed to produce a negative number.
      __ JumpIfNotUnsignedSmiCandidate(scratch1, r0, &stub_call);
      __ SmiTag(right, scratch1);
      break;
    }
    case Token::ADD: {
      __ AddAndCheckForOverflow(scratch1, left, right, scratch2, r0);
      __ BranchOnOverflow(&stub_call);
      __ mr(right, scratch1);
      break;
    }
    case Token::SUB: {
      __ SubAndCheckForOverflow(scratch1, left, right, scratch2, r0);
      __ BranchOnOverflow(&stub_call);
      __ mr(right, scratch1);
      break;
    }
    case Token::MUL: {
      Label mul_zero;
#if V8_TARGET_ARCH_PPC64
      // Remove tag from both operands.
      __ SmiUntag(ip, right);
      __ SmiUntag(r0, left);
      __ Mul(scratch1, r0, ip);
      // Check for overflowing the smi range - no overflow if higher 33 bits of
      // the result are identical.
      __ TestIfInt32(scratch1, r0);
      __ bne(&stub_call);
#else
      __ SmiUntag(ip, right);
      __ mullw(scratch1, left, ip);
      __ mulhw(scratch2, left, ip);
      // Check for overflowing the smi range - no overflow if higher 33 bits of
      // the result are identical.
      __ TestIfInt32(scratch2, scratch1, ip);
      __ bne(&stub_call);
#endif
      // Go slow on zero result to handle -0.
      __ cmpi(scratch1, Operand::Zero());
      __ beq(&mul_zero);
#if V8_TARGET_ARCH_PPC64
      __ SmiTag(right, scratch1);
#else
      __ mr(right, scratch1);
#endif
      __ b(&done);
      // We need -0 if we were multiplying a negative number with 0 to get 0.
      // We know one of them was zero.
      __ bind(&mul_zero);
      __ add(scratch2, right, left);
      __ cmpi(scratch2, Operand::Zero());
      __ blt(&stub_call);
      __ LoadSmiLiteral(right, Smi::FromInt(0));
      break;
    }
    case Token::BIT_OR:
      __ orx(right, left, right);
      break;
    case Token::BIT_AND:
      __ and_(right, left, right);
      break;
    case Token::BIT_XOR:
      __ xor_(right, left, right);
      break;
    default:
      UNREACHABLE();
  }

  __ bind(&done);
  context()->Plug(r3);
}


void FullCodeGenerator::EmitClassDefineProperties(ClassLiteral* lit) {
  for (int i = 0; i < lit->properties()->length(); i++) {
    ObjectLiteral::Property* property = lit->properties()->at(i);
    Expression* value = property->value();

    Register scratch = r4;
    if (property->is_static()) {
      __ LoadP(scratch, MemOperand(sp, kPointerSize));  // constructor
    } else {
      __ LoadP(scratch, MemOperand(sp, 0));  // prototype
    }
    PushOperand(scratch);
    EmitPropertyKey(property, lit->GetIdForProperty(i));

    // The static 'prototype' property is read-only, and the parser already
    // rejects it when the property name is not computed. A computed name on
    // a static property is therefore the only case where we must check for
    // an own read-only property, so we special-case it here instead of
    // performing the check for every property.
    if (property->is_static() && property->is_computed_name()) {
      __ CallRuntime(Runtime::kThrowIfStaticPrototype);
      __ push(r3);
    }

    VisitForStackValue(value);
    if (NeedsHomeObject(value)) {
      EmitSetHomeObject(value, 2, property->GetSlot());
    }

    switch (property->kind()) {
      case ObjectLiteral::Property::CONSTANT:
      case ObjectLiteral::Property::MATERIALIZED_LITERAL:
      case ObjectLiteral::Property::PROTOTYPE:
        UNREACHABLE();
      case ObjectLiteral::Property::COMPUTED:
        PushOperand(Smi::FromInt(DONT_ENUM));
        PushOperand(Smi::FromInt(property->NeedsSetFunctionName()));
        CallRuntimeWithOperands(Runtime::kDefineDataPropertyInLiteral);
        break;

      case ObjectLiteral::Property::GETTER:
        PushOperand(Smi::FromInt(DONT_ENUM));
        CallRuntimeWithOperands(Runtime::kDefineGetterPropertyUnchecked);
        break;

      case ObjectLiteral::Property::SETTER:
        PushOperand(Smi::FromInt(DONT_ENUM));
        CallRuntimeWithOperands(Runtime::kDefineSetterPropertyUnchecked);
        break;

      default:
        UNREACHABLE();
    }
  }
}


void FullCodeGenerator::EmitBinaryOp(BinaryOperation* expr, Token::Value op) {
  PopOperand(r4);
  Handle<Code> code = CodeFactory::BinaryOpIC(isolate(), op).code();
  JumpPatchSite patch_site(masm_);  // unbound, signals no inlined smi code.
  CallIC(code, expr->BinaryOperationFeedbackId());
  patch_site.EmitPatchInfo();
  context()->Plug(r3);
}


void FullCodeGenerator::EmitAssignment(Expression* expr,
                                       FeedbackVectorSlot slot) {
  DCHECK(expr->IsValidReferenceExpressionOrThis());

  Property* prop = expr->AsProperty();
  LhsKind assign_type = Property::GetAssignType(prop);

  switch (assign_type) {
    case VARIABLE: {
      Variable* var = expr->AsVariableProxy()->var();
      EffectContext context(this);
      EmitVariableAssignment(var, Token::ASSIGN, slot);
      break;
    }
    case NAMED_PROPERTY: {
      PushOperand(r3);  // Preserve value.
      VisitForAccumulatorValue(prop->obj());
      __ Move(StoreDescriptor::ReceiverRegister(), r3);
      PopOperand(StoreDescriptor::ValueRegister());  // Restore value.
      __ mov(StoreDescriptor::NameRegister(),
             Operand(prop->key()->AsLiteral()->value()));
      EmitLoadStoreICSlot(slot);
      CallStoreIC();
      break;
    }
    case NAMED_SUPER_PROPERTY: {
      PushOperand(r3);
      VisitForStackValue(prop->obj()->AsSuperPropertyReference()->this_var());
      VisitForAccumulatorValue(
          prop->obj()->AsSuperPropertyReference()->home_object());
      // stack: value, this; r3: home_object
      Register scratch = r5;
      Register scratch2 = r6;
      __ mr(scratch, result_register());  // home_object
      __ LoadP(r3, MemOperand(sp, kPointerSize));  // value
      __ LoadP(scratch2, MemOperand(sp, 0));  // this
      __ StoreP(scratch2, MemOperand(sp, kPointerSize));  // this
      __ StoreP(scratch, MemOperand(sp, 0));  // home_object
      // stack: this, home_object; r3: value
      EmitNamedSuperPropertyStore(prop);
      break;
    }
    case KEYED_SUPER_PROPERTY: {
      PushOperand(r3);
      VisitForStackValue(prop->obj()->AsSuperPropertyReference()->this_var());
      VisitForStackValue(
          prop->obj()->AsSuperPropertyReference()->home_object());
      VisitForAccumulatorValue(prop->key());
      Register scratch = r5;
      Register scratch2 = r6;
      __ LoadP(scratch2, MemOperand(sp, 2 * kPointerSize));  // value
      // stack: value, this, home_object; r3: key, r6: value
      __ LoadP(scratch, MemOperand(sp, kPointerSize));  // this
      __ StoreP(scratch, MemOperand(sp, 2 * kPointerSize));
      __ LoadP(scratch, MemOperand(sp, 0));  // home_object
      __ StoreP(scratch, MemOperand(sp, kPointerSize));
      __ StoreP(r3, MemOperand(sp, 0));
      __ Move(r3, scratch2);
      // stack: this, home_object, key; r3: value.
      EmitKeyedSuperPropertyStore(prop);
      break;
    }
    case KEYED_PROPERTY: {
      PushOperand(r3);  // Preserve value.
      VisitForStackValue(prop->obj());
      VisitForAccumulatorValue(prop->key());
      __ Move(StoreDescriptor::NameRegister(), r3);
      PopOperands(StoreDescriptor::ValueRegister(),
                  StoreDescriptor::ReceiverRegister());
      EmitLoadStoreICSlot(slot);
      Handle<Code> ic =
          CodeFactory::KeyedStoreIC(isolate(), language_mode()).code();
      CallIC(ic);
      break;
    }
  }
  context()->Plug(r3);
}


void FullCodeGenerator::EmitStoreToStackLocalOrContextSlot(
    Variable* var, MemOperand location) {
  __ StoreP(result_register(), location, r0);
  if (var->IsContextSlot()) {
    // RecordWrite may destroy all its register arguments.
    __ mr(r6, result_register());
    int offset = Context::SlotOffset(var->index());
    __ RecordWriteContextSlot(r4, offset, r6, r5, kLRHasBeenSaved,
                              kDontSaveFPRegs);
  }
}


void FullCodeGenerator::EmitVariableAssignment(Variable* var, Token::Value op,
                                               FeedbackVectorSlot slot) {
  if (var->IsUnallocated()) {
    // Global var, const, or let.
    __ mov(StoreDescriptor::NameRegister(), Operand(var->name()));
    __ LoadGlobalObject(StoreDescriptor::ReceiverRegister());
    EmitLoadStoreICSlot(slot);
    CallStoreIC();

  } else if (var->mode() == LET && op != Token::INIT) {
    // Non-initializing assignment to let variable needs a write barrier.
    DCHECK(!var->IsLookupSlot());
    DCHECK(var->IsStackAllocated() || var->IsContextSlot());
    Label assign;
    MemOperand location = VarOperand(var, r4);
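    // An uninitialized let binding is marked by the-hole; assigning to it
    // before initialization must throw a ReferenceError (temporal dead zone).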
    __ LoadP(r6, location);
    __ CompareRoot(r6, Heap::kTheHoleValueRootIndex);
    __ bne(&assign);
    __ mov(r6, Operand(var->name()));
    __ push(r6);
    __ CallRuntime(Runtime::kThrowReferenceError);
    // Perform the assignment.
    __ bind(&assign);
    EmitStoreToStackLocalOrContextSlot(var, location);

  } else if (var->mode() == CONST && op != Token::INIT) {
    // Assignment to const variable needs a write barrier.
    DCHECK(!var->IsLookupSlot());
    DCHECK(var->IsStackAllocated() || var->IsContextSlot());
    Label const_error;
    MemOperand location = VarOperand(var, r4);
    __ LoadP(r6, location);
    __ CompareRoot(r6, Heap::kTheHoleValueRootIndex);
    __ bne(&const_error);
    __ mov(r6, Operand(var->name()));
    __ push(r6);
    __ CallRuntime(Runtime::kThrowReferenceError);
    __ bind(&const_error);
    __ CallRuntime(Runtime::kThrowConstAssignError);

  } else if (var->is_this() && var->mode() == CONST && op == Token::INIT) {
    // Initializing assignment to const {this} needs a write barrier.
    DCHECK(var->IsStackAllocated() || var->IsContextSlot());
    Label uninitialized_this;
    MemOperand location = VarOperand(var, r4);
    __ LoadP(r6, location);
    __ CompareRoot(r6, Heap::kTheHoleValueRootIndex);
    __ beq(&uninitialized_this);
    __ mov(r4, Operand(var->name()));
    __ push(r4);
    __ CallRuntime(Runtime::kThrowReferenceError);
    __ bind(&uninitialized_this);
    EmitStoreToStackLocalOrContextSlot(var, location);

  } else if (!var->is_const_mode() || op == Token::INIT) {
    if (var->IsLookupSlot()) {
      // Assignment to var.
      __ Push(var->name());
      __ Push(r3);
      __ CallRuntime(is_strict(language_mode())
                         ? Runtime::kStoreLookupSlot_Strict
                         : Runtime::kStoreLookupSlot_Sloppy);
    } else {
      // Assignment to var or initializing assignment to let/const in harmony
      // mode.
      DCHECK((var->IsStackAllocated() || var->IsContextSlot()));
      MemOperand location = VarOperand(var, r4);
      if (FLAG_debug_code && var->mode() == LET && op == Token::INIT) {
        // Check for an uninitialized let binding.
        __ LoadP(r5, location);
        __ CompareRoot(r5, Heap::kTheHoleValueRootIndex);
        __ Check(eq, kLetBindingReInitialization);
      }
      EmitStoreToStackLocalOrContextSlot(var, location);
    }
  } else {
    DCHECK(var->mode() == CONST_LEGACY && op != Token::INIT);
    if (is_strict(language_mode())) {
      __ CallRuntime(Runtime::kThrowConstAssignError);
    }
    // Silently ignore store in sloppy mode.
  }
}


void FullCodeGenerator::EmitNamedPropertyAssignment(Assignment* expr) {
  // Assignment to a property, using a named store IC.
  Property* prop = expr->target()->AsProperty();
  DCHECK(prop != NULL);
  DCHECK(prop->key()->IsLiteral());

  __ mov(StoreDescriptor::NameRegister(),
         Operand(prop->key()->AsLiteral()->value()));
  PopOperand(StoreDescriptor::ReceiverRegister());
  EmitLoadStoreICSlot(expr->AssignmentSlot());
  CallStoreIC();

  PrepareForBailoutForId(expr->AssignmentId(), BailoutState::TOS_REGISTER);
  context()->Plug(r3);
}


void FullCodeGenerator::EmitNamedSuperPropertyStore(Property* prop) {
  // Assignment to named property of super.
  // r3 : value
  // stack : receiver ('this'), home_object
  DCHECK(prop != NULL);
  Literal* key = prop->key()->AsLiteral();
  DCHECK(key != NULL);

  PushOperand(key->value());
  PushOperand(r3);
  CallRuntimeWithOperands((is_strict(language_mode())
                               ? Runtime::kStoreToSuper_Strict
                               : Runtime::kStoreToSuper_Sloppy));
}


void FullCodeGenerator::EmitKeyedSuperPropertyStore(Property* prop) {
  // Assignment to keyed property of super.
  // r3 : value
  // stack : receiver ('this'), home_object, key
  DCHECK(prop != NULL);

  PushOperand(r3);
  CallRuntimeWithOperands((is_strict(language_mode())
                               ? Runtime::kStoreKeyedToSuper_Strict
                               : Runtime::kStoreKeyedToSuper_Sloppy));
}


void FullCodeGenerator::EmitKeyedPropertyAssignment(Assignment* expr) {
  // Assignment to a property, using a keyed store IC.
  PopOperands(StoreDescriptor::ReceiverRegister(),
              StoreDescriptor::NameRegister());
  DCHECK(StoreDescriptor::ValueRegister().is(r3));

  Handle<Code> ic =
      CodeFactory::KeyedStoreIC(isolate(), language_mode()).code();
  EmitLoadStoreICSlot(expr->AssignmentSlot());
  CallIC(ic);

  PrepareForBailoutForId(expr->AssignmentId(), BailoutState::TOS_REGISTER);
  context()->Plug(r3);
}


void FullCodeGenerator::CallIC(Handle<Code> code, TypeFeedbackId ast_id) {
  ic_total_count_++;
  __ Call(code, RelocInfo::CODE_TARGET, ast_id);
}


// Code common for calls using the IC.
void FullCodeGenerator::EmitCallWithLoadIC(Call* expr) {
  Expression* callee = expr->expression();

  // Get the target function.
  ConvertReceiverMode convert_mode;
  if (callee->IsVariableProxy()) {
    {
      StackValueContext context(this);
      EmitVariableLoad(callee->AsVariableProxy());
      PrepareForBailout(callee, BailoutState::NO_REGISTERS);
    }
    // Push undefined as receiver. This is patched in the method prologue if it
    // is a sloppy mode method.
    __ LoadRoot(r0, Heap::kUndefinedValueRootIndex);
    PushOperand(r0);
    convert_mode = ConvertReceiverMode::kNullOrUndefined;
  } else {
    // Load the function from the receiver.
    DCHECK(callee->IsProperty());
    DCHECK(!callee->AsProperty()->IsSuperAccess());
    __ LoadP(LoadDescriptor::ReceiverRegister(), MemOperand(sp, 0));
    EmitNamedPropertyLoad(callee->AsProperty());
    PrepareForBailoutForId(callee->AsProperty()->LoadId(),
                           BailoutState::TOS_REGISTER);
    // Push the target function under the receiver.
    __ LoadP(r0, MemOperand(sp, 0));
    PushOperand(r0);
    __ StoreP(r3, MemOperand(sp, kPointerSize));
    convert_mode = ConvertReceiverMode::kNotNullOrUndefined;
  }

  EmitCall(expr, convert_mode);
}


void FullCodeGenerator::EmitSuperCallWithLoadIC(Call* expr) {
  Expression* callee = expr->expression();
  DCHECK(callee->IsProperty());
  Property* prop = callee->AsProperty();
  DCHECK(prop->IsSuperAccess());
  SetExpressionPosition(prop);

  Literal* key = prop->key()->AsLiteral();
  DCHECK(!key->value()->IsSmi());
  // Load the function from the receiver.
  const Register scratch = r4;
  SuperPropertyReference* super_ref = prop->obj()->AsSuperPropertyReference();
  VisitForAccumulatorValue(super_ref->home_object());
  __ mr(scratch, r3);
  VisitForAccumulatorValue(super_ref->this_var());
  PushOperands(scratch, r3, r3, scratch);
  PushOperand(key->value());

  // Stack here:
  //  - home_object
  //  - this (receiver)
  //  - this (receiver) <-- LoadFromSuper will pop here and below.
  //  - home_object
  //  - key
  CallRuntimeWithOperands(Runtime::kLoadFromSuper);
  PrepareForBailoutForId(prop->LoadId(), BailoutState::TOS_REGISTER);

  // Replace home_object with target function.
  __ StoreP(r3, MemOperand(sp, kPointerSize));

  // Stack here:
  // - target function
  // - this (receiver)
  EmitCall(expr);
}


// Code common for calls using the IC.
void FullCodeGenerator::EmitKeyedCallWithLoadIC(Call* expr, Expression* key) {
  // Load the key.
  VisitForAccumulatorValue(key);

  Expression* callee = expr->expression();

  // Load the function from the receiver.
  DCHECK(callee->IsProperty());
  __ LoadP(LoadDescriptor::ReceiverRegister(), MemOperand(sp, 0));
  __ Move(LoadDescriptor::NameRegister(), r3);
  EmitKeyedPropertyLoad(callee->AsProperty());
  PrepareForBailoutForId(callee->AsProperty()->LoadId(),
                         BailoutState::TOS_REGISTER);

  // Push the target function under the receiver.
  __ LoadP(ip, MemOperand(sp, 0));
  PushOperand(ip);
  __ StoreP(r3, MemOperand(sp, kPointerSize));

  EmitCall(expr, ConvertReceiverMode::kNotNullOrUndefined);
}


void FullCodeGenerator::EmitKeyedSuperCallWithLoadIC(Call* expr) {
  Expression* callee = expr->expression();
  DCHECK(callee->IsProperty());
  Property* prop = callee->AsProperty();
  DCHECK(prop->IsSuperAccess());

  SetExpressionPosition(prop);
  // Load the function from the receiver.
  const Register scratch = r4;
  SuperPropertyReference* super_ref = prop->obj()->AsSuperPropertyReference();
  VisitForAccumulatorValue(super_ref->home_object());
  __ mr(scratch, r3);
  VisitForAccumulatorValue(super_ref->this_var());
  PushOperands(scratch, r3, r3, scratch);
  VisitForStackValue(prop->key());

  // Stack here:
  //  - home_object
  //  - this (receiver)
  //  - this (receiver) <-- LoadKeyedFromSuper will pop here and below.
  //  - home_object
  //  - key
  CallRuntimeWithOperands(Runtime::kLoadKeyedFromSuper);
  PrepareForBailoutForId(prop->LoadId(), BailoutState::TOS_REGISTER);

  // Replace home_object with target function.
  __ StoreP(r3, MemOperand(sp, kPointerSize));

  // Stack here:
  // - target function
  // - this (receiver)
  EmitCall(expr);
}


void FullCodeGenerator::EmitCall(Call* expr, ConvertReceiverMode mode) {
  // Load the arguments.
  ZoneList<Expression*>* args = expr->arguments();
  int arg_count = args->length();
  for (int i = 0; i < arg_count; i++) {
    VisitForStackValue(args->at(i));
  }

  PrepareForBailoutForId(expr->CallId(), BailoutState::NO_REGISTERS);
  SetCallPosition(expr, expr->tail_call_mode());
  if (expr->tail_call_mode() == TailCallMode::kAllow) {
    if (FLAG_trace) {
      __ CallRuntime(Runtime::kTraceTailCall);
    }
    // Update profiling counters before the tail call since we will
    // not return to this function.
    EmitProfilingCounterHandlingForReturnSequence(true);
  }
  Handle<Code> ic =
      CodeFactory::CallIC(isolate(), arg_count, mode, expr->tail_call_mode())
          .code();
  __ LoadSmiLiteral(r6, SmiFromSlot(expr->CallFeedbackICSlot()));
  __ LoadP(r4, MemOperand(sp, (arg_count + 1) * kPointerSize), r0);
  // Don't assign a type feedback id to the IC, since type feedback is provided
  // by the vector above.
  CallIC(ic);
  OperandStackDepthDecrement(arg_count + 1);

  RecordJSReturnSite(expr);
  RestoreContext();
  context()->DropAndPlug(1, r3);
}


void FullCodeGenerator::EmitResolvePossiblyDirectEval(Call* expr) {
  int arg_count = expr->arguments()->length();
  // r7: copy of the first argument or undefined if it doesn't exist.
  if (arg_count > 0) {
    __ LoadP(r7, MemOperand(sp, arg_count * kPointerSize), r0);
  } else {
    __ LoadRoot(r7, Heap::kUndefinedValueRootIndex);
  }

  // r6: the enclosing function (loaded from the frame's function slot).
  __ LoadP(r6, MemOperand(fp, JavaScriptFrameConstants::kFunctionOffset));

  // r5: language mode.
  __ LoadSmiLiteral(r5, Smi::FromInt(language_mode()));

  // r4: the start position of the scope the call resides in.
  __ LoadSmiLiteral(r4, Smi::FromInt(scope()->start_position()));

  // r3: the source position of the eval call.
  __ LoadSmiLiteral(r3, Smi::FromInt(expr->position()));

  // Do the runtime call.
  __ Push(r7, r6, r5, r4, r3);
  __ CallRuntime(Runtime::kResolvePossiblyDirectEval);
}


// See http://www.ecma-international.org/ecma-262/6.0/#sec-function-calls.
void FullCodeGenerator::PushCalleeAndWithBaseObject(Call* expr) {
  VariableProxy* callee = expr->expression()->AsVariableProxy();
  if (callee->var()->IsLookupSlot()) {
    Label slow, done;
    SetExpressionPosition(callee);
    // Generate code for loading from variables potentially shadowed by
    // eval-introduced variables.
    EmitDynamicLookupFastCase(callee, NOT_INSIDE_TYPEOF, &slow, &done);

    __ bind(&slow);
    // Call the runtime to find the function to call (returned in r3) and
    // the object holding it (returned in r4).
    __ Push(callee->name());
    __ CallRuntime(Runtime::kLoadLookupSlotForCall);
    PushOperands(r3, r4);  // Function, receiver.
    PrepareForBailoutForId(expr->LookupId(), BailoutState::NO_REGISTERS);

    // If fast case code has been generated, emit code to push the function
    // and receiver and have the slow path jump around this code.
    if (done.is_linked()) {
      Label call;
      __ b(&call);
      __ bind(&done);
      // Push function.
      __ push(r3);
      // Pass undefined as the receiver, which is the WithBaseObject of a
      // non-object environment record. If the callee is sloppy, it will patch
      // it up to be the global receiver.
      __ LoadRoot(r4, Heap::kUndefinedValueRootIndex);
      __ push(r4);
      __ bind(&call);
    }
  } else {
    VisitForStackValue(callee);
    // refEnv.WithBaseObject()
    __ LoadRoot(r5, Heap::kUndefinedValueRootIndex);
    PushOperand(r5);  // Reserved receiver slot.
  }
}


void FullCodeGenerator::EmitPossiblyEvalCall(Call* expr) {
  // In a call to eval, we first call
  // Runtime_ResolvePossiblyDirectEval to resolve the function we need
  // to call. Then we call the resolved function using the given arguments.
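  // (Illustrative: for a direct call like eval("x + 1") the runtime first
  // checks that the callee really is the original global eval and, if so,
  // compiles the source in the current scope before the call below runs.)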
  ZoneList<Expression*>* args = expr->arguments();
  int arg_count = args->length();

  PushCalleeAndWithBaseObject(expr);

  // Push the arguments.
  for (int i = 0; i < arg_count; i++) {
    VisitForStackValue(args->at(i));
  }

  // Push a copy of the function (found below the arguments) and
  // resolve eval.
  __ LoadP(r4, MemOperand(sp, (arg_count + 1) * kPointerSize), r0);
  __ push(r4);
  EmitResolvePossiblyDirectEval(expr);

  // Touch up the stack with the resolved function.
  __ StoreP(r3, MemOperand(sp, (arg_count + 1) * kPointerSize), r0);

  PrepareForBailoutForId(expr->EvalId(), BailoutState::NO_REGISTERS);

  // Record source position for debugger.
  SetCallPosition(expr);
  __ LoadP(r4, MemOperand(sp, (arg_count + 1) * kPointerSize), r0);
  __ mov(r3, Operand(arg_count));
  __ Call(isolate()->builtins()->Call(ConvertReceiverMode::kAny,
                                      expr->tail_call_mode()),
          RelocInfo::CODE_TARGET);
  OperandStackDepthDecrement(arg_count + 1);
  RecordJSReturnSite(expr);
  RestoreContext();
  context()->DropAndPlug(1, r3);
}


void FullCodeGenerator::VisitCallNew(CallNew* expr) {
  Comment cmnt(masm_, "[ CallNew");
  // According to ECMA-262, section 11.2.2, page 44, the function
  // expression in new calls must be evaluated before the
  // arguments.

  // Push constructor on the stack. If it's not a function it's used as
  // receiver for CALL_NON_FUNCTION, otherwise the value on the stack is
  // ignored.
  DCHECK(!expr->expression()->IsSuperPropertyReference());
  VisitForStackValue(expr->expression());

  // Push the arguments ("left-to-right") on the stack.
  ZoneList<Expression*>* args = expr->arguments();
  int arg_count = args->length();
  for (int i = 0; i < arg_count; i++) {
    VisitForStackValue(args->at(i));
  }

  // Call the construct call builtin that handles allocation and
  // constructor invocation.
  SetConstructCallPosition(expr);

  // Load function and argument count into r4 and r3.
  __ mov(r3, Operand(arg_count));
  __ LoadP(r4, MemOperand(sp, arg_count * kPointerSize), r0);

  // Record call targets in unoptimized code.
  __ EmitLoadTypeFeedbackVector(r5);
  __ LoadSmiLiteral(r6, SmiFromSlot(expr->CallNewFeedbackSlot()));

  CallConstructStub stub(isolate());
  __ Call(stub.GetCode(), RelocInfo::CODE_TARGET);
  OperandStackDepthDecrement(arg_count + 1);
  PrepareForBailoutForId(expr->ReturnId(), BailoutState::TOS_REGISTER);
  RestoreContext();
  context()->Plug(r3);
}


void FullCodeGenerator::EmitSuperConstructorCall(Call* expr) {
  SuperCallReference* super_call_ref =
      expr->expression()->AsSuperCallReference();
  DCHECK_NOT_NULL(super_call_ref);

  // Push the super constructor target on the stack (may be null,
  // but the Construct builtin can deal with that properly).
  VisitForAccumulatorValue(super_call_ref->this_function_var());
  __ AssertFunction(result_register());
  __ LoadP(result_register(),
           FieldMemOperand(result_register(), HeapObject::kMapOffset));
  __ LoadP(result_register(),
           FieldMemOperand(result_register(), Map::kPrototypeOffset));
  PushOperand(result_register());

  // Push the arguments ("left-to-right") on the stack.
  ZoneList<Expression*>* args = expr->arguments();
  int arg_count = args->length();
  for (int i = 0; i < arg_count; i++) {
    VisitForStackValue(args->at(i));
  }

  // Call the construct call builtin that handles allocation and
  // constructor invocation.
  SetConstructCallPosition(expr);

  // Load new target into r6.
  VisitForAccumulatorValue(super_call_ref->new_target_var());
  __ mr(r6, result_register());

  // Load function and argument count into r4 and r3.
  __ mov(r3, Operand(arg_count));
  __ LoadP(r4, MemOperand(sp, arg_count * kPointerSize));

  __ Call(isolate()->builtins()->Construct(), RelocInfo::CODE_TARGET);
  OperandStackDepthDecrement(arg_count + 1);

  RecordJSReturnSite(expr);
  RestoreContext();
  context()->Plug(r3);
}


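// The EmitIs* intrinsics below share one pattern: materialize the argument
// in r3, obtain true/false branch targets from context()->PrepareTest(),
// perform the type check, and Split() on the resulting condition.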
void FullCodeGenerator::EmitIsSmi(CallRuntime* expr) {
  ZoneList<Expression*>* args = expr->arguments();
  DCHECK(args->length() == 1);

  VisitForAccumulatorValue(args->at(0));

  Label materialize_true, materialize_false;
  Label* if_true = NULL;
  Label* if_false = NULL;
  Label* fall_through = NULL;
  context()->PrepareTest(&materialize_true, &materialize_false, &if_true,
                         &if_false, &fall_through);

  PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
  __ TestIfSmi(r3, r0);
  Split(eq, if_true, if_false, fall_through, cr0);

  context()->Plug(if_true, if_false);
}


void FullCodeGenerator::EmitIsJSReceiver(CallRuntime* expr) {
  ZoneList<Expression*>* args = expr->arguments();
  DCHECK(args->length() == 1);

  VisitForAccumulatorValue(args->at(0));

  Label materialize_true, materialize_false;
  Label* if_true = NULL;
  Label* if_false = NULL;
  Label* fall_through = NULL;
  context()->PrepareTest(&materialize_true, &materialize_false, &if_true,
                         &if_false, &fall_through);

  __ JumpIfSmi(r3, if_false);
  __ CompareObjectType(r3, r4, r4, FIRST_JS_RECEIVER_TYPE);
  PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
  Split(ge, if_true, if_false, fall_through);

  context()->Plug(if_true, if_false);
}


void FullCodeGenerator::EmitIsArray(CallRuntime* expr) {
  ZoneList<Expression*>* args = expr->arguments();
  DCHECK(args->length() == 1);

  VisitForAccumulatorValue(args->at(0));

  Label materialize_true, materialize_false;
  Label* if_true = NULL;
  Label* if_false = NULL;
  Label* fall_through = NULL;
  context()->PrepareTest(&materialize_true, &materialize_false, &if_true,
                         &if_false, &fall_through);

  __ JumpIfSmi(r3, if_false);
  __ CompareObjectType(r3, r4, r4, JS_ARRAY_TYPE);
  PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
  Split(eq, if_true, if_false, fall_through);

  context()->Plug(if_true, if_false);
}


void FullCodeGenerator::EmitIsTypedArray(CallRuntime* expr) {
  ZoneList<Expression*>* args = expr->arguments();
  DCHECK(args->length() == 1);

  VisitForAccumulatorValue(args->at(0));

  Label materialize_true, materialize_false;
  Label* if_true = NULL;
  Label* if_false = NULL;
  Label* fall_through = NULL;
  context()->PrepareTest(&materialize_true, &materialize_false, &if_true,
                         &if_false, &fall_through);

  __ JumpIfSmi(r3, if_false);
  __ CompareObjectType(r3, r4, r4, JS_TYPED_ARRAY_TYPE);
  PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
  Split(eq, if_true, if_false, fall_through);

  context()->Plug(if_true, if_false);
}


void FullCodeGenerator::EmitIsRegExp(CallRuntime* expr) {
  ZoneList<Expression*>* args = expr->arguments();
  DCHECK(args->length() == 1);

  VisitForAccumulatorValue(args->at(0));

  Label materialize_true, materialize_false;
  Label* if_true = NULL;
  Label* if_false = NULL;
  Label* fall_through = NULL;
  context()->PrepareTest(&materialize_true, &materialize_false, &if_true,
                         &if_false, &fall_through);

  __ JumpIfSmi(r3, if_false);
  __ CompareObjectType(r3, r4, r4, JS_REGEXP_TYPE);
  PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
  Split(eq, if_true, if_false, fall_through);

  context()->Plug(if_true, if_false);
}


void FullCodeGenerator::EmitIsJSProxy(CallRuntime* expr) {
  ZoneList<Expression*>* args = expr->arguments();
  DCHECK(args->length() == 1);

  VisitForAccumulatorValue(args->at(0));

  Label materialize_true, materialize_false;
  Label* if_true = NULL;
  Label* if_false = NULL;
  Label* fall_through = NULL;
  context()->PrepareTest(&materialize_true, &materialize_false, &if_true,
                         &if_false, &fall_through);

  __ JumpIfSmi(r3, if_false);
  __ CompareObjectType(r3, r4, r4, JS_PROXY_TYPE);
  PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
  Split(eq, if_true, if_false, fall_through);

  context()->Plug(if_true, if_false);
}


void FullCodeGenerator::EmitClassOf(CallRuntime* expr) {
  ZoneList<Expression*>* args = expr->arguments();
  DCHECK(args->length() == 1);
  Label done, null, function, non_function_constructor;

  VisitForAccumulatorValue(args->at(0));

  // If the object is not a JSReceiver, we return null.
  __ JumpIfSmi(r3, &null);
  STATIC_ASSERT(LAST_JS_RECEIVER_TYPE == LAST_TYPE);
  __ CompareObjectType(r3, r3, r4, FIRST_JS_RECEIVER_TYPE);
  // Map is now in r3.
  __ blt(&null);

  // Return 'Function' for JSFunction and JSBoundFunction objects.
  __ cmpli(r4, Operand(FIRST_FUNCTION_TYPE));
  STATIC_ASSERT(LAST_FUNCTION_TYPE == LAST_TYPE);
  __ bge(&function);

  // Check if the constructor in the map is a JS function.
  Register instance_type = r5;
  __ GetMapConstructor(r3, r3, r4, instance_type);
  __ cmpi(instance_type, Operand(JS_FUNCTION_TYPE));
  __ bne(&non_function_constructor);

  // r3 now contains the constructor function. Grab the
  // instance class name from there.
  __ LoadP(r3, FieldMemOperand(r3, JSFunction::kSharedFunctionInfoOffset));
  __ LoadP(r3,
           FieldMemOperand(r3, SharedFunctionInfo::kInstanceClassNameOffset));
  __ b(&done);

  // Functions have class 'Function'.
  __ bind(&function);
  __ LoadRoot(r3, Heap::kFunction_stringRootIndex);
  __ b(&done);

  // Objects with a non-function constructor have class 'Object'.
  __ bind(&non_function_constructor);
2855 __ LoadRoot(r3, Heap::kObject_stringRootIndex);
2856 __ b(&done);
2857
2858 // Non-JS objects have class null.
2859 __ bind(&null);
2860 __ LoadRoot(r3, Heap::kNullValueRootIndex);
2861
2862 // All done.
2863 __ bind(&done);
2864
2865 context()->Plug(r3);
2866}
2867
2868
2869void FullCodeGenerator::EmitValueOf(CallRuntime* expr) {
2870 ZoneList<Expression*>* args = expr->arguments();
2871 DCHECK(args->length() == 1);
2872 VisitForAccumulatorValue(args->at(0)); // Load the object.
2873
2874 Label done;
2875 // If the object is a smi, return the object.
2876 __ JumpIfSmi(r3, &done);
2877 // If the object is not a value type, return the object.
2878 __ CompareObjectType(r3, r4, r4, JS_VALUE_TYPE);
2879 __ bne(&done);
2880 __ LoadP(r3, FieldMemOperand(r3, JSValue::kValueOffset));
2881
2882 __ bind(&done);
2883 context()->Plug(r3);
2884}
2885
2886
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00002887void FullCodeGenerator::EmitStringCharFromCode(CallRuntime* expr) {
2888 ZoneList<Expression*>* args = expr->arguments();
2889 DCHECK(args->length() == 1);
2890 VisitForAccumulatorValue(args->at(0));
2891
2892 Label done;
2893 StringCharFromCodeGenerator generator(r3, r4);
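  // The generator consumes the char code in r3 and materializes the
  // one-character result string in r4, which is plugged below.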
2894 generator.GenerateFast(masm_);
2895 __ b(&done);
2896
2897 NopRuntimeCallHelper call_helper;
2898 generator.GenerateSlow(masm_, call_helper);
2899
2900 __ bind(&done);
2901 context()->Plug(r4);
2902}
2903
2904
2905void FullCodeGenerator::EmitStringCharCodeAt(CallRuntime* expr) {
2906 ZoneList<Expression*>* args = expr->arguments();
2907 DCHECK(args->length() == 2);
2908 VisitForStackValue(args->at(0));
2909 VisitForAccumulatorValue(args->at(1));
2910
2911 Register object = r4;
2912 Register index = r3;
2913 Register result = r6;
2914
Ben Murdoch097c5b22016-05-18 11:27:45 +01002915 PopOperand(object);
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00002916
2917 Label need_conversion;
2918 Label index_out_of_range;
2919 Label done;
2920 StringCharCodeAtGenerator generator(object, index, result, &need_conversion,
Ben Murdoch61f157c2016-09-16 13:49:30 +01002921 &need_conversion, &index_out_of_range);
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00002922 generator.GenerateFast(masm_);
2923 __ b(&done);
2924
2925 __ bind(&index_out_of_range);
2926 // When the index is out of range, the spec requires us to return
2927 // NaN.
2928 __ LoadRoot(result, Heap::kNanValueRootIndex);
2929 __ b(&done);
2930
2931 __ bind(&need_conversion);
2932 // Load the undefined value into the result register, which will
2933 // trigger conversion.
2934 __ LoadRoot(result, Heap::kUndefinedValueRootIndex);
2935 __ b(&done);
2936
2937 NopRuntimeCallHelper call_helper;
2938 generator.GenerateSlow(masm_, NOT_PART_OF_IC_HANDLER, call_helper);
2939
2940 __ bind(&done);
2941 context()->Plug(result);
2942}
2943
2944
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00002945void FullCodeGenerator::EmitCall(CallRuntime* expr) {
2946 ZoneList<Expression*>* args = expr->arguments();
2947 DCHECK_LE(2, args->length());
2948 // Push target, receiver and arguments onto the stack.
2949 for (Expression* const arg : *args) {
2950 VisitForStackValue(arg);
2951 }
Ben Murdochc5610432016-08-08 18:44:38 +01002952 PrepareForBailoutForId(expr->CallId(), BailoutState::NO_REGISTERS);
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00002953 // Move target to r4.
2954 int const argc = args->length() - 2;
2955 __ LoadP(r4, MemOperand(sp, (argc + 1) * kPointerSize));
2956 // Call the target.
2957 __ mov(r3, Operand(argc));
2958 __ Call(isolate()->builtins()->Call(), RelocInfo::CODE_TARGET);
Ben Murdoch097c5b22016-05-18 11:27:45 +01002959 OperandStackDepthDecrement(argc + 1);
Ben Murdochc5610432016-08-08 18:44:38 +01002960 RestoreContext();
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00002961 // Discard the function left on TOS.
2962 context()->DropAndPlug(1, r3);
2963}
2964
2965
2966void FullCodeGenerator::EmitHasCachedArrayIndex(CallRuntime* expr) {
2967 ZoneList<Expression*>* args = expr->arguments();
2968 VisitForAccumulatorValue(args->at(0));
2969
2970 Label materialize_true, materialize_false;
2971 Label* if_true = NULL;
2972 Label* if_false = NULL;
2973 Label* fall_through = NULL;
2974 context()->PrepareTest(&materialize_true, &materialize_false, &if_true,
2975 &if_false, &fall_through);
2976
2977 __ lwz(r3, FieldMemOperand(r3, String::kHashFieldOffset));
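  // (hash & kContainsCachedArrayIndexMask) == 0 iff the hash field caches
  // an array index, hence the 'eq' split below.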
2978 // PPC: assume ip is free for use as a scratch register here.
2979 __ mov(ip, Operand(String::kContainsCachedArrayIndexMask));
2980 __ and_(r0, r3, ip, SetRC);
2981 PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
2982 Split(eq, if_true, if_false, fall_through, cr0);
2983
2984 context()->Plug(if_true, if_false);
2985}
2986
2987
2988void FullCodeGenerator::EmitGetCachedArrayIndex(CallRuntime* expr) {
2989 ZoneList<Expression*>* args = expr->arguments();
2990 DCHECK(args->length() == 1);
2991 VisitForAccumulatorValue(args->at(0));
2992
2993 __ AssertString(r3);
2994
2995 __ lwz(r3, FieldMemOperand(r3, String::kHashFieldOffset));
2996 __ IndexFromHash(r3, r3);
2997
2998 context()->Plug(r3);
2999}
3000
3001
3002void FullCodeGenerator::EmitGetSuperConstructor(CallRuntime* expr) {
3003 ZoneList<Expression*>* args = expr->arguments();
3004 DCHECK_EQ(1, args->length());
3005 VisitForAccumulatorValue(args->at(0));
3006 __ AssertFunction(r3);
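  // The super constructor is the [[Prototype]] of the active function,
  // read via the prototype slot of its map.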
3007 __ LoadP(r3, FieldMemOperand(r3, HeapObject::kMapOffset));
3008 __ LoadP(r3, FieldMemOperand(r3, Map::kPrototypeOffset));
3009 context()->Plug(r3);
3010}
3011
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00003012void FullCodeGenerator::EmitDebugIsActive(CallRuntime* expr) {
3013 DCHECK(expr->arguments()->length() == 0);
3014 ExternalReference debug_is_active =
3015 ExternalReference::debug_is_active_address(isolate());
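  // The debug-active flag is a single byte; load it and smi-tag it for the
  // JS caller.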
3016 __ mov(ip, Operand(debug_is_active));
3017 __ lbz(r3, MemOperand(ip));
3018 __ SmiTag(r3);
3019 context()->Plug(r3);
3020}
3021
3022
3023void FullCodeGenerator::EmitCreateIterResultObject(CallRuntime* expr) {
3024 ZoneList<Expression*>* args = expr->arguments();
3025 DCHECK_EQ(2, args->length());
3026 VisitForStackValue(args->at(0));
3027 VisitForStackValue(args->at(1));
3028
3029 Label runtime, done;
3030
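  // Fast path: allocate the JSIteratorResult inline and initialize its map,
  // properties, elements, value and done fields; fall back to the runtime
  // if allocation fails.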
Ben Murdochc5610432016-08-08 18:44:38 +01003031 __ Allocate(JSIteratorResult::kSize, r3, r5, r6, &runtime,
3032 NO_ALLOCATION_FLAGS);
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00003033 __ LoadNativeContextSlot(Context::ITERATOR_RESULT_MAP_INDEX, r4);
3034 __ Pop(r5, r6);
3035 __ LoadRoot(r7, Heap::kEmptyFixedArrayRootIndex);
3036 __ StoreP(r4, FieldMemOperand(r3, HeapObject::kMapOffset), r0);
3037 __ StoreP(r7, FieldMemOperand(r3, JSObject::kPropertiesOffset), r0);
3038 __ StoreP(r7, FieldMemOperand(r3, JSObject::kElementsOffset), r0);
3039 __ StoreP(r5, FieldMemOperand(r3, JSIteratorResult::kValueOffset), r0);
3040 __ StoreP(r6, FieldMemOperand(r3, JSIteratorResult::kDoneOffset), r0);
3041 STATIC_ASSERT(JSIteratorResult::kSize == 5 * kPointerSize);
3042 __ b(&done);
3043
3044 __ bind(&runtime);
Ben Murdoch097c5b22016-05-18 11:27:45 +01003045 CallRuntimeWithOperands(Runtime::kCreateIterResultObject);
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00003046
3047 __ bind(&done);
3048 context()->Plug(r3);
3049}
3050
3051
3052void FullCodeGenerator::EmitLoadJSRuntimeFunction(CallRuntime* expr) {
Ben Murdochda12d292016-06-02 14:46:10 +01003053 // Push function.
3054 __ LoadNativeContextSlot(expr->context_index(), r3);
3055 PushOperand(r3);
3056
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00003057 // Push undefined as the receiver.
3058 __ LoadRoot(r3, Heap::kUndefinedValueRootIndex);
Ben Murdoch097c5b22016-05-18 11:27:45 +01003059 PushOperand(r3);
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00003060}
3061
3062
3063void FullCodeGenerator::EmitCallJSRuntimeFunction(CallRuntime* expr) {
3064 ZoneList<Expression*>* args = expr->arguments();
3065 int arg_count = args->length();
3066
3067 SetCallPosition(expr);
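  // The target function sits on the stack below the receiver and the
  // arguments; load it into r4.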
3068 __ LoadP(r4, MemOperand(sp, (arg_count + 1) * kPointerSize), r0);
3069 __ mov(r3, Operand(arg_count));
3070 __ Call(isolate()->builtins()->Call(ConvertReceiverMode::kNullOrUndefined),
3071 RelocInfo::CODE_TARGET);
Ben Murdoch097c5b22016-05-18 11:27:45 +01003072 OperandStackDepthDecrement(arg_count + 1);
Ben Murdochc5610432016-08-08 18:44:38 +01003073 RestoreContext();
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00003074}
3075
3076
3077void FullCodeGenerator::VisitUnaryOperation(UnaryOperation* expr) {
3078 switch (expr->op()) {
3079 case Token::DELETE: {
3080 Comment cmnt(masm_, "[ UnaryOperation (DELETE)");
3081 Property* property = expr->expression()->AsProperty();
3082 VariableProxy* proxy = expr->expression()->AsVariableProxy();
3083
3084 if (property != NULL) {
3085 VisitForStackValue(property->obj());
3086 VisitForStackValue(property->key());
Ben Murdoch097c5b22016-05-18 11:27:45 +01003087 CallRuntimeWithOperands(is_strict(language_mode())
3088 ? Runtime::kDeleteProperty_Strict
3089 : Runtime::kDeleteProperty_Sloppy);
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00003090 context()->Plug(r3);
3091 } else if (proxy != NULL) {
3092 Variable* var = proxy->var();
3093 // Delete of an unqualified identifier is disallowed in strict mode but
3094 // "delete this" is allowed.
3095 bool is_this = var->HasThisName(isolate());
3096 DCHECK(is_sloppy(language_mode()) || is_this);
3097 if (var->IsUnallocatedOrGlobalSlot()) {
3098 __ LoadGlobalObject(r5);
3099 __ mov(r4, Operand(var->name()));
3100 __ Push(r5, r4);
3101 __ CallRuntime(Runtime::kDeleteProperty_Sloppy);
3102 context()->Plug(r3);
3103 } else if (var->IsStackAllocated() || var->IsContextSlot()) {
3104 // Result of deleting non-global, non-dynamic variables is false.
3105 // The subexpression does not have side effects.
3106 context()->Plug(is_this);
3107 } else {
3108 // Non-global variable. Call the runtime to try to delete from the
3109 // context where the variable was introduced.
Ben Murdoch097c5b22016-05-18 11:27:45 +01003110 __ Push(var->name());
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00003111 __ CallRuntime(Runtime::kDeleteLookupSlot);
3112 context()->Plug(r3);
3113 }
3114 } else {
3115 // Result of deleting non-property, non-variable reference is true.
3116 // The subexpression may have side effects.
3117 VisitForEffect(expr->expression());
3118 context()->Plug(true);
3119 }
3120 break;
3121 }
3122
3123 case Token::VOID: {
3124 Comment cmnt(masm_, "[ UnaryOperation (VOID)");
3125 VisitForEffect(expr->expression());
3126 context()->Plug(Heap::kUndefinedValueRootIndex);
3127 break;
3128 }
3129
3130 case Token::NOT: {
3131 Comment cmnt(masm_, "[ UnaryOperation (NOT)");
3132 if (context()->IsEffect()) {
3133 // Unary NOT has no side effects so it's only necessary to visit the
3134 // subexpression. Match the optimizing compiler by not branching.
3135 VisitForEffect(expr->expression());
3136 } else if (context()->IsTest()) {
3137 const TestContext* test = TestContext::cast(context());
3138 // The labels are swapped for the recursive call.
3139 VisitForControl(expr->expression(), test->false_label(),
3140 test->true_label(), test->fall_through());
3141 context()->Plug(test->true_label(), test->false_label());
3142 } else {
3143 // We handle value contexts explicitly rather than simply visiting
3144 // for control and plugging the control flow into the context,
3145 // because we need to prepare a pair of extra administrative AST ids
3146 // for the optimizing compiler.
3147 DCHECK(context()->IsAccumulatorValue() || context()->IsStackValue());
3148 Label materialize_true, materialize_false, done;
3149 VisitForControl(expr->expression(), &materialize_false,
3150 &materialize_true, &materialize_true);
Ben Murdoch097c5b22016-05-18 11:27:45 +01003151 if (!context()->IsAccumulatorValue()) OperandStackDepthIncrement(1);
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00003152 __ bind(&materialize_true);
Ben Murdochc5610432016-08-08 18:44:38 +01003153 PrepareForBailoutForId(expr->MaterializeTrueId(),
3154 BailoutState::NO_REGISTERS);
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00003155 __ LoadRoot(r3, Heap::kTrueValueRootIndex);
3156 if (context()->IsStackValue()) __ push(r3);
3157 __ b(&done);
3158 __ bind(&materialize_false);
Ben Murdochc5610432016-08-08 18:44:38 +01003159 PrepareForBailoutForId(expr->MaterializeFalseId(),
3160 BailoutState::NO_REGISTERS);
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00003161 __ LoadRoot(r3, Heap::kFalseValueRootIndex);
3162 if (context()->IsStackValue()) __ push(r3);
3163 __ bind(&done);
3164 }
3165 break;
3166 }
3167
3168 case Token::TYPEOF: {
3169 Comment cmnt(masm_, "[ UnaryOperation (TYPEOF)");
3170 {
3171 AccumulatorValueContext context(this);
3172 VisitForTypeofValue(expr->expression());
3173 }
3174 __ mr(r6, r3);
3175 TypeofStub typeof_stub(isolate());
3176 __ CallStub(&typeof_stub);
3177 context()->Plug(r3);
3178 break;
3179 }
3180
3181 default:
3182 UNREACHABLE();
3183 }
3184}
3185
3186
3187void FullCodeGenerator::VisitCountOperation(CountOperation* expr) {
3188 DCHECK(expr->expression()->IsValidReferenceExpressionOrThis());
3189
3190 Comment cmnt(masm_, "[ CountOperation");
3191
3192 Property* prop = expr->expression()->AsProperty();
3193 LhsKind assign_type = Property::GetAssignType(prop);
3194
3195 // Evaluate expression and get value.
3196 if (assign_type == VARIABLE) {
3197 DCHECK(expr->expression()->AsVariableProxy()->var() != NULL);
3198 AccumulatorValueContext context(this);
3199 EmitVariableLoad(expr->expression()->AsVariableProxy());
3200 } else {
3201 // Reserve space for result of postfix operation.
3202 if (expr->is_postfix() && !context()->IsEffect()) {
3203 __ LoadSmiLiteral(ip, Smi::FromInt(0));
Ben Murdoch097c5b22016-05-18 11:27:45 +01003204 PushOperand(ip);
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00003205 }
3206 switch (assign_type) {
3207 case NAMED_PROPERTY: {
3208 // Put the object both on the stack and in the register.
3209 VisitForStackValue(prop->obj());
3210 __ LoadP(LoadDescriptor::ReceiverRegister(), MemOperand(sp, 0));
3211 EmitNamedPropertyLoad(prop);
3212 break;
3213 }
3214
3215 case NAMED_SUPER_PROPERTY: {
3216 VisitForStackValue(prop->obj()->AsSuperPropertyReference()->this_var());
3217 VisitForAccumulatorValue(
3218 prop->obj()->AsSuperPropertyReference()->home_object());
Ben Murdoch097c5b22016-05-18 11:27:45 +01003219 PushOperand(result_register());
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00003220 const Register scratch = r4;
3221 __ LoadP(scratch, MemOperand(sp, kPointerSize));
Ben Murdoch097c5b22016-05-18 11:27:45 +01003222 PushOperands(scratch, result_register());
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00003223 EmitNamedSuperPropertyLoad(prop);
3224 break;
3225 }
3226
3227 case KEYED_SUPER_PROPERTY: {
3228 VisitForStackValue(prop->obj()->AsSuperPropertyReference()->this_var());
3229 VisitForAccumulatorValue(
3230 prop->obj()->AsSuperPropertyReference()->home_object());
3231 const Register scratch = r4;
3232 const Register scratch1 = r5;
3233 __ mr(scratch, result_register());
3234 VisitForAccumulatorValue(prop->key());
Ben Murdoch097c5b22016-05-18 11:27:45 +01003235 PushOperands(scratch, result_register());
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00003236 __ LoadP(scratch1, MemOperand(sp, 2 * kPointerSize));
Ben Murdoch097c5b22016-05-18 11:27:45 +01003237 PushOperands(scratch1, scratch, result_register());
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00003238 EmitKeyedSuperPropertyLoad(prop);
3239 break;
3240 }
3241
3242 case KEYED_PROPERTY: {
3243 VisitForStackValue(prop->obj());
3244 VisitForStackValue(prop->key());
3245 __ LoadP(LoadDescriptor::ReceiverRegister(),
3246 MemOperand(sp, 1 * kPointerSize));
3247 __ LoadP(LoadDescriptor::NameRegister(), MemOperand(sp, 0));
3248 EmitKeyedPropertyLoad(prop);
3249 break;
3250 }
3251
3252 case VARIABLE:
3253 UNREACHABLE();
3254 }
3255 }
3256
3257 // We need a second deoptimization point after loading the value
3258 // in case evaluating the property load may have a side effect.
3259 if (assign_type == VARIABLE) {
Ben Murdochc5610432016-08-08 18:44:38 +01003260 PrepareForBailout(expr->expression(), BailoutState::TOS_REGISTER);
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00003261 } else {
Ben Murdochc5610432016-08-08 18:44:38 +01003262 PrepareForBailoutForId(prop->LoadId(), BailoutState::TOS_REGISTER);
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00003263 }
3264
3265 // Inline smi case if we are in a loop.
3266 Label stub_call, done;
3267 JumpPatchSite patch_site(masm_);
3268
3269 int count_value = expr->op() == Token::INC ? 1 : -1;
3270 if (ShouldInlineSmiCase(expr->op())) {
3271 Label slow;
3272 patch_site.EmitJumpIfNotSmi(r3, &slow);
3273
3274 // Save result for postfix expressions.
3275 if (expr->is_postfix()) {
3276 if (!context()->IsEffect()) {
3277 // Save the result on the stack. If we have a named or keyed property
3278 // we store the result under the receiver that is currently on top
3279 // of the stack.
3280 switch (assign_type) {
3281 case VARIABLE:
3282 __ push(r3);
3283 break;
3284 case NAMED_PROPERTY:
3285 __ StoreP(r3, MemOperand(sp, kPointerSize));
3286 break;
3287 case NAMED_SUPER_PROPERTY:
3288 __ StoreP(r3, MemOperand(sp, 2 * kPointerSize));
3289 break;
3290 case KEYED_PROPERTY:
3291 __ StoreP(r3, MemOperand(sp, 2 * kPointerSize));
3292 break;
3293 case KEYED_SUPER_PROPERTY:
3294 __ StoreP(r3, MemOperand(sp, 3 * kPointerSize));
3295 break;
3296 }
3297 }
3298 }
3299
3300 Register scratch1 = r4;
3301 Register scratch2 = r5;
3302 __ LoadSmiLiteral(scratch1, Smi::FromInt(count_value));
3303 __ AddAndCheckForOverflow(r3, r3, scratch1, scratch2, r0);
3304 __ BranchOnNoOverflow(&done);
3305 // Call stub. Undo operation first.
3306 __ sub(r3, r3, scratch1);
3307 __ b(&stub_call);
3308 __ bind(&slow);
3309 }
Ben Murdochda12d292016-06-02 14:46:10 +01003310
3311 // Convert old value into a number.
Ben Murdoch61f157c2016-09-16 13:49:30 +01003312 __ Call(isolate()->builtins()->ToNumber(), RelocInfo::CODE_TARGET);
Ben Murdochc5610432016-08-08 18:44:38 +01003313 PrepareForBailoutForId(expr->ToNumberId(), BailoutState::TOS_REGISTER);
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00003314
3315 // Save result for postfix expressions.
3316 if (expr->is_postfix()) {
3317 if (!context()->IsEffect()) {
3318 // Save the result on the stack. If we have a named or keyed property
3319 // we store the result under the receiver that is currently on top
3320 // of the stack.
3321 switch (assign_type) {
3322 case VARIABLE:
Ben Murdoch097c5b22016-05-18 11:27:45 +01003323 PushOperand(r3);
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00003324 break;
3325 case NAMED_PROPERTY:
3326 __ StoreP(r3, MemOperand(sp, kPointerSize));
3327 break;
3328 case NAMED_SUPER_PROPERTY:
3329 __ StoreP(r3, MemOperand(sp, 2 * kPointerSize));
3330 break;
3331 case KEYED_PROPERTY:
3332 __ StoreP(r3, MemOperand(sp, 2 * kPointerSize));
3333 break;
3334 case KEYED_SUPER_PROPERTY:
3335 __ StoreP(r3, MemOperand(sp, 3 * kPointerSize));
3336 break;
3337 }
3338 }
3339 }
3340
3341 __ bind(&stub_call);
3342 __ mr(r4, r3);
3343 __ LoadSmiLiteral(r3, Smi::FromInt(count_value));
3344
3345 SetExpressionPosition(expr);
3346
Ben Murdoch097c5b22016-05-18 11:27:45 +01003347 Handle<Code> code = CodeFactory::BinaryOpIC(isolate(), Token::ADD).code();
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00003348 CallIC(code, expr->CountBinOpFeedbackId());
3349 patch_site.EmitPatchInfo();
3350 __ bind(&done);
3351
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00003352 // Store the value returned in r3.
3353 switch (assign_type) {
3354 case VARIABLE:
3355 if (expr->is_postfix()) {
3356 {
3357 EffectContext context(this);
3358 EmitVariableAssignment(expr->expression()->AsVariableProxy()->var(),
3359 Token::ASSIGN, expr->CountSlot());
Ben Murdochc5610432016-08-08 18:44:38 +01003360 PrepareForBailoutForId(expr->AssignmentId(),
3361 BailoutState::TOS_REGISTER);
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00003362 context.Plug(r3);
3363 }
3364 // For all contexts except EffectContext, we have the result on
3365 // top of the stack.
3366 if (!context()->IsEffect()) {
3367 context()->PlugTOS();
3368 }
3369 } else {
3370 EmitVariableAssignment(expr->expression()->AsVariableProxy()->var(),
3371 Token::ASSIGN, expr->CountSlot());
Ben Murdochc5610432016-08-08 18:44:38 +01003372 PrepareForBailoutForId(expr->AssignmentId(),
3373 BailoutState::TOS_REGISTER);
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00003374 context()->Plug(r3);
3375 }
3376 break;
3377 case NAMED_PROPERTY: {
3378 __ mov(StoreDescriptor::NameRegister(),
3379 Operand(prop->key()->AsLiteral()->value()));
Ben Murdoch097c5b22016-05-18 11:27:45 +01003380 PopOperand(StoreDescriptor::ReceiverRegister());
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00003381 EmitLoadStoreICSlot(expr->CountSlot());
3382 CallStoreIC();
Ben Murdochc5610432016-08-08 18:44:38 +01003383 PrepareForBailoutForId(expr->AssignmentId(), BailoutState::TOS_REGISTER);
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00003384 if (expr->is_postfix()) {
3385 if (!context()->IsEffect()) {
3386 context()->PlugTOS();
3387 }
3388 } else {
3389 context()->Plug(r3);
3390 }
3391 break;
3392 }
3393 case NAMED_SUPER_PROPERTY: {
3394 EmitNamedSuperPropertyStore(prop);
3395 if (expr->is_postfix()) {
3396 if (!context()->IsEffect()) {
3397 context()->PlugTOS();
3398 }
3399 } else {
3400 context()->Plug(r3);
3401 }
3402 break;
3403 }
3404 case KEYED_SUPER_PROPERTY: {
3405 EmitKeyedSuperPropertyStore(prop);
3406 if (expr->is_postfix()) {
3407 if (!context()->IsEffect()) {
3408 context()->PlugTOS();
3409 }
3410 } else {
3411 context()->Plug(r3);
3412 }
3413 break;
3414 }
3415 case KEYED_PROPERTY: {
Ben Murdoch097c5b22016-05-18 11:27:45 +01003416 PopOperands(StoreDescriptor::ReceiverRegister(),
3417 StoreDescriptor::NameRegister());
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00003418 Handle<Code> ic =
3419 CodeFactory::KeyedStoreIC(isolate(), language_mode()).code();
3420 EmitLoadStoreICSlot(expr->CountSlot());
3421 CallIC(ic);
Ben Murdochc5610432016-08-08 18:44:38 +01003422 PrepareForBailoutForId(expr->AssignmentId(), BailoutState::TOS_REGISTER);
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00003423 if (expr->is_postfix()) {
3424 if (!context()->IsEffect()) {
3425 context()->PlugTOS();
3426 }
3427 } else {
3428 context()->Plug(r3);
3429 }
3430 break;
3431 }
3432 }
3433}
3434
3435
3436void FullCodeGenerator::EmitLiteralCompareTypeof(Expression* expr,
3437 Expression* sub_expr,
3438 Handle<String> check) {
3439 Label materialize_true, materialize_false;
3440 Label* if_true = NULL;
3441 Label* if_false = NULL;
3442 Label* fall_through = NULL;
3443 context()->PrepareTest(&materialize_true, &materialize_false, &if_true,
3444 &if_false, &fall_through);
3445
3446 {
3447 AccumulatorValueContext context(this);
3448 VisitForTypeofValue(sub_expr);
3449 }
3450 PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
3451
3452 Factory* factory = isolate()->factory();
3453 if (String::Equals(check, factory->number_string())) {
3454 __ JumpIfSmi(r3, if_true);
3455 __ LoadP(r3, FieldMemOperand(r3, HeapObject::kMapOffset));
3456 __ LoadRoot(ip, Heap::kHeapNumberMapRootIndex);
3457 __ cmp(r3, ip);
3458 Split(eq, if_true, if_false, fall_through);
3459 } else if (String::Equals(check, factory->string_string())) {
3460 __ JumpIfSmi(r3, if_false);
3461 __ CompareObjectType(r3, r3, r4, FIRST_NONSTRING_TYPE);
3462 Split(lt, if_true, if_false, fall_through);
3463 } else if (String::Equals(check, factory->symbol_string())) {
3464 __ JumpIfSmi(r3, if_false);
3465 __ CompareObjectType(r3, r3, r4, SYMBOL_TYPE);
3466 Split(eq, if_true, if_false, fall_through);
3467 } else if (String::Equals(check, factory->boolean_string())) {
3468 __ CompareRoot(r3, Heap::kTrueValueRootIndex);
3469 __ beq(if_true);
3470 __ CompareRoot(r3, Heap::kFalseValueRootIndex);
3471 Split(eq, if_true, if_false, fall_through);
3472 } else if (String::Equals(check, factory->undefined_string())) {
Ben Murdoch097c5b22016-05-18 11:27:45 +01003473 __ CompareRoot(r3, Heap::kNullValueRootIndex);
3474 __ beq(if_false);
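    // Exclude null up front: typeof null is 'object', and null would
    // otherwise pass the undetectable check below (null's map is marked
    // undetectable in this version).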
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00003475 __ JumpIfSmi(r3, if_false);
3476 // Check for undetectable objects => true.
3477 __ LoadP(r3, FieldMemOperand(r3, HeapObject::kMapOffset));
3478 __ lbz(r4, FieldMemOperand(r3, Map::kBitFieldOffset));
3479 __ andi(r0, r4, Operand(1 << Map::kIsUndetectable));
3480 Split(ne, if_true, if_false, fall_through, cr0);
3481
3482 } else if (String::Equals(check, factory->function_string())) {
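    // typeof evaluates to 'function' only for objects that are callable
    // and not undetectable, hence the two-bit mask and single-bit compare.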
3483 __ JumpIfSmi(r3, if_false);
3484 __ LoadP(r3, FieldMemOperand(r3, HeapObject::kMapOffset));
3485 __ lbz(r4, FieldMemOperand(r3, Map::kBitFieldOffset));
3486 __ andi(r4, r4,
3487 Operand((1 << Map::kIsCallable) | (1 << Map::kIsUndetectable)));
3488 __ cmpi(r4, Operand(1 << Map::kIsCallable));
3489 Split(eq, if_true, if_false, fall_through);
3490 } else if (String::Equals(check, factory->object_string())) {
3491 __ JumpIfSmi(r3, if_false);
3492 __ CompareRoot(r3, Heap::kNullValueRootIndex);
3493 __ beq(if_true);
3494 STATIC_ASSERT(LAST_JS_RECEIVER_TYPE == LAST_TYPE);
3495 __ CompareObjectType(r3, r3, r4, FIRST_JS_RECEIVER_TYPE);
3496 __ blt(if_false);
3497 // Check for callable or undetectable objects => false.
3498 __ lbz(r4, FieldMemOperand(r3, Map::kBitFieldOffset));
3499 __ andi(r0, r4,
3500 Operand((1 << Map::kIsCallable) | (1 << Map::kIsUndetectable)));
3501 Split(eq, if_true, if_false, fall_through, cr0);
3502// clang-format off
3503#define SIMD128_TYPE(TYPE, Type, type, lane_count, lane_type) \
3504 } else if (String::Equals(check, factory->type##_string())) { \
3505 __ JumpIfSmi(r3, if_false); \
3506 __ LoadP(r3, FieldMemOperand(r3, HeapObject::kMapOffset)); \
3507 __ CompareRoot(r3, Heap::k##Type##MapRootIndex); \
3508 Split(eq, if_true, if_false, fall_through);
3509 SIMD128_TYPES(SIMD128_TYPE)
3510#undef SIMD128_TYPE
3511 // clang-format on
3512 } else {
3513 if (if_false != fall_through) __ b(if_false);
3514 }
3515 context()->Plug(if_true, if_false);
3516}
3517
3518
3519void FullCodeGenerator::VisitCompareOperation(CompareOperation* expr) {
3520 Comment cmnt(masm_, "[ CompareOperation");
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00003521
3522 // First we try a fast inlined version of the compare when one of
3523 // the operands is a literal.
3524 if (TryLiteralCompare(expr)) return;
3525
3526 // Always perform the comparison for its control flow. Pack the result
3527 // into the expression's context after the comparison is performed.
3528 Label materialize_true, materialize_false;
3529 Label* if_true = NULL;
3530 Label* if_false = NULL;
3531 Label* fall_through = NULL;
3532 context()->PrepareTest(&materialize_true, &materialize_false, &if_true,
3533 &if_false, &fall_through);
3534
3535 Token::Value op = expr->op();
3536 VisitForStackValue(expr->left());
3537 switch (op) {
3538 case Token::IN:
3539 VisitForStackValue(expr->right());
Ben Murdochc5610432016-08-08 18:44:38 +01003540 SetExpressionPosition(expr);
3541 EmitHasProperty();
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00003542 PrepareForBailoutBeforeSplit(expr, false, NULL, NULL);
3543 __ CompareRoot(r3, Heap::kTrueValueRootIndex);
3544 Split(eq, if_true, if_false, fall_through);
3545 break;
3546
3547 case Token::INSTANCEOF: {
3548 VisitForAccumulatorValue(expr->right());
Ben Murdochc5610432016-08-08 18:44:38 +01003549 SetExpressionPosition(expr);
Ben Murdoch097c5b22016-05-18 11:27:45 +01003550 PopOperand(r4);
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00003551 InstanceOfStub stub(isolate());
3552 __ CallStub(&stub);
3553 PrepareForBailoutBeforeSplit(expr, false, NULL, NULL);
3554 __ CompareRoot(r3, Heap::kTrueValueRootIndex);
3555 Split(eq, if_true, if_false, fall_through);
3556 break;
3557 }
3558
3559 default: {
3560 VisitForAccumulatorValue(expr->right());
Ben Murdochc5610432016-08-08 18:44:38 +01003561 SetExpressionPosition(expr);
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00003562 Condition cond = CompareIC::ComputeCondition(op);
Ben Murdoch097c5b22016-05-18 11:27:45 +01003563 PopOperand(r4);
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00003564
3565 bool inline_smi_code = ShouldInlineSmiCase(op);
3566 JumpPatchSite patch_site(masm_);
3567 if (inline_smi_code) {
3568 Label slow_case;
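        // OR the two operands together: the result is a smi only if both
        // operands are smis, so one patch-site check covers both.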
3569 __ orx(r5, r3, r4);
3570 patch_site.EmitJumpIfNotSmi(r5, &slow_case);
3571 __ cmp(r4, r3);
3572 Split(cond, if_true, if_false, NULL);
3573 __ bind(&slow_case);
3574 }
3575
Ben Murdoch097c5b22016-05-18 11:27:45 +01003576 Handle<Code> ic = CodeFactory::CompareIC(isolate(), op).code();
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00003577 CallIC(ic, expr->CompareOperationFeedbackId());
3578 patch_site.EmitPatchInfo();
3579 PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
3580 __ cmpi(r3, Operand::Zero());
3581 Split(cond, if_true, if_false, fall_through);
3582 }
3583 }
3584
3585 // Convert the result of the comparison into one expected for this
3586 // expression's context.
3587 context()->Plug(if_true, if_false);
3588}
3589
3590
3591void FullCodeGenerator::EmitLiteralCompareNil(CompareOperation* expr,
3592 Expression* sub_expr,
3593 NilValue nil) {
3594 Label materialize_true, materialize_false;
3595 Label* if_true = NULL;
3596 Label* if_false = NULL;
3597 Label* fall_through = NULL;
3598 context()->PrepareTest(&materialize_true, &materialize_false, &if_true,
3599 &if_false, &fall_through);
3600
3601 VisitForAccumulatorValue(sub_expr);
3602 PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
3603 if (expr->op() == Token::EQ_STRICT) {
3604 Heap::RootListIndex nil_value = nil == kNullValue
3605 ? Heap::kNullValueRootIndex
3606 : Heap::kUndefinedValueRootIndex;
3607 __ LoadRoot(r4, nil_value);
3608 __ cmp(r3, r4);
3609 Split(eq, if_true, if_false, fall_through);
3610 } else {
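    // Non-strict comparison against null/undefined: smis never match;
    // everything else matches iff its map has the undetectable bit set
    // (which covers undefined, and null in this V8 version).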
Ben Murdochda12d292016-06-02 14:46:10 +01003611 __ JumpIfSmi(r3, if_false);
3612 __ LoadP(r3, FieldMemOperand(r3, HeapObject::kMapOffset));
3613 __ lbz(r4, FieldMemOperand(r3, Map::kBitFieldOffset));
3614 __ andi(r0, r4, Operand(1 << Map::kIsUndetectable));
3615 Split(ne, if_true, if_false, fall_through, cr0);
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00003616 }
3617 context()->Plug(if_true, if_false);
3618}
3619
3620
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00003621Register FullCodeGenerator::result_register() { return r3; }
3622
3623
3624Register FullCodeGenerator::context_register() { return cp; }
3625
Ben Murdochda12d292016-06-02 14:46:10 +01003626void FullCodeGenerator::LoadFromFrameField(int frame_offset, Register value) {
3627 DCHECK_EQ(static_cast<int>(POINTER_SIZE_ALIGN(frame_offset)), frame_offset);
3628 __ LoadP(value, MemOperand(fp, frame_offset), r0);
3629}
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00003630
3631void FullCodeGenerator::StoreToFrameField(int frame_offset, Register value) {
3632 DCHECK_EQ(static_cast<int>(POINTER_SIZE_ALIGN(frame_offset)), frame_offset);
3633 __ StoreP(value, MemOperand(fp, frame_offset), r0);
3634}
3635
3636
3637void FullCodeGenerator::LoadContextField(Register dst, int context_index) {
3638 __ LoadP(dst, ContextMemOperand(cp, context_index), r0);
3639}
3640
3641
3642void FullCodeGenerator::PushFunctionArgumentForContextAllocation() {
3643 Scope* closure_scope = scope()->ClosureScope();
3644 if (closure_scope->is_script_scope() ||
3645 closure_scope->is_module_scope()) {
3646 // Contexts nested in the native context have a canonical empty function
3647 // as their closure, not the anonymous closure containing the global
3648 // code.
3649 __ LoadNativeContextSlot(Context::CLOSURE_INDEX, ip);
3650 } else if (closure_scope->is_eval_scope()) {
3651 // Contexts created by a call to eval have the same closure as the
3652 // context calling eval, not the anonymous closure containing the eval
3653 // code. Fetch it from the context.
3654 __ LoadP(ip, ContextMemOperand(cp, Context::CLOSURE_INDEX));
3655 } else {
3656 DCHECK(closure_scope->is_function_scope());
3657 __ LoadP(ip, MemOperand(fp, JavaScriptFrameConstants::kFunctionOffset));
3658 }
Ben Murdoch097c5b22016-05-18 11:27:45 +01003659 PushOperand(ip);
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00003660}
3661
3662
3663// ----------------------------------------------------------------------------
3664// Non-local control flow support.
3665
3666void FullCodeGenerator::EnterFinallyBlock() {
3667 DCHECK(!result_register().is(r4));
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00003668 // Store pending message while executing finally block.
3669 ExternalReference pending_message_obj =
3670 ExternalReference::address_of_pending_message_obj(isolate());
3671 __ mov(ip, Operand(pending_message_obj));
3672 __ LoadP(r4, MemOperand(ip));
Ben Murdoch097c5b22016-05-18 11:27:45 +01003673 PushOperand(r4);
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00003674
3675 ClearPendingMessage();
3676}
3677
3678
3679void FullCodeGenerator::ExitFinallyBlock() {
3680 DCHECK(!result_register().is(r4));
3681 // Restore pending message from stack.
Ben Murdoch097c5b22016-05-18 11:27:45 +01003682 PopOperand(r4);
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00003683 ExternalReference pending_message_obj =
3684 ExternalReference::address_of_pending_message_obj(isolate());
3685 __ mov(ip, Operand(pending_message_obj));
3686 __ StoreP(r4, MemOperand(ip));
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00003687}
3688
3689
3690void FullCodeGenerator::ClearPendingMessage() {
3691 DCHECK(!result_register().is(r4));
3692 ExternalReference pending_message_obj =
3693 ExternalReference::address_of_pending_message_obj(isolate());
3694 __ LoadRoot(r4, Heap::kTheHoleValueRootIndex);
3695 __ mov(ip, Operand(pending_message_obj));
3696 __ StoreP(r4, MemOperand(ip));
3697}
3698
3699
Ben Murdoch097c5b22016-05-18 11:27:45 +01003700void FullCodeGenerator::DeferredCommands::EmitCommands() {
3701 DCHECK(!result_register().is(r4));
3702 // Restore the accumulator (r3) and token (r4).
3703 __ Pop(r4, result_register());
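  // Each deferred command's code runs only when its token matches the one
  // restored into r4; all other commands fall through to 'skip'.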
3704 for (DeferredCommand cmd : commands_) {
3705 Label skip;
3706 __ CmpSmiLiteral(r4, Smi::FromInt(cmd.token), r0);
3707 __ bne(&skip);
3708 switch (cmd.command) {
3709 case kReturn:
3710 codegen_->EmitUnwindAndReturn();
3711 break;
3712 case kThrow:
3713 __ Push(result_register());
3714 __ CallRuntime(Runtime::kReThrow);
3715 break;
3716 case kContinue:
3717 codegen_->EmitContinue(cmd.target);
3718 break;
3719 case kBreak:
3720 codegen_->EmitBreak(cmd.target);
3721 break;
3722 }
3723 __ bind(&skip);
3724 }
3725}
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00003726
3727#undef __
3728
3729
3730void BackEdgeTable::PatchAt(Code* unoptimized_code, Address pc,
3731 BackEdgeState target_state,
3732 Code* replacement_code) {
3733 Address mov_address = Assembler::target_address_from_return_address(pc);
3734 Address cmp_address = mov_address - 2 * Assembler::kInstrSize;
3735 Isolate* isolate = unoptimized_code->GetIsolate();
3736 CodePatcher patcher(isolate, cmp_address, 1);
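  // Only the single instruction at cmp_address is rewritten (cmpi <-> crset);
  // the branch stays in place, and the mov target is retargeted separately
  // below.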
3737
3738 switch (target_state) {
3739 case INTERRUPT: {
3740 // <decrement profiling counter>
3741 // cmpi r6, 0
3742 // bge <ok> ;; not changed
3743 // mov r12, <interrupt stub address>
3744 // mtlr r12
3745 // blrl
3746 // <reset profiling counter>
3747 // ok-label
3748 patcher.masm()->cmpi(r6, Operand::Zero());
3749 break;
3750 }
3751 case ON_STACK_REPLACEMENT:
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00003752 // <decrement profiling counter>
3753 // crset
3754 // bge <ok> ;; not changed
3755 // mov r12, <on-stack replacement address>
3756 // mtlr r12
3757 // blrl
3758 // <reset profiling counter>
3759 // ok-label ----- pc_after points here
3760
3761 // Set the LT bit such that bge is a NOP
3762 patcher.masm()->crset(Assembler::encode_crbit(cr7, CR_LT));
3763 break;
3764 }
3765
3766 // Replace the stack check address in the mov sequence with the
3767 // entry address of the replacement code.
3768 Assembler::set_target_address_at(isolate, mov_address, unoptimized_code,
3769 replacement_code->entry());
3770
3771 unoptimized_code->GetHeap()->incremental_marking()->RecordCodeTargetPatch(
3772 unoptimized_code, mov_address, replacement_code);
3773}
3774
3775
3776BackEdgeTable::BackEdgeState BackEdgeTable::GetBackEdgeState(
3777 Isolate* isolate, Code* unoptimized_code, Address pc) {
3778 Address mov_address = Assembler::target_address_from_return_address(pc);
3779 Address cmp_address = mov_address - 2 * Assembler::kInstrSize;
Ben Murdochda12d292016-06-02 14:46:10 +01003780#ifdef DEBUG
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00003781 Address interrupt_address =
3782 Assembler::target_address_at(mov_address, unoptimized_code);
Ben Murdochda12d292016-06-02 14:46:10 +01003783#endif
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00003784
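  // A cmpi at the patch site means the interrupt check is still active;
  // otherwise a crset was patched in for on-stack replacement.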
3785 if (Assembler::IsCmpImmediate(Assembler::instr_at(cmp_address))) {
3786 DCHECK(interrupt_address == isolate->builtins()->InterruptCheck()->entry());
3787 return INTERRUPT;
3788 }
3789
3790 DCHECK(Assembler::IsCrSet(Assembler::instr_at(cmp_address)));
3791
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00003792 DCHECK(interrupt_address ==
Ben Murdochda12d292016-06-02 14:46:10 +01003793 isolate->builtins()->OnStackReplacement()->entry());
3794 return ON_STACK_REPLACEMENT;
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00003795}
3796} // namespace internal
3797} // namespace v8
3798#endif // V8_TARGET_ARCH_PPC