// Copyright 2012 the V8 project authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.

#if V8_TARGET_ARCH_MIPS

// Note on Mips implementation:
//
// The result_register() for mips is the 'v0' register, which is defined
// by the ABI to contain function return values. However, the first
// parameter to a function is defined to be 'a0'. So there are many
// places where we have to move a previous result in v0 to a0 for the
// next call: mov(a0, v0). This is not needed on the other architectures.

#include "src/ast/scopes.h"
#include "src/code-factory.h"
#include "src/code-stubs.h"
#include "src/codegen.h"
#include "src/debug/debug.h"
#include "src/full-codegen/full-codegen.h"
#include "src/ic/ic.h"
#include "src/parsing/parser.h"

#include "src/mips/code-stubs-mips.h"
#include "src/mips/macro-assembler-mips.h"

namespace v8 {
namespace internal {

#define __ ACCESS_MASM(masm())

// A patch site is a location in the code which it is possible to patch. This
// class has a number of methods to emit the code which is patchable and the
// method EmitPatchInfo to record a marker back to the patchable code. This
// marker is an andi zero_reg, rx, #yyyy instruction, and rx * 0x0000ffff +
// yyyy (raw 16 bit immediate value is used) is the delta from the pc to the
// first instruction of the patchable code.
// The marker instruction is effectively a NOP (dest is zero_reg) and will
// never be emitted by normal code.
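//
// A sketch of how the patching plays out (the patching itself lives in the
// IC machinery; see PatchInlinedSmiCode in src/ic/mips/ic-mips.cc): the andi
// immediate emitted below starts out as 0, so the branch that follows it is
// unconditionally taken (EmitJumpIfNotSmi) or never taken (EmitJumpIfSmi).
// When type feedback warrants it, the immediate is patched to the smi tag
// mask and the branch condition is flipped, turning the sequence into a
// real smi check.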
class JumpPatchSite BASE_EMBEDDED {
 public:
  explicit JumpPatchSite(MacroAssembler* masm) : masm_(masm) {
#ifdef DEBUG
    info_emitted_ = false;
#endif
  }

  ~JumpPatchSite() {
    DCHECK(patch_site_.is_bound() == info_emitted_);
  }

  // When initially emitting this, ensure that a jump is always generated to
  // skip the inlined smi code.
  void EmitJumpIfNotSmi(Register reg, Label* target) {
    DCHECK(!patch_site_.is_bound() && !info_emitted_);
    Assembler::BlockTrampolinePoolScope block_trampoline_pool(masm_);
    __ bind(&patch_site_);
    __ andi(at, reg, 0);
    // Always taken before patched.
    __ BranchShort(target, eq, at, Operand(zero_reg));
  }

  // When initially emitting this, ensure that a jump is never generated to
  // skip the inlined smi code.
  void EmitJumpIfSmi(Register reg, Label* target) {
    Assembler::BlockTrampolinePoolScope block_trampoline_pool(masm_);
    DCHECK(!patch_site_.is_bound() && !info_emitted_);
    __ bind(&patch_site_);
    __ andi(at, reg, 0);
    // Never taken before patched.
    __ BranchShort(target, ne, at, Operand(zero_reg));
  }

  void EmitPatchInfo() {
    if (patch_site_.is_bound()) {
      int delta_to_patch_site = masm_->InstructionsGeneratedSince(&patch_site_);
      Register reg = Register::from_code(delta_to_patch_site / kImm16Mask);
      __ andi(zero_reg, reg, delta_to_patch_site % kImm16Mask);
#ifdef DEBUG
      info_emitted_ = true;
#endif
    } else {
      __ nop();  // Signals no inlined code.
    }
  }
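
  // Illustrative encoding example (hypothetical delta): if
  // delta_to_patch_site were 0x12345, the marker above would be emitted as
  // andi(zero_reg, <register with code 1>, 0x2346), since
  // 0x12345 / 0xffff == 1 and 0x12345 % 0xffff == 0x2346; the patcher then
  // recovers the delta as 1 * 0xffff + 0x2346 == 0x12345.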

 private:
  MacroAssembler* masm() { return masm_; }
  MacroAssembler* masm_;
  Label patch_site_;
#ifdef DEBUG
  bool info_emitted_;
#endif
};


// Generate code for a JS function. On entry to the function the receiver
// and arguments have been pushed on the stack left to right. The actual
// argument count matches the formal parameter count expected by the
// function.
//
// The live registers are:
//   o a1: the JS function object being called (i.e. ourselves)
//   o a3: the new target value
//   o cp: our context
//   o fp: our caller's frame pointer
//   o sp: stack pointer
//   o ra: return address
//
// The function builds a JS frame. Please see JavaScriptFrameConstants in
// frames-mips.h for its layout.
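//
// As a rough sketch (frames-mips.h is authoritative), the prologue pushes
// ra, fp, cp and a1, so the frame looks approximately like this:
//
//   fp + 2 * kPointerSize : incoming arguments and receiver (above)
//   fp + 1 * kPointerSize : saved return address (ra)
//   fp + 0                : caller's frame pointer
//   fp - 1 * kPointerSize : context (cp)
//   fp - 2 * kPointerSize : JS function (a1)
//   below fp              : stack-allocated locals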
void FullCodeGenerator::Generate() {
  CompilationInfo* info = info_;
  profiling_counter_ = isolate()->factory()->NewCell(
      Handle<Smi>(Smi::FromInt(FLAG_interrupt_budget), isolate()));
  SetFunctionPosition(literal());
  Comment cmnt(masm_, "[ function compiled by full code generator");

  ProfileEntryHookStub::MaybeCallEntryHook(masm_);

  if (FLAG_debug_code && info->ExpectsJSReceiverAsReceiver()) {
    int receiver_offset = info->scope()->num_parameters() * kPointerSize;
    __ lw(a2, MemOperand(sp, receiver_offset));
    __ AssertNotSmi(a2);
    __ GetObjectType(a2, a2, a2);
    __ Check(ge, kSloppyFunctionExpectsJSReceiverReceiver, a2,
             Operand(FIRST_JS_RECEIVER_TYPE));
  }

  // Open a frame scope to indicate that there is a frame on the stack. The
  // MANUAL indicates that the scope shouldn't actually generate code to set up
  // the frame (that is done below).
  FrameScope frame_scope(masm_, StackFrame::MANUAL);

  info->set_prologue_offset(masm_->pc_offset());
  __ Prologue(info->GeneratePreagedPrologue());

  { Comment cmnt(masm_, "[ Allocate locals");
    int locals_count = info->scope()->num_stack_slots();
    // Generators allocate locals, if any, in context slots.
    DCHECK(!IsGeneratorFunction(info->literal()->kind()) || locals_count == 0);
    OperandStackDepthIncrement(locals_count);
    if (locals_count > 0) {
      if (locals_count >= 128) {
        Label ok;
        __ Subu(t5, sp, Operand(locals_count * kPointerSize));
        __ LoadRoot(a2, Heap::kRealStackLimitRootIndex);
        __ Branch(&ok, hs, t5, Operand(a2));
        __ CallRuntime(Runtime::kThrowStackOverflow);
        __ bind(&ok);
      }
      __ LoadRoot(t5, Heap::kUndefinedValueRootIndex);
      int kMaxPushes = FLAG_optimize_for_size ? 4 : 32;
      if (locals_count >= kMaxPushes) {
        int loop_iterations = locals_count / kMaxPushes;
        __ li(a2, Operand(loop_iterations));
        Label loop_header;
        __ bind(&loop_header);
        // Do pushes.
        __ Subu(sp, sp, Operand(kMaxPushes * kPointerSize));
        for (int i = 0; i < kMaxPushes; i++) {
          __ sw(t5, MemOperand(sp, i * kPointerSize));
        }
        // Continue loop if not done.
        __ Subu(a2, a2, Operand(1));
        __ Branch(&loop_header, ne, a2, Operand(zero_reg));
      }
      int remaining = locals_count % kMaxPushes;
      // Emit the remaining pushes.
      __ Subu(sp, sp, Operand(remaining * kPointerSize));
      for (int i = 0; i < remaining; i++) {
        __ sw(t5, MemOperand(sp, i * kPointerSize));
      }
    }
  }

  bool function_in_register_a1 = true;

  // Possibly allocate a local context.
  if (info->scope()->num_heap_slots() > 0) {
    Comment cmnt(masm_, "[ Allocate context");
    // Argument to NewContext is the function, which is still in a1.
    bool need_write_barrier = true;
    int slots = info->scope()->num_heap_slots() - Context::MIN_CONTEXT_SLOTS;
    if (info->scope()->is_script_scope()) {
      __ push(a1);
      __ Push(info->scope()->GetScopeInfo(info->isolate()));
      __ CallRuntime(Runtime::kNewScriptContext);
      PrepareForBailoutForId(BailoutId::ScriptContext(),
                             BailoutState::TOS_REGISTER);
      // The new target value is not used, clobbering is safe.
      DCHECK_NULL(info->scope()->new_target_var());
    } else {
      if (info->scope()->new_target_var() != nullptr) {
        __ push(a3);  // Preserve new target.
      }
      if (slots <= FastNewContextStub::kMaximumSlots) {
        FastNewContextStub stub(isolate(), slots);
        __ CallStub(&stub);
        // Result of FastNewContextStub is always in new space.
        need_write_barrier = false;
      } else {
        __ push(a1);
        __ CallRuntime(Runtime::kNewFunctionContext);
      }
      if (info->scope()->new_target_var() != nullptr) {
        __ pop(a3);  // Restore new target.
      }
    }
    function_in_register_a1 = false;
    // Context is returned in v0. It replaces the context passed to us.
    // It's saved in the stack and kept live in cp.
    __ mov(cp, v0);
    __ sw(v0, MemOperand(fp, StandardFrameConstants::kContextOffset));
    // Copy any necessary parameters into the context.
    int num_parameters = info->scope()->num_parameters();
    int first_parameter = info->scope()->has_this_declaration() ? -1 : 0;
    for (int i = first_parameter; i < num_parameters; i++) {
      Variable* var = (i == -1) ? scope()->receiver() : scope()->parameter(i);
      if (var->IsContextSlot()) {
        int parameter_offset = StandardFrameConstants::kCallerSPOffset +
                               (num_parameters - 1 - i) * kPointerSize;
        // Load parameter from stack.
        __ lw(a0, MemOperand(fp, parameter_offset));
        // Store it in the context.
        MemOperand target = ContextMemOperand(cp, var->index());
        __ sw(a0, target);

        // Update the write barrier.
        if (need_write_barrier) {
          __ RecordWriteContextSlot(cp, target.offset(), a0, a2,
                                    kRAHasBeenSaved, kDontSaveFPRegs);
        } else if (FLAG_debug_code) {
          Label done;
          __ JumpIfInNewSpace(cp, a0, &done);
          __ Abort(kExpectedNewSpaceObject);
          __ bind(&done);
        }
      }
    }
  }

  // The registers holding this function and the new target are both trashed
  // in case we bail out here. But since that can happen only when new target
  // is not used and we allocate a context, the value of
  // |function_in_register_a1| is correct.
  PrepareForBailoutForId(BailoutId::FunctionContext(),
                         BailoutState::NO_REGISTERS);

  // Possibly set up a local binding to the this function, which is used in
  // derived constructors with super calls.
  Variable* this_function_var = scope()->this_function_var();
  if (this_function_var != nullptr) {
    Comment cmnt(masm_, "[ This function");
    if (!function_in_register_a1) {
      __ lw(a1, MemOperand(fp, JavaScriptFrameConstants::kFunctionOffset));
      // The write barrier clobbers the register again; keep it marked as such.
    }
    SetVar(this_function_var, a1, a0, a2);
  }

  // Possibly set up a local binding to the new target value.
  Variable* new_target_var = scope()->new_target_var();
  if (new_target_var != nullptr) {
    Comment cmnt(masm_, "[ new.target");
    SetVar(new_target_var, a3, a0, a2);
  }

  // Possibly allocate a rest parameter array.
  int rest_index;
  Variable* rest_param = scope()->rest_parameter(&rest_index);
  if (rest_param) {
    Comment cmnt(masm_, "[ Allocate rest parameter array");
    if (!function_in_register_a1) {
      __ lw(a1, MemOperand(fp, JavaScriptFrameConstants::kFunctionOffset));
    }
    FastNewRestParameterStub stub(isolate());
    __ CallStub(&stub);
    function_in_register_a1 = false;
    SetVar(rest_param, v0, a1, a2);
  }

  Variable* arguments = scope()->arguments();
  if (arguments != NULL) {
    // Function uses arguments object.
    Comment cmnt(masm_, "[ Allocate arguments object");
    if (!function_in_register_a1) {
      // Load this again, if it's used by the local context below.
      __ lw(a1, MemOperand(fp, JavaScriptFrameConstants::kFunctionOffset));
    }
    if (is_strict(language_mode()) || !has_simple_parameters()) {
      FastNewStrictArgumentsStub stub(isolate());
      __ CallStub(&stub);
    } else if (literal()->has_duplicate_parameters()) {
      __ Push(a1);
      __ CallRuntime(Runtime::kNewSloppyArguments_Generic);
    } else {
      FastNewSloppyArgumentsStub stub(isolate());
      __ CallStub(&stub);
    }

    SetVar(arguments, v0, a1, a2);
  }

  if (FLAG_trace) {
    __ CallRuntime(Runtime::kTraceEnter);
  }

  // Visit the declarations and body unless there is an illegal
  // redeclaration.
  PrepareForBailoutForId(BailoutId::FunctionEntry(),
                         BailoutState::NO_REGISTERS);
  {
    Comment cmnt(masm_, "[ Declarations");
    VisitDeclarations(scope()->declarations());
  }

  // Assert that the declarations do not use ICs. Otherwise the debugger
  // won't be able to redirect a PC at an IC to the correct IC in newly
  // recompiled code.
  DCHECK_EQ(0, ic_total_count_);

  {
    Comment cmnt(masm_, "[ Stack check");
    PrepareForBailoutForId(BailoutId::Declarations(),
                           BailoutState::NO_REGISTERS);
    Label ok;
    __ LoadRoot(at, Heap::kStackLimitRootIndex);
    __ Branch(&ok, hs, sp, Operand(at));
    Handle<Code> stack_check = isolate()->builtins()->StackCheck();
    PredictableCodeSizeScope predictable(
        masm_, masm_->CallSize(stack_check, RelocInfo::CODE_TARGET));
    __ Call(stack_check, RelocInfo::CODE_TARGET);
    __ bind(&ok);
  }

  {
    Comment cmnt(masm_, "[ Body");
    DCHECK(loop_depth() == 0);
    VisitStatements(literal()->body());
    DCHECK(loop_depth() == 0);
  }

  // Always emit a 'return undefined' in case control fell off the end of
  // the body.
  { Comment cmnt(masm_, "[ return <undefined>;");
    __ LoadRoot(v0, Heap::kUndefinedValueRootIndex);
  }
  EmitReturnSequence();
}


void FullCodeGenerator::ClearAccumulator() {
  DCHECK(Smi::FromInt(0) == 0);
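  // Smis carry a zero tag in the low bits, so Smi::FromInt(0) is represented
  // by the machine value 0 and clearing v0 yields the smi zero.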
  __ mov(v0, zero_reg);
}


void FullCodeGenerator::EmitProfilingCounterDecrement(int delta) {
  __ li(a2, Operand(profiling_counter_));
  __ lw(a3, FieldMemOperand(a2, Cell::kValueOffset));
  __ Subu(a3, a3, Operand(Smi::FromInt(delta)));
  __ sw(a3, FieldMemOperand(a2, Cell::kValueOffset));
}


void FullCodeGenerator::EmitProfilingCounterReset() {
  int reset_value = FLAG_interrupt_budget;
  if (info_->is_debug()) {
    // Detect debug break requests as soon as possible.
    reset_value = FLAG_interrupt_budget >> 4;
  }
  __ li(a2, Operand(profiling_counter_));
  __ li(a3, Operand(Smi::FromInt(reset_value)));
  __ sw(a3, FieldMemOperand(a2, Cell::kValueOffset));
}


void FullCodeGenerator::EmitBackEdgeBookkeeping(IterationStatement* stmt,
                                                Label* back_edge_target) {
  // The generated code is used in Deoptimizer::PatchStackCheckCodeAt so we
  // need to make sure it is constant. Branch may emit a skip-or-jump sequence
  // instead of the normal Branch. It seems that the "skip" part of that
  // sequence is about as long as this Branch would be, so it is safe to
  // ignore that.
  Assembler::BlockTrampolinePoolScope block_trampoline_pool(masm_);
  Comment cmnt(masm_, "[ Back edge bookkeeping");
  Label ok;
  DCHECK(back_edge_target->is_bound());
  int distance = masm_->SizeOfCodeGeneratedSince(back_edge_target);
  int weight = Min(kMaxBackEdgeWeight,
                   Max(1, distance / kCodeSizeMultiplier));
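  // The weight scales the counter decrement with the amount of code the back
  // edge spans: bigger loop bodies drain the interrupt budget faster, so hot
  // loops reach the InterruptCheck builtin (and a possible optimization
  // decision) sooner.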
  EmitProfilingCounterDecrement(weight);
  __ slt(at, a3, zero_reg);
  __ beq(at, zero_reg, &ok);
  // Call will emit a li t9 first, so it is safe to use the delay slot.
  __ Call(isolate()->builtins()->InterruptCheck(), RelocInfo::CODE_TARGET);
  // Record a mapping of this PC offset to the OSR id. This is used to find
  // the AST id from the unoptimized code in order to use it as a key into
  // the deoptimization input data found in the optimized code.
  RecordBackEdge(stmt->OsrEntryId());
  EmitProfilingCounterReset();

  __ bind(&ok);
  PrepareForBailoutForId(stmt->EntryId(), BailoutState::NO_REGISTERS);
  // Record a mapping of the OSR id to this PC. This is used if the OSR
  // entry becomes the target of a bailout. We don't expect it to be, but
  // we want it to work if it is.
  PrepareForBailoutForId(stmt->OsrEntryId(), BailoutState::NO_REGISTERS);
}

void FullCodeGenerator::EmitProfilingCounterHandlingForReturnSequence(
    bool is_tail_call) {
  // Pretend that the exit is a backwards jump to the entry.
  int weight = 1;
  if (info_->ShouldSelfOptimize()) {
    weight = FLAG_interrupt_budget / FLAG_self_opt_count;
  } else {
    int distance = masm_->pc_offset();
    weight = Min(kMaxBackEdgeWeight, Max(1, distance / kCodeSizeMultiplier));
  }
  EmitProfilingCounterDecrement(weight);
  Label ok;
  __ Branch(&ok, ge, a3, Operand(zero_reg));
  // We don't need to save the result register if we are going to do a tail
  // call.
  if (!is_tail_call) {
    __ push(v0);
  }
  __ Call(isolate()->builtins()->InterruptCheck(), RelocInfo::CODE_TARGET);
  if (!is_tail_call) {
    __ pop(v0);
  }
  EmitProfilingCounterReset();
  __ bind(&ok);
}

void FullCodeGenerator::EmitReturnSequence() {
  Comment cmnt(masm_, "[ Return sequence");
  if (return_label_.is_bound()) {
    __ Branch(&return_label_);
  } else {
    __ bind(&return_label_);
    if (FLAG_trace) {
      // Push the return value on the stack as the parameter.
      // Runtime::TraceExit returns its parameter in v0.
      __ push(v0);
      __ CallRuntime(Runtime::kTraceExit);
    }
    EmitProfilingCounterHandlingForReturnSequence(false);

    // Make sure that the trampoline pool is not emitted inside of the return
    // sequence.
    { Assembler::BlockTrampolinePoolScope block_trampoline_pool(masm_);
      // Here we use masm_-> instead of the __ macro to keep the code coverage
      // tool from instrumenting this sequence, as we rely on the code size
      // here.
      int32_t arg_count = info_->scope()->num_parameters() + 1;
      int32_t sp_delta = arg_count * kPointerSize;
      SetReturnPosition(literal());
      masm_->mov(sp, fp);
      masm_->MultiPop(static_cast<RegList>(fp.bit() | ra.bit()));
      masm_->Addu(sp, sp, Operand(sp_delta));
      masm_->Jump(ra);
    }
  }
}

void FullCodeGenerator::RestoreContext() {
  __ lw(cp, MemOperand(fp, StandardFrameConstants::kContextOffset));
}

void FullCodeGenerator::StackValueContext::Plug(Variable* var) const {
  DCHECK(var->IsStackAllocated() || var->IsContextSlot());
  codegen()->GetVar(result_register(), var);
  codegen()->PushOperand(result_register());
}


void FullCodeGenerator::EffectContext::Plug(Heap::RootListIndex index) const {
}


void FullCodeGenerator::AccumulatorValueContext::Plug(
    Heap::RootListIndex index) const {
  __ LoadRoot(result_register(), index);
}


void FullCodeGenerator::StackValueContext::Plug(
    Heap::RootListIndex index) const {
  __ LoadRoot(result_register(), index);
  codegen()->PushOperand(result_register());
}


void FullCodeGenerator::TestContext::Plug(Heap::RootListIndex index) const {
  codegen()->PrepareForBailoutBeforeSplit(condition(),
                                          true,
                                          true_label_,
                                          false_label_);
  if (index == Heap::kUndefinedValueRootIndex ||
      index == Heap::kNullValueRootIndex ||
      index == Heap::kFalseValueRootIndex) {
    if (false_label_ != fall_through_) __ Branch(false_label_);
  } else if (index == Heap::kTrueValueRootIndex) {
    if (true_label_ != fall_through_) __ Branch(true_label_);
  } else {
    __ LoadRoot(result_register(), index);
    codegen()->DoTest(this);
  }
}


void FullCodeGenerator::EffectContext::Plug(Handle<Object> lit) const {
}


void FullCodeGenerator::AccumulatorValueContext::Plug(
    Handle<Object> lit) const {
  __ li(result_register(), Operand(lit));
}


void FullCodeGenerator::StackValueContext::Plug(Handle<Object> lit) const {
  // Immediates cannot be pushed directly.
  __ li(result_register(), Operand(lit));
  codegen()->PushOperand(result_register());
}


void FullCodeGenerator::TestContext::Plug(Handle<Object> lit) const {
  codegen()->PrepareForBailoutBeforeSplit(condition(),
                                          true,
                                          true_label_,
                                          false_label_);
  DCHECK(lit->IsNull(isolate()) || lit->IsUndefined(isolate()) ||
         !lit->IsUndetectable());
  if (lit->IsUndefined(isolate()) || lit->IsNull(isolate()) ||
      lit->IsFalse(isolate())) {
    if (false_label_ != fall_through_) __ Branch(false_label_);
  } else if (lit->IsTrue(isolate()) || lit->IsJSObject()) {
    if (true_label_ != fall_through_) __ Branch(true_label_);
  } else if (lit->IsString()) {
    if (String::cast(*lit)->length() == 0) {
      if (false_label_ != fall_through_) __ Branch(false_label_);
    } else {
      if (true_label_ != fall_through_) __ Branch(true_label_);
    }
  } else if (lit->IsSmi()) {
    if (Smi::cast(*lit)->value() == 0) {
      if (false_label_ != fall_through_) __ Branch(false_label_);
    } else {
      if (true_label_ != fall_through_) __ Branch(true_label_);
    }
  } else {
    // For simplicity we always test the accumulator register.
    __ li(result_register(), Operand(lit));
    codegen()->DoTest(this);
  }
}


void FullCodeGenerator::StackValueContext::DropAndPlug(int count,
                                                       Register reg) const {
  DCHECK(count > 0);
  if (count > 1) codegen()->DropOperands(count - 1);
  __ sw(reg, MemOperand(sp, 0));
}


void FullCodeGenerator::EffectContext::Plug(Label* materialize_true,
                                            Label* materialize_false) const {
  DCHECK(materialize_true == materialize_false);
  __ bind(materialize_true);
}


void FullCodeGenerator::AccumulatorValueContext::Plug(
    Label* materialize_true,
    Label* materialize_false) const {
  Label done;
  __ bind(materialize_true);
  __ LoadRoot(result_register(), Heap::kTrueValueRootIndex);
  __ Branch(&done);
  __ bind(materialize_false);
  __ LoadRoot(result_register(), Heap::kFalseValueRootIndex);
  __ bind(&done);
}


void FullCodeGenerator::StackValueContext::Plug(
    Label* materialize_true,
    Label* materialize_false) const {
  codegen()->OperandStackDepthIncrement(1);
  Label done;
  __ bind(materialize_true);
  __ LoadRoot(at, Heap::kTrueValueRootIndex);
  // Push the value as the following branch can clobber at in long branch mode.
  __ push(at);
  __ Branch(&done);
  __ bind(materialize_false);
  __ LoadRoot(at, Heap::kFalseValueRootIndex);
  __ push(at);
  __ bind(&done);
}


void FullCodeGenerator::TestContext::Plug(Label* materialize_true,
                                          Label* materialize_false) const {
  DCHECK(materialize_true == true_label_);
  DCHECK(materialize_false == false_label_);
}


void FullCodeGenerator::AccumulatorValueContext::Plug(bool flag) const {
  Heap::RootListIndex value_root_index =
      flag ? Heap::kTrueValueRootIndex : Heap::kFalseValueRootIndex;
  __ LoadRoot(result_register(), value_root_index);
}


void FullCodeGenerator::StackValueContext::Plug(bool flag) const {
  Heap::RootListIndex value_root_index =
      flag ? Heap::kTrueValueRootIndex : Heap::kFalseValueRootIndex;
  __ LoadRoot(at, value_root_index);
  codegen()->PushOperand(at);
}


void FullCodeGenerator::TestContext::Plug(bool flag) const {
  codegen()->PrepareForBailoutBeforeSplit(condition(),
                                          true,
                                          true_label_,
                                          false_label_);
  if (flag) {
    if (true_label_ != fall_through_) __ Branch(true_label_);
  } else {
    if (false_label_ != fall_through_) __ Branch(false_label_);
  }
}


void FullCodeGenerator::DoTest(Expression* condition,
                               Label* if_true,
                               Label* if_false,
                               Label* fall_through) {
  __ mov(a0, result_register());
  Handle<Code> ic = ToBooleanICStub::GetUninitialized(isolate());
  CallIC(ic, condition->test_id());
  __ LoadRoot(at, Heap::kTrueValueRootIndex);
  Split(eq, result_register(), Operand(at), if_true, if_false, fall_through);
}


void FullCodeGenerator::Split(Condition cc,
                              Register lhs,
                              const Operand& rhs,
                              Label* if_true,
                              Label* if_false,
                              Label* fall_through) {
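  // Emit as few branches as possible by exploiting fall-through: e.g.
  // Split(eq, v0, Operand(at), if_true, if_false, if_false) emits only a
  // "branch to if_true on eq" and falls through into the if_false code.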
  if (if_false == fall_through) {
    __ Branch(if_true, cc, lhs, rhs);
  } else if (if_true == fall_through) {
    __ Branch(if_false, NegateCondition(cc), lhs, rhs);
  } else {
    __ Branch(if_true, cc, lhs, rhs);
    __ Branch(if_false);
  }
}


MemOperand FullCodeGenerator::StackOperand(Variable* var) {
  DCHECK(var->IsStackAllocated());
  // Offset is negative because higher indexes are at lower addresses.
  int offset = -var->index() * kPointerSize;
  // Adjust by a (parameter or local) base offset.
  if (var->IsParameter()) {
    offset += (info_->scope()->num_parameters() + 1) * kPointerSize;
  } else {
    offset += JavaScriptFrameConstants::kLocal0Offset;
  }
  return MemOperand(fp, offset);
}


MemOperand FullCodeGenerator::VarOperand(Variable* var, Register scratch) {
  DCHECK(var->IsContextSlot() || var->IsStackAllocated());
  if (var->IsContextSlot()) {
    int context_chain_length = scope()->ContextChainLength(var->scope());
    __ LoadContext(scratch, context_chain_length);
    return ContextMemOperand(scratch, var->index());
  } else {
    return StackOperand(var);
  }
}


void FullCodeGenerator::GetVar(Register dest, Variable* var) {
  // Use destination as scratch.
  MemOperand location = VarOperand(var, dest);
  __ lw(dest, location);
}


void FullCodeGenerator::SetVar(Variable* var,
                               Register src,
                               Register scratch0,
                               Register scratch1) {
  DCHECK(var->IsContextSlot() || var->IsStackAllocated());
  DCHECK(!scratch0.is(src));
  DCHECK(!scratch0.is(scratch1));
  DCHECK(!scratch1.is(src));
  MemOperand location = VarOperand(var, scratch0);
  __ sw(src, location);
  // Emit the write barrier code if the location is in the heap.
  if (var->IsContextSlot()) {
    __ RecordWriteContextSlot(scratch0,
                              location.offset(),
                              src,
                              scratch1,
                              kRAHasBeenSaved,
                              kDontSaveFPRegs);
  }
}


void FullCodeGenerator::PrepareForBailoutBeforeSplit(Expression* expr,
                                                     bool should_normalize,
                                                     Label* if_true,
                                                     Label* if_false) {
  // Only prepare for bailouts before splits if we're in a test
  // context. Otherwise, we let the Visit function deal with the
  // preparation to avoid preparing with the same AST id twice.
  if (!context()->IsTest()) return;

  Label skip;
  if (should_normalize) __ Branch(&skip);
  PrepareForBailout(expr, BailoutState::TOS_REGISTER);
  if (should_normalize) {
    __ LoadRoot(t0, Heap::kTrueValueRootIndex);
    Split(eq, a0, Operand(t0), if_true, if_false, NULL);
    __ bind(&skip);
  }
}


void FullCodeGenerator::EmitDebugCheckDeclarationContext(Variable* variable) {
  // The variable in the declaration always resides in the current function
  // context.
  DCHECK_EQ(0, scope()->ContextChainLength(variable->scope()));
  if (FLAG_debug_code) {
    // Check that we're not inside a with or catch context.
    __ lw(a1, FieldMemOperand(cp, HeapObject::kMapOffset));
    __ LoadRoot(t0, Heap::kWithContextMapRootIndex);
    __ Check(ne, kDeclarationInWithContext,
             a1, Operand(t0));
    __ LoadRoot(t0, Heap::kCatchContextMapRootIndex);
    __ Check(ne, kDeclarationInCatchContext,
             a1, Operand(t0));
  }
}


void FullCodeGenerator::VisitVariableDeclaration(
    VariableDeclaration* declaration) {
  // If it was not possible to allocate the variable at compile time, we
  // need to "declare" it at runtime to make sure it actually exists in the
  // local context.
  VariableProxy* proxy = declaration->proxy();
  VariableMode mode = declaration->mode();
  Variable* variable = proxy->var();
  bool hole_init = mode == LET || mode == CONST;
  switch (variable->location()) {
    case VariableLocation::GLOBAL:
    case VariableLocation::UNALLOCATED:
      DCHECK(!variable->binding_needs_init());
      globals_->Add(variable->name(), zone());
      globals_->Add(isolate()->factory()->undefined_value(), zone());
      break;

    case VariableLocation::PARAMETER:
    case VariableLocation::LOCAL:
      if (hole_init) {
        Comment cmnt(masm_, "[ VariableDeclaration");
        __ LoadRoot(t0, Heap::kTheHoleValueRootIndex);
        __ sw(t0, StackOperand(variable));
      }
      break;

    case VariableLocation::CONTEXT:
      if (hole_init) {
        Comment cmnt(masm_, "[ VariableDeclaration");
        EmitDebugCheckDeclarationContext(variable);
        __ LoadRoot(at, Heap::kTheHoleValueRootIndex);
        __ sw(at, ContextMemOperand(cp, variable->index()));
        // No write barrier since the_hole_value is in old space.
        PrepareForBailoutForId(proxy->id(), BailoutState::NO_REGISTERS);
      }
      break;

    case VariableLocation::LOOKUP: {
      Comment cmnt(masm_, "[ VariableDeclaration");
      DCHECK_EQ(VAR, mode);
      DCHECK(!hole_init);
      __ li(a2, Operand(variable->name()));
      __ Push(a2);
      __ CallRuntime(Runtime::kDeclareEvalVar);
      PrepareForBailoutForId(proxy->id(), BailoutState::NO_REGISTERS);
      break;
    }
  }
}


void FullCodeGenerator::VisitFunctionDeclaration(
    FunctionDeclaration* declaration) {
  VariableProxy* proxy = declaration->proxy();
  Variable* variable = proxy->var();
  switch (variable->location()) {
    case VariableLocation::GLOBAL:
    case VariableLocation::UNALLOCATED: {
      globals_->Add(variable->name(), zone());
      Handle<SharedFunctionInfo> function =
          Compiler::GetSharedFunctionInfo(declaration->fun(), script(), info_);
      // Check for stack-overflow exception.
      if (function.is_null()) return SetStackOverflow();
      globals_->Add(function, zone());
      break;
    }

    case VariableLocation::PARAMETER:
    case VariableLocation::LOCAL: {
      Comment cmnt(masm_, "[ FunctionDeclaration");
      VisitForAccumulatorValue(declaration->fun());
      __ sw(result_register(), StackOperand(variable));
      break;
    }

    case VariableLocation::CONTEXT: {
      Comment cmnt(masm_, "[ FunctionDeclaration");
      EmitDebugCheckDeclarationContext(variable);
      VisitForAccumulatorValue(declaration->fun());
      __ sw(result_register(), ContextMemOperand(cp, variable->index()));
      int offset = Context::SlotOffset(variable->index());
      // We know that we have written a function, which is not a smi.
      __ RecordWriteContextSlot(cp,
                                offset,
                                result_register(),
                                a2,
                                kRAHasBeenSaved,
                                kDontSaveFPRegs,
                                EMIT_REMEMBERED_SET,
                                OMIT_SMI_CHECK);
      PrepareForBailoutForId(proxy->id(), BailoutState::NO_REGISTERS);
      break;
    }

    case VariableLocation::LOOKUP: {
      Comment cmnt(masm_, "[ FunctionDeclaration");
      __ li(a2, Operand(variable->name()));
      PushOperand(a2);
      // Push initial value for function declaration.
      VisitForStackValue(declaration->fun());
      CallRuntimeWithOperands(Runtime::kDeclareEvalFunction);
      PrepareForBailoutForId(proxy->id(), BailoutState::NO_REGISTERS);
      break;
    }
  }
}


void FullCodeGenerator::DeclareGlobals(Handle<FixedArray> pairs) {
  // Call the runtime to declare the globals.
  __ li(a1, Operand(pairs));
  __ li(a0, Operand(Smi::FromInt(DeclareGlobalsFlags())));
  __ Push(a1, a0);
  __ CallRuntime(Runtime::kDeclareGlobals);
  // Return value is ignored.
}


void FullCodeGenerator::DeclareModules(Handle<FixedArray> descriptions) {
  // Call the runtime to declare the modules.
  __ Push(descriptions);
  __ CallRuntime(Runtime::kDeclareModules);
  // Return value is ignored.
}


void FullCodeGenerator::VisitSwitchStatement(SwitchStatement* stmt) {
  Comment cmnt(masm_, "[ SwitchStatement");
  Breakable nested_statement(this, stmt);
  SetStatementPosition(stmt);

  // Keep the switch value on the stack until a case matches.
  VisitForStackValue(stmt->tag());
  PrepareForBailoutForId(stmt->EntryId(), BailoutState::NO_REGISTERS);

  ZoneList<CaseClause*>* clauses = stmt->cases();
  CaseClause* default_clause = NULL;  // Can occur anywhere in the list.

  Label next_test;  // Recycled for each test.
  // Compile all the tests with branches to their bodies.
  for (int i = 0; i < clauses->length(); i++) {
    CaseClause* clause = clauses->at(i);
    clause->body_target()->Unuse();

    // The default is not a test, but remember it as final fall through.
    if (clause->is_default()) {
      default_clause = clause;
      continue;
    }

    Comment cmnt(masm_, "[ Case comparison");
    __ bind(&next_test);
    next_test.Unuse();

    // Compile the label expression.
    VisitForAccumulatorValue(clause->label());
    __ mov(a0, result_register());  // CompareStub requires args in a0, a1.

    // Perform the comparison as if via '==='.
    __ lw(a1, MemOperand(sp, 0));  // Switch value.
    bool inline_smi_code = ShouldInlineSmiCase(Token::EQ_STRICT);
    JumpPatchSite patch_site(masm_);
    if (inline_smi_code) {
      Label slow_case;
      __ or_(a2, a1, a0);
      patch_site.EmitJumpIfNotSmi(a2, &slow_case);

      __ Branch(&next_test, ne, a1, Operand(a0));
      __ Drop(1);  // Switch value is no longer needed.
      __ Branch(clause->body_target());

      __ bind(&slow_case);
    }

    // Record position before stub call for type feedback.
    SetExpressionPosition(clause);
    Handle<Code> ic =
        CodeFactory::CompareIC(isolate(), Token::EQ_STRICT).code();
    CallIC(ic, clause->CompareId());
    patch_site.EmitPatchInfo();

    Label skip;
    __ Branch(&skip);
    PrepareForBailout(clause, BailoutState::TOS_REGISTER);
    __ LoadRoot(at, Heap::kTrueValueRootIndex);
    __ Branch(&next_test, ne, v0, Operand(at));
    __ Drop(1);
    __ Branch(clause->body_target());
    __ bind(&skip);

    __ Branch(&next_test, ne, v0, Operand(zero_reg));
    __ Drop(1);  // Switch value is no longer needed.
    __ Branch(clause->body_target());
  }

  // Discard the test value and jump to the default if present, otherwise to
  // the end of the statement.
  __ bind(&next_test);
  DropOperands(1);  // Switch value is no longer needed.
  if (default_clause == NULL) {
    __ Branch(nested_statement.break_label());
  } else {
    __ Branch(default_clause->body_target());
  }

  // Compile all the case bodies.
  for (int i = 0; i < clauses->length(); i++) {
    Comment cmnt(masm_, "[ Case body");
    CaseClause* clause = clauses->at(i);
    __ bind(clause->body_target());
    PrepareForBailoutForId(clause->EntryId(), BailoutState::NO_REGISTERS);
    VisitStatements(clause->statements());
  }

  __ bind(nested_statement.break_label());
  PrepareForBailoutForId(stmt->ExitId(), BailoutState::NO_REGISTERS);
}


void FullCodeGenerator::VisitForInStatement(ForInStatement* stmt) {
  Comment cmnt(masm_, "[ ForInStatement");
  SetStatementPosition(stmt, SKIP_BREAK);

  FeedbackVectorSlot slot = stmt->ForInFeedbackSlot();

  // Get the object to enumerate over.
  SetExpressionAsStatementPosition(stmt->enumerable());
  VisitForAccumulatorValue(stmt->enumerable());
  __ mov(a0, result_register());
  OperandStackDepthIncrement(5);

  Label loop, exit;
  Iteration loop_statement(this, stmt);
  increment_loop_depth();

  // If the object is null or undefined, skip over the loop, otherwise convert
  // it to a JS receiver. See ECMA-262 version 5, section 12.6.4.
  Label convert, done_convert;
  __ JumpIfSmi(a0, &convert);
  __ GetObjectType(a0, a1, a1);
  __ Branch(USE_DELAY_SLOT, &done_convert, ge, a1,
            Operand(FIRST_JS_RECEIVER_TYPE));
  __ LoadRoot(at, Heap::kNullValueRootIndex);  // In delay slot.
  __ Branch(USE_DELAY_SLOT, &exit, eq, a0, Operand(at));
  __ LoadRoot(at, Heap::kUndefinedValueRootIndex);  // In delay slot.
  __ Branch(&exit, eq, a0, Operand(at));
  __ bind(&convert);
  ToObjectStub stub(isolate());
  __ CallStub(&stub);
  __ mov(a0, v0);
  __ bind(&done_convert);
  PrepareForBailoutForId(stmt->ToObjectId(), BailoutState::TOS_REGISTER);
  __ push(a0);

  // Check cache validity in generated code. If we cannot guarantee cache
  // validity, call the runtime system to check cache validity or get the
  // property names in a fixed array. Note: Proxies never have an enum cache,
  // so will always take the slow path.
  Label call_runtime;
  __ CheckEnumCache(&call_runtime);

  // The enum cache is valid. Load the map of the object being
  // iterated over and use the cache for the iteration.
  Label use_cache;
  __ lw(v0, FieldMemOperand(a0, HeapObject::kMapOffset));
  __ Branch(&use_cache);

  // Get the set of properties to enumerate.
  __ bind(&call_runtime);
  __ push(a0);  // Duplicate the enumerable object on the stack.
  __ CallRuntime(Runtime::kForInEnumerate);
  PrepareForBailoutForId(stmt->EnumId(), BailoutState::TOS_REGISTER);

  // If we got a map from the runtime call, we can do a fast
  // modification check. Otherwise, we got a fixed array, and we have
  // to do a slow check.
  Label fixed_array;
  __ lw(a2, FieldMemOperand(v0, HeapObject::kMapOffset));
  __ LoadRoot(at, Heap::kMetaMapRootIndex);
  __ Branch(&fixed_array, ne, a2, Operand(at));

  // We got a map in register v0. Get the enumeration cache from it.
  Label no_descriptors;
  __ bind(&use_cache);

  __ EnumLength(a1, v0);
  __ Branch(&no_descriptors, eq, a1, Operand(Smi::FromInt(0)));

  __ LoadInstanceDescriptors(v0, a2);
  __ lw(a2, FieldMemOperand(a2, DescriptorArray::kEnumCacheOffset));
  __ lw(a2, FieldMemOperand(a2, DescriptorArray::kEnumCacheBridgeCacheOffset));

  // Set up the four remaining stack slots.
  __ li(a0, Operand(Smi::FromInt(0)));
  // Push map, enumeration cache, enumeration cache length (as smi) and zero.
  __ Push(v0, a2, a1, a0);
  __ jmp(&loop);

  __ bind(&no_descriptors);
  __ Drop(1);
  __ jmp(&exit);

  // We got a fixed array in register v0. Iterate through that.
  __ bind(&fixed_array);

  __ li(a1, Operand(Smi::FromInt(1)));  // Smi(1) indicates slow check.
  __ Push(a1, v0);  // Smi and array.
  __ lw(a1, FieldMemOperand(v0, FixedArray::kLengthOffset));
  __ Push(a1);  // Fixed array length (as smi).
  PrepareForBailoutForId(stmt->PrepareId(), BailoutState::NO_REGISTERS);
  __ li(a0, Operand(Smi::FromInt(0)));
  __ Push(a0);  // Initial index.
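  // The operand stack now holds the five for-in slots read back below
  // (fast case shown; the slow case has Smi(1) and the fixed array of keys
  // instead of the map and the enum cache):
  //   sp[0 * kPointerSize]: current index (smi)
  //   sp[1 * kPointerSize]: length of the enum cache / fixed array (smi)
  //   sp[2 * kPointerSize]: enum cache / fixed array of keys
  //   sp[3 * kPointerSize]: expected receiver map, or Smi(1) for slow checks
  //   sp[4 * kPointerSize]: the enumerable object itself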
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00001074
1075 // Generate code for doing the condition check.
1076 __ bind(&loop);
1077 SetExpressionAsStatementPosition(stmt->each());
1078
1079 // Load the current count to a0, load the length to a1.
1080 __ lw(a0, MemOperand(sp, 0 * kPointerSize));
1081 __ lw(a1, MemOperand(sp, 1 * kPointerSize));
1082 __ Branch(loop_statement.break_label(), hs, a0, Operand(a1));
1083
1084 // Get the current entry of the array into register a3.
1085 __ lw(a2, MemOperand(sp, 2 * kPointerSize));
1086 __ Addu(a2, a2, Operand(FixedArray::kHeaderSize - kHeapObjectTag));
Ben Murdoch097c5b22016-05-18 11:27:45 +01001087 __ Lsa(t0, a2, a0, kPointerSizeLog2 - kSmiTagSize);
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00001088 __ lw(a3, MemOperand(t0)); // Current entry.
1089
1090 // Get the expected map from the stack or a smi in the
1091 // permanent slow case into register a2.
1092 __ lw(a2, MemOperand(sp, 3 * kPointerSize));
1093
1094 // Check if the expected map still matches that of the enumerable.
1095 // If not, we may have to filter the key.
1096 Label update_each;
1097 __ lw(a1, MemOperand(sp, 4 * kPointerSize));
1098 __ lw(t0, FieldMemOperand(a1, HeapObject::kMapOffset));
1099 __ Branch(&update_each, eq, t0, Operand(a2));
1100
Ben Murdochda12d292016-06-02 14:46:10 +01001101 // We need to filter the key, record slow-path here.
1102 int const vector_index = SmiFromSlot(slot)->value();
Ben Murdoch097c5b22016-05-18 11:27:45 +01001103 __ EmitLoadTypeFeedbackVector(a0);
1104 __ li(a2, Operand(TypeFeedbackVector::MegamorphicSentinel(isolate())));
1105 __ sw(a2, FieldMemOperand(a0, FixedArray::OffsetOfElementAt(vector_index)));
1106
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00001107 // Convert the entry to a string or (smi) 0 if it isn't a property
1108 // any more. If the property has been removed while iterating, we
1109 // just skip it.
1110 __ Push(a1, a3); // Enumerable and current entry.
1111 __ CallRuntime(Runtime::kForInFilter);
Ben Murdochc5610432016-08-08 18:44:38 +01001112 PrepareForBailoutForId(stmt->FilterId(), BailoutState::TOS_REGISTER);
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00001113 __ mov(a3, result_register());
1114 __ LoadRoot(at, Heap::kUndefinedValueRootIndex);
1115 __ Branch(loop_statement.continue_label(), eq, a3, Operand(at));
1116
1117 // Update the 'each' property or variable from the possibly filtered
1118 // entry in register a3.
1119 __ bind(&update_each);
1120 __ mov(result_register(), a3);
1121 // Perform the assignment as if via '='.
1122 { EffectContext context(this);
1123 EmitAssignment(stmt->each(), stmt->EachFeedbackSlot());
Ben Murdochc5610432016-08-08 18:44:38 +01001124 PrepareForBailoutForId(stmt->AssignmentId(), BailoutState::NO_REGISTERS);
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00001125 }
1126
1127 // Both Crankshaft and Turbofan expect BodyId to be right before stmt->body().
Ben Murdochc5610432016-08-08 18:44:38 +01001128 PrepareForBailoutForId(stmt->BodyId(), BailoutState::NO_REGISTERS);
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00001129 // Generate code for the body of the loop.
1130 Visit(stmt->body());
1131
1132 // Generate code for the going to the next element by incrementing
1133 // the index (smi) stored on top of the stack.
1134 __ bind(loop_statement.continue_label());
1135 __ pop(a0);
1136 __ Addu(a0, a0, Operand(Smi::FromInt(1)));
1137 __ push(a0);
1138
1139 EmitBackEdgeBookkeeping(stmt, &loop);
1140 __ Branch(&loop);
1141
1142 // Remove the pointers stored on the stack.
1143 __ bind(loop_statement.break_label());
Ben Murdoch097c5b22016-05-18 11:27:45 +01001144 DropOperands(5);
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00001145
1146 // Exit and decrement the loop depth.
Ben Murdochc5610432016-08-08 18:44:38 +01001147 PrepareForBailoutForId(stmt->ExitId(), BailoutState::NO_REGISTERS);
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00001148 __ bind(&exit);
1149 decrement_loop_depth();
1150}
1151
1152
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00001153void FullCodeGenerator::EmitSetHomeObject(Expression* initializer, int offset,
1154 FeedbackVectorSlot slot) {
1155 DCHECK(NeedsHomeObject(initializer));
1156 __ lw(StoreDescriptor::ReceiverRegister(), MemOperand(sp));
1157 __ li(StoreDescriptor::NameRegister(),
1158 Operand(isolate()->factory()->home_object_symbol()));
1159 __ lw(StoreDescriptor::ValueRegister(),
1160 MemOperand(sp, offset * kPointerSize));
1161 EmitLoadStoreICSlot(slot);
1162 CallStoreIC();
1163}
1164
1165
1166void FullCodeGenerator::EmitSetHomeObjectAccumulator(Expression* initializer,
1167 int offset,
1168 FeedbackVectorSlot slot) {
1169 DCHECK(NeedsHomeObject(initializer));
1170 __ Move(StoreDescriptor::ReceiverRegister(), v0);
1171 __ li(StoreDescriptor::NameRegister(),
1172 Operand(isolate()->factory()->home_object_symbol()));
1173 __ lw(StoreDescriptor::ValueRegister(),
1174 MemOperand(sp, offset * kPointerSize));
1175 EmitLoadStoreICSlot(slot);
1176 CallStoreIC();
1177}
1178
1179
1180void FullCodeGenerator::EmitLoadGlobalCheckExtensions(VariableProxy* proxy,
1181 TypeofMode typeof_mode,
1182 Label* slow) {
1183 Register current = cp;
1184 Register next = a1;
1185 Register temp = a2;
1186
1187 Scope* s = scope();
1188 while (s != NULL) {
1189 if (s->num_heap_slots() > 0) {
1190 if (s->calls_sloppy_eval()) {
1191 // Check that extension is "the hole".
1192 __ lw(temp, ContextMemOperand(current, Context::EXTENSION_INDEX));
1193 __ JumpIfNotRoot(temp, Heap::kTheHoleValueRootIndex, slow);
1194 }
1195 // Load next context in chain.
1196 __ lw(next, ContextMemOperand(current, Context::PREVIOUS_INDEX));
1197 // Walk the rest of the chain without clobbering cp.
1198 current = next;
1199 }
1200 // If no outer scope calls eval, we do not need to check more
1201 // context extensions.
1202 if (!s->outer_scope_calls_sloppy_eval() || s->is_eval_scope()) break;
1203 s = s->outer_scope();
1204 }
1205
1206 if (s->is_eval_scope()) {
1207 Label loop, fast;
1208 if (!current.is(next)) {
1209 __ Move(next, current);
1210 }
1211 __ bind(&loop);
1212 // Terminate at native context.
1213 __ lw(temp, FieldMemOperand(next, HeapObject::kMapOffset));
1214 __ LoadRoot(t0, Heap::kNativeContextMapRootIndex);
1215 __ Branch(&fast, eq, temp, Operand(t0));
1216 // Check that extension is "the hole".
1217 __ lw(temp, ContextMemOperand(next, Context::EXTENSION_INDEX));
1218 __ JumpIfNotRoot(temp, Heap::kTheHoleValueRootIndex, slow);
1219 // Load next context in chain.
1220 __ lw(next, ContextMemOperand(next, Context::PREVIOUS_INDEX));
1221 __ Branch(&loop);
1222 __ bind(&fast);
1223 }
1224
1225 // All extension objects were empty and it is safe to use a normal global
1226 // load machinery.
1227 EmitGlobalVariableLoad(proxy, typeof_mode);
1228}
1229
1230
1231MemOperand FullCodeGenerator::ContextSlotOperandCheckExtensions(Variable* var,
1232 Label* slow) {
1233 DCHECK(var->IsContextSlot());
1234 Register context = cp;
1235 Register next = a3;
1236 Register temp = t0;
1237
1238 for (Scope* s = scope(); s != var->scope(); s = s->outer_scope()) {
1239 if (s->num_heap_slots() > 0) {
1240 if (s->calls_sloppy_eval()) {
1241 // Check that extension is "the hole".
1242 __ lw(temp, ContextMemOperand(context, Context::EXTENSION_INDEX));
1243 __ JumpIfNotRoot(temp, Heap::kTheHoleValueRootIndex, slow);
1244 }
1245 __ lw(next, ContextMemOperand(context, Context::PREVIOUS_INDEX));
1246 // Walk the rest of the chain without clobbering cp.
1247 context = next;
1248 }
1249 }
1250 // Check that last extension is "the hole".
1251 __ lw(temp, ContextMemOperand(context, Context::EXTENSION_INDEX));
1252 __ JumpIfNotRoot(temp, Heap::kTheHoleValueRootIndex, slow);
1253
1254 // This function is used only for loads, not stores, so it's safe to
1255 // return an cp-based operand (the write barrier cannot be allowed to
1256 // destroy the cp register).
1257 return ContextMemOperand(context, var->index());
1258}
1259
1260
1261void FullCodeGenerator::EmitDynamicLookupFastCase(VariableProxy* proxy,
1262 TypeofMode typeof_mode,
1263 Label* slow, Label* done) {
1264 // Generate fast-case code for variables that might be shadowed by
1265 // eval-introduced variables. Eval is used a lot without
1266 // introducing variables. In those cases, we do not want to
1267 // perform a runtime call for all variables in the scope
1268 // containing the eval.
1269 Variable* var = proxy->var();
1270 if (var->mode() == DYNAMIC_GLOBAL) {
1271 EmitLoadGlobalCheckExtensions(proxy, typeof_mode, slow);
1272 __ Branch(done);
1273 } else if (var->mode() == DYNAMIC_LOCAL) {
1274 Variable* local = var->local_if_not_shadowed();
1275 __ lw(v0, ContextSlotOperandCheckExtensions(local, slow));
Ben Murdochc5610432016-08-08 18:44:38 +01001276 if (local->mode() == LET || local->mode() == CONST) {
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00001277 __ LoadRoot(at, Heap::kTheHoleValueRootIndex);
1278 __ subu(at, v0, at); // Sub as compare: at == 0 on eq.
Ben Murdochc5610432016-08-08 18:44:38 +01001279 __ Branch(done, ne, at, Operand(zero_reg));
1280 __ li(a0, Operand(var->name()));
1281 __ push(a0);
1282 __ CallRuntime(Runtime::kThrowReferenceError);
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00001283 }
1284 __ Branch(done);
1285 }
1286}
1287
1288
1289void FullCodeGenerator::EmitGlobalVariableLoad(VariableProxy* proxy,
1290 TypeofMode typeof_mode) {
Ben Murdoch61f157c2016-09-16 13:49:30 +01001291#ifdef DEBUG
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00001292 Variable* var = proxy->var();
1293 DCHECK(var->IsUnallocatedOrGlobalSlot() ||
1294 (var->IsLookupSlot() && var->mode() == DYNAMIC_GLOBAL));
Ben Murdoch61f157c2016-09-16 13:49:30 +01001295#endif
1296 __ li(LoadGlobalDescriptor::SlotRegister(),
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00001297 Operand(SmiFromSlot(proxy->VariableFeedbackSlot())));
Ben Murdoch61f157c2016-09-16 13:49:30 +01001298 CallLoadGlobalIC(typeof_mode);
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00001299}
1300
1301
1302void FullCodeGenerator::EmitVariableLoad(VariableProxy* proxy,
1303 TypeofMode typeof_mode) {
1304 // Record position before possible IC call.
1305 SetExpressionPosition(proxy);
Ben Murdochc5610432016-08-08 18:44:38 +01001306 PrepareForBailoutForId(proxy->BeforeId(), BailoutState::NO_REGISTERS);
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00001307 Variable* var = proxy->var();
1308
1309 // Three cases: global variables, lookup variables, and all other types of
1310 // variables.
1311 switch (var->location()) {
1312 case VariableLocation::GLOBAL:
1313 case VariableLocation::UNALLOCATED: {
1314 Comment cmnt(masm_, "[ Global variable");
1315 EmitGlobalVariableLoad(proxy, typeof_mode);
1316 context()->Plug(v0);
1317 break;
1318 }
1319
1320 case VariableLocation::PARAMETER:
1321 case VariableLocation::LOCAL:
1322 case VariableLocation::CONTEXT: {
1323 DCHECK_EQ(NOT_INSIDE_TYPEOF, typeof_mode);
1324 Comment cmnt(masm_, var->IsContextSlot() ? "[ Context variable"
1325 : "[ Stack variable");
1326 if (NeedsHoleCheckForLoad(proxy)) {
1327 // Let and const need a read barrier.
1328 GetVar(v0, var);
1329 __ LoadRoot(at, Heap::kTheHoleValueRootIndex);
1330 __ subu(at, v0, at); // Sub as compare: at == 0 on eq.
1331 if (var->mode() == LET || var->mode() == CONST) {
1332 // Throw a reference error when using an uninitialized let/const
1333 // binding in harmony mode.
1334 Label done;
1335 __ Branch(&done, ne, at, Operand(zero_reg));
1336 __ li(a0, Operand(var->name()));
1337 __ push(a0);
1338 __ CallRuntime(Runtime::kThrowReferenceError);
1339 __ bind(&done);
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00001340 }
1341 context()->Plug(v0);
1342 break;
1343 }
1344 context()->Plug(var);
1345 break;
1346 }
1347
1348 case VariableLocation::LOOKUP: {
1349 Comment cmnt(masm_, "[ Lookup variable");
1350 Label done, slow;
1351 // Generate code for loading from variables potentially shadowed
1352 // by eval-introduced variables.
1353 EmitDynamicLookupFastCase(proxy, typeof_mode, &slow, &done);
1354 __ bind(&slow);
Ben Murdoch097c5b22016-05-18 11:27:45 +01001355 __ Push(var->name());
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00001356 Runtime::FunctionId function_id =
1357 typeof_mode == NOT_INSIDE_TYPEOF
1358 ? Runtime::kLoadLookupSlot
Ben Murdoch097c5b22016-05-18 11:27:45 +01001359 : Runtime::kLoadLookupSlotInsideTypeof;
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00001360 __ CallRuntime(function_id);
1361 __ bind(&done);
1362 context()->Plug(v0);
1363 }
1364 }
1365}
1366
1367
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00001368void FullCodeGenerator::EmitAccessor(ObjectLiteralProperty* property) {
1369 Expression* expression = (property == NULL) ? NULL : property->value();
1370 if (expression == NULL) {
1371 __ LoadRoot(a1, Heap::kNullValueRootIndex);
Ben Murdoch097c5b22016-05-18 11:27:45 +01001372 PushOperand(a1);
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00001373 } else {
1374 VisitForStackValue(expression);
1375 if (NeedsHomeObject(expression)) {
1376 DCHECK(property->kind() == ObjectLiteral::Property::GETTER ||
1377 property->kind() == ObjectLiteral::Property::SETTER);
1378 int offset = property->kind() == ObjectLiteral::Property::GETTER ? 2 : 3;
1379 EmitSetHomeObject(expression, offset, property->GetSlot());
1380 }
1381 }
1382}


void FullCodeGenerator::VisitObjectLiteral(ObjectLiteral* expr) {
  Comment cmnt(masm_, "[ ObjectLiteral");

  Handle<FixedArray> constant_properties = expr->constant_properties();
  __ lw(a3, MemOperand(fp, JavaScriptFrameConstants::kFunctionOffset));
  __ li(a2, Operand(Smi::FromInt(expr->literal_index())));
  __ li(a1, Operand(constant_properties));
  __ li(a0, Operand(Smi::FromInt(expr->ComputeFlags())));
  if (MustCreateObjectLiteralWithRuntime(expr)) {
    __ Push(a3, a2, a1, a0);
    __ CallRuntime(Runtime::kCreateObjectLiteral);
  } else {
    FastCloneShallowObjectStub stub(isolate(), expr->properties_count());
    __ CallStub(&stub);
    RestoreContext();
  }
  PrepareForBailoutForId(expr->CreateLiteralId(), BailoutState::TOS_REGISTER);

  // If result_saved is true the result is on top of the stack. If
  // result_saved is false the result is in v0.
  bool result_saved = false;

  AccessorTable accessor_table(zone());
  int property_index = 0;
  for (; property_index < expr->properties()->length(); property_index++) {
    ObjectLiteral::Property* property = expr->properties()->at(property_index);
    if (property->is_computed_name()) break;
    if (property->IsCompileTimeValue()) continue;

    Literal* key = property->key()->AsLiteral();
    Expression* value = property->value();
    if (!result_saved) {
      PushOperand(v0);  // Save result on stack.
      result_saved = true;
    }
    switch (property->kind()) {
      case ObjectLiteral::Property::CONSTANT:
        UNREACHABLE();
      case ObjectLiteral::Property::MATERIALIZED_LITERAL:
        DCHECK(!CompileTimeValue::IsCompileTimeValue(property->value()));
        // Fall through.
      case ObjectLiteral::Property::COMPUTED:
        // It is safe to use [[Put]] here because the boilerplate already
        // contains computed properties with an uninitialized value.
        if (key->value()->IsInternalizedString()) {
          if (property->emit_store()) {
            VisitForAccumulatorValue(value);
            __ mov(StoreDescriptor::ValueRegister(), result_register());
            DCHECK(StoreDescriptor::ValueRegister().is(a0));
            __ li(StoreDescriptor::NameRegister(), Operand(key->value()));
            __ lw(StoreDescriptor::ReceiverRegister(), MemOperand(sp));
            EmitLoadStoreICSlot(property->GetSlot(0));
            CallStoreIC();
            PrepareForBailoutForId(key->id(), BailoutState::NO_REGISTERS);

            if (NeedsHomeObject(value)) {
              EmitSetHomeObjectAccumulator(value, 0, property->GetSlot(1));
            }
          } else {
            VisitForEffect(value);
          }
          break;
        }
        // Duplicate receiver on stack.
        __ lw(a0, MemOperand(sp));
        PushOperand(a0);
        VisitForStackValue(key);
        VisitForStackValue(value);
        if (property->emit_store()) {
          if (NeedsHomeObject(value)) {
            EmitSetHomeObject(value, 2, property->GetSlot());
          }
          __ li(a0, Operand(Smi::FromInt(SLOPPY)));  // PropertyAttributes.
          PushOperand(a0);
          CallRuntimeWithOperands(Runtime::kSetProperty);
        } else {
          DropOperands(3);
        }
        break;
      case ObjectLiteral::Property::PROTOTYPE:
        // Duplicate receiver on stack.
        __ lw(a0, MemOperand(sp));
        PushOperand(a0);
        VisitForStackValue(value);
        DCHECK(property->emit_store());
        CallRuntimeWithOperands(Runtime::kInternalSetPrototype);
        PrepareForBailoutForId(expr->GetIdForPropertySet(property_index),
                               BailoutState::NO_REGISTERS);
        break;
      case ObjectLiteral::Property::GETTER:
        if (property->emit_store()) {
          AccessorTable::Iterator it = accessor_table.lookup(key);
          it->second->bailout_id = expr->GetIdForPropertySet(property_index);
          it->second->getter = property;
        }
        break;
      case ObjectLiteral::Property::SETTER:
        if (property->emit_store()) {
          AccessorTable::Iterator it = accessor_table.lookup(key);
          it->second->bailout_id = expr->GetIdForPropertySet(property_index);
          it->second->setter = property;
        }
        break;
    }
  }

  // Emit code to define accessors, using only a single call to the runtime for
  // each pair of corresponding getters and setters.
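  // E.g. for a literal such as { get x() {}, set x(v) {} } the getter and
  // setter share one accessor_table entry, so a single
  // Runtime::kDefineAccessorPropertyUnchecked call installs both.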
  for (AccessorTable::Iterator it = accessor_table.begin();
       it != accessor_table.end();
       ++it) {
    __ lw(a0, MemOperand(sp));  // Duplicate receiver.
    PushOperand(a0);
    VisitForStackValue(it->first);
    EmitAccessor(it->second->getter);
    EmitAccessor(it->second->setter);
    __ li(a0, Operand(Smi::FromInt(NONE)));
    PushOperand(a0);
    CallRuntimeWithOperands(Runtime::kDefineAccessorPropertyUnchecked);
    PrepareForBailoutForId(it->second->bailout_id, BailoutState::NO_REGISTERS);
  }

  // Object literals have two parts. The "static" part on the left contains no
  // computed property names, and so we can compute its map ahead of time; see
  // runtime.cc::CreateObjectLiteralBoilerplate. The second "dynamic" part
  // starts with the first computed property name, and continues with all
  // properties to its right. All the code from above initializes the static
  // component of the object literal, and arranges for the map of the result to
  // reflect the static order in which the keys appear. For the dynamic
  // properties, we compile them into a series of "SetOwnProperty" runtime
  // calls. This will preserve insertion order.
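  // E.g. in { a: 1, [key()]: 2, b: 3 } only "a" belongs to the static part;
  // everything from [key()] onwards, including "b", is defined through the
  // runtime calls emitted below, preserving the source insertion order.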
  for (; property_index < expr->properties()->length(); property_index++) {
    ObjectLiteral::Property* property = expr->properties()->at(property_index);

    Expression* value = property->value();
    if (!result_saved) {
      PushOperand(v0);  // Save result on the stack
      result_saved = true;
    }

    __ lw(a0, MemOperand(sp));  // Duplicate receiver.
    PushOperand(a0);

    if (property->kind() == ObjectLiteral::Property::PROTOTYPE) {
      DCHECK(!property->is_computed_name());
      VisitForStackValue(value);
      DCHECK(property->emit_store());
      CallRuntimeWithOperands(Runtime::kInternalSetPrototype);
      PrepareForBailoutForId(expr->GetIdForPropertySet(property_index),
                             BailoutState::NO_REGISTERS);
    } else {
      EmitPropertyKey(property, expr->GetIdForPropertyName(property_index));
      VisitForStackValue(value);
      if (NeedsHomeObject(value)) {
        EmitSetHomeObject(value, 2, property->GetSlot());
      }

      switch (property->kind()) {
        case ObjectLiteral::Property::CONSTANT:
        case ObjectLiteral::Property::MATERIALIZED_LITERAL:
        case ObjectLiteral::Property::COMPUTED:
          if (property->emit_store()) {
            PushOperand(Smi::FromInt(NONE));
            PushOperand(Smi::FromInt(property->NeedsSetFunctionName()));
            CallRuntimeWithOperands(Runtime::kDefineDataPropertyInLiteral);
            PrepareForBailoutForId(expr->GetIdForPropertySet(property_index),
                                   BailoutState::NO_REGISTERS);
          } else {
            DropOperands(3);
          }
          break;

        case ObjectLiteral::Property::PROTOTYPE:
          UNREACHABLE();
          break;

        case ObjectLiteral::Property::GETTER:
          PushOperand(Smi::FromInt(NONE));
          CallRuntimeWithOperands(Runtime::kDefineGetterPropertyUnchecked);
          break;

        case ObjectLiteral::Property::SETTER:
          PushOperand(Smi::FromInt(NONE));
          CallRuntimeWithOperands(Runtime::kDefineSetterPropertyUnchecked);
          break;
      }
    }
  }

  if (result_saved) {
    context()->PlugTOS();
  } else {
    context()->Plug(v0);
  }
}


void FullCodeGenerator::VisitArrayLiteral(ArrayLiteral* expr) {
  Comment cmnt(masm_, "[ ArrayLiteral");

  Handle<FixedArray> constant_elements = expr->constant_elements();
  bool has_fast_elements =
      IsFastObjectElementsKind(expr->constant_elements_kind());

  AllocationSiteMode allocation_site_mode = TRACK_ALLOCATION_SITE;
  if (has_fast_elements && !FLAG_allocation_site_pretenuring) {
    // If the only customer of allocation sites is transitioning, then
    // we can turn it off if we don't have anywhere else to transition to.
    allocation_site_mode = DONT_TRACK_ALLOCATION_SITE;
  }

  __ mov(a0, result_register());
  __ lw(a3, MemOperand(fp, JavaScriptFrameConstants::kFunctionOffset));
  __ li(a2, Operand(Smi::FromInt(expr->literal_index())));
  __ li(a1, Operand(constant_elements));
  if (MustCreateArrayLiteralWithRuntime(expr)) {
    __ li(a0, Operand(Smi::FromInt(expr->ComputeFlags())));
    __ Push(a3, a2, a1, a0);
    __ CallRuntime(Runtime::kCreateArrayLiteral);
  } else {
    FastCloneShallowArrayStub stub(isolate(), allocation_site_mode);
    __ CallStub(&stub);
  }
  PrepareForBailoutForId(expr->CreateLiteralId(), BailoutState::TOS_REGISTER);

  bool result_saved = false;  // Is the result saved to the stack?
  ZoneList<Expression*>* subexprs = expr->values();
  int length = subexprs->length();

  // Emit code to evaluate all the non-constant subexpressions and to store
  // them into the newly cloned array.
  int array_index = 0;
  for (; array_index < length; array_index++) {
    Expression* subexpr = subexprs->at(array_index);
    DCHECK(!subexpr->IsSpread());

    // If the subexpression is a literal or a simple materialized literal it
    // is already set in the cloned array.
    if (CompileTimeValue::IsCompileTimeValue(subexpr)) continue;

    if (!result_saved) {
      PushOperand(v0);  // array literal
      result_saved = true;
    }

    VisitForAccumulatorValue(subexpr);

    __ li(StoreDescriptor::NameRegister(), Operand(Smi::FromInt(array_index)));
    __ lw(StoreDescriptor::ReceiverRegister(), MemOperand(sp, 0));
    __ mov(StoreDescriptor::ValueRegister(), result_register());
    EmitLoadStoreICSlot(expr->LiteralFeedbackSlot());
    Handle<Code> ic =
        CodeFactory::KeyedStoreIC(isolate(), language_mode()).code();
    CallIC(ic);

    PrepareForBailoutForId(expr->GetIdForElement(array_index),
                           BailoutState::NO_REGISTERS);
  }

  // In case the array literal contains spread expressions it has two parts.
  // The first part is the "static" array, which has a literal index and is
  // handled above. The second part starts at the first spread expression
  // (inclusive), and these elements get appended to the array. Note that the
  // number of elements an iterable produces is unknown ahead of time.
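  // E.g. in [1, 2, ...xs, 3] only 1 and 2 are stored through the keyed store
  // IC above; the remaining elements are appended one at a time via the
  // Runtime::kAppendElement calls below (spreads themselves have already been
  // desugared by this point, hence the DCHECK(!subexpr->IsSpread()) checks).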
  if (array_index < length && result_saved) {
    PopOperand(v0);
    result_saved = false;
  }
  for (; array_index < length; array_index++) {
    Expression* subexpr = subexprs->at(array_index);

    PushOperand(v0);
    DCHECK(!subexpr->IsSpread());
    VisitForStackValue(subexpr);
    CallRuntimeWithOperands(Runtime::kAppendElement);

    PrepareForBailoutForId(expr->GetIdForElement(array_index),
                           BailoutState::NO_REGISTERS);
  }

  if (result_saved) {
    context()->PlugTOS();
  } else {
    context()->Plug(v0);
  }
}


void FullCodeGenerator::VisitAssignment(Assignment* expr) {
  DCHECK(expr->target()->IsValidReferenceExpressionOrThis());

  Comment cmnt(masm_, "[ Assignment");

  Property* property = expr->target()->AsProperty();
  LhsKind assign_type = Property::GetAssignType(property);

  // Evaluate LHS expression.
  switch (assign_type) {
    case VARIABLE:
      // Nothing to do here.
      break;
    case NAMED_PROPERTY:
      if (expr->is_compound()) {
        // We need the receiver both on the stack and in the register.
        VisitForStackValue(property->obj());
        __ lw(LoadDescriptor::ReceiverRegister(), MemOperand(sp, 0));
      } else {
        VisitForStackValue(property->obj());
      }
      break;
    case NAMED_SUPER_PROPERTY:
      VisitForStackValue(
          property->obj()->AsSuperPropertyReference()->this_var());
      VisitForAccumulatorValue(
          property->obj()->AsSuperPropertyReference()->home_object());
      PushOperand(result_register());
      if (expr->is_compound()) {
        const Register scratch = a1;
        __ lw(scratch, MemOperand(sp, kPointerSize));
        PushOperands(scratch, result_register());
      }
      break;
    case KEYED_SUPER_PROPERTY: {
      const Register scratch = a1;
      VisitForStackValue(
          property->obj()->AsSuperPropertyReference()->this_var());
      VisitForAccumulatorValue(
          property->obj()->AsSuperPropertyReference()->home_object());
      __ Move(scratch, result_register());
      VisitForAccumulatorValue(property->key());
      PushOperands(scratch, result_register());
      if (expr->is_compound()) {
        const Register scratch1 = t0;
        __ lw(scratch1, MemOperand(sp, 2 * kPointerSize));
        PushOperands(scratch1, scratch, result_register());
      }
      break;
    }
    case KEYED_PROPERTY:
      // We need the key and receiver on both the stack and in v0 and a1.
      if (expr->is_compound()) {
        VisitForStackValue(property->obj());
        VisitForStackValue(property->key());
        __ lw(LoadDescriptor::ReceiverRegister(),
              MemOperand(sp, 1 * kPointerSize));
        __ lw(LoadDescriptor::NameRegister(), MemOperand(sp, 0));
      } else {
        VisitForStackValue(property->obj());
        VisitForStackValue(property->key());
      }
      break;
  }

  // For compound assignments we need another deoptimization point after the
  // variable/property load.
  if (expr->is_compound()) {
    { AccumulatorValueContext context(this);
      switch (assign_type) {
        case VARIABLE:
          EmitVariableLoad(expr->target()->AsVariableProxy());
          PrepareForBailout(expr->target(), BailoutState::TOS_REGISTER);
          break;
        case NAMED_PROPERTY:
          EmitNamedPropertyLoad(property);
          PrepareForBailoutForId(property->LoadId(),
                                 BailoutState::TOS_REGISTER);
          break;
        case NAMED_SUPER_PROPERTY:
          EmitNamedSuperPropertyLoad(property);
          PrepareForBailoutForId(property->LoadId(),
                                 BailoutState::TOS_REGISTER);
          break;
        case KEYED_SUPER_PROPERTY:
          EmitKeyedSuperPropertyLoad(property);
          PrepareForBailoutForId(property->LoadId(),
                                 BailoutState::TOS_REGISTER);
          break;
        case KEYED_PROPERTY:
          EmitKeyedPropertyLoad(property);
          PrepareForBailoutForId(property->LoadId(),
                                 BailoutState::TOS_REGISTER);
          break;
      }
    }

    Token::Value op = expr->binary_op();
    PushOperand(v0);  // Left operand goes on the stack.
    VisitForAccumulatorValue(expr->value());

    AccumulatorValueContext context(this);
    if (ShouldInlineSmiCase(op)) {
      EmitInlineSmiBinaryOp(expr->binary_operation(),
                            op,
                            expr->target(),
                            expr->value());
    } else {
      EmitBinaryOp(expr->binary_operation(), op);
    }

    // Deoptimization point in case the binary operation may have side effects.
    PrepareForBailout(expr->binary_operation(), BailoutState::TOS_REGISTER);
  } else {
    VisitForAccumulatorValue(expr->value());
  }

  SetExpressionPosition(expr);

  // Store the value.
  switch (assign_type) {
    case VARIABLE:
      EmitVariableAssignment(expr->target()->AsVariableProxy()->var(),
                             expr->op(), expr->AssignmentSlot());
      PrepareForBailoutForId(expr->AssignmentId(), BailoutState::TOS_REGISTER);
      context()->Plug(v0);
      break;
    case NAMED_PROPERTY:
      EmitNamedPropertyAssignment(expr);
      break;
    case NAMED_SUPER_PROPERTY:
      EmitNamedSuperPropertyStore(property);
      context()->Plug(v0);
      break;
    case KEYED_SUPER_PROPERTY:
      EmitKeyedSuperPropertyStore(property);
      context()->Plug(v0);
      break;
    case KEYED_PROPERTY:
      EmitKeyedPropertyAssignment(expr);
      break;
  }
}


void FullCodeGenerator::VisitYield(Yield* expr) {
  Comment cmnt(masm_, "[ Yield");
  SetExpressionPosition(expr);

  // Evaluate yielded value first; the initial iterator definition depends on
  // this. It stays on the stack while we update the iterator.
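  // Informal sketch of the suspend/resume protocol emitted below: at
  // &suspend the continuation position and current context are written into
  // the generator object before leaving the frame; a later resume re-enters
  // at &continuation with the generator object in v0, where kResumeModeOffset
  // distinguishes next(), throw(), and return() resumes.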
  VisitForStackValue(expr->expression());

  Label suspend, continuation, post_runtime, resume, exception;

  __ jmp(&suspend);
  __ bind(&continuation);
  // When we arrive here, v0 holds the generator object.
  __ RecordGeneratorContinuation();
  __ lw(a1, FieldMemOperand(v0, JSGeneratorObject::kResumeModeOffset));
  __ lw(v0, FieldMemOperand(v0, JSGeneratorObject::kInputOrDebugPosOffset));
  __ Branch(&resume, eq, a1, Operand(Smi::FromInt(JSGeneratorObject::kNext)));
  __ Push(result_register());
  __ Branch(&exception, eq, a1,
            Operand(Smi::FromInt(JSGeneratorObject::kThrow)));
  EmitCreateIteratorResult(true);
  EmitUnwindAndReturn();

  __ bind(&exception);
  __ CallRuntime(Runtime::kThrow);

  __ bind(&suspend);
  OperandStackDepthIncrement(1);  // Not popped on this path.
  VisitForAccumulatorValue(expr->generator_object());
  DCHECK(continuation.pos() > 0 && Smi::IsValid(continuation.pos()));
  __ li(a1, Operand(Smi::FromInt(continuation.pos())));
  __ sw(a1, FieldMemOperand(v0, JSGeneratorObject::kContinuationOffset));
  __ sw(cp, FieldMemOperand(v0, JSGeneratorObject::kContextOffset));
  __ mov(a1, cp);
  __ RecordWriteField(v0, JSGeneratorObject::kContextOffset, a1, a2,
                      kRAHasBeenSaved, kDontSaveFPRegs);
  __ Addu(a1, fp, Operand(StandardFrameConstants::kExpressionsOffset));
  __ Branch(&post_runtime, eq, sp, Operand(a1));
  __ push(v0);  // generator object
  __ CallRuntime(Runtime::kSuspendJSGeneratorObject, 1);
  RestoreContext();
  __ bind(&post_runtime);
  PopOperand(result_register());
  EmitReturnSequence();

  __ bind(&resume);
  context()->Plug(result_register());
}

void FullCodeGenerator::PushOperands(Register reg1, Register reg2) {
  OperandStackDepthIncrement(2);
  __ Push(reg1, reg2);
}

void FullCodeGenerator::PushOperands(Register reg1, Register reg2,
                                     Register reg3) {
  OperandStackDepthIncrement(3);
  __ Push(reg1, reg2, reg3);
}

void FullCodeGenerator::PushOperands(Register reg1, Register reg2,
                                     Register reg3, Register reg4) {
  OperandStackDepthIncrement(4);
  __ Push(reg1, reg2, reg3, reg4);
}

void FullCodeGenerator::PopOperands(Register reg1, Register reg2) {
  OperandStackDepthDecrement(2);
  __ Pop(reg1, reg2);
}

void FullCodeGenerator::EmitOperandStackDepthCheck() {
  if (FLAG_debug_code) {
    int expected_diff = StandardFrameConstants::kFixedFrameSizeFromFp +
                        operand_stack_depth_ * kPointerSize;
    __ Subu(v0, fp, sp);
    __ Assert(eq, kUnexpectedStackDepth, v0, Operand(expected_diff));
  }
}

void FullCodeGenerator::EmitCreateIteratorResult(bool done) {
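  // Builds the { value, done } JSIteratorResult object handed back to the
  // caller at each generator suspension. Informal layout sketch, matching
  // the stores and the STATIC_ASSERT below: map, properties, elements,
  // value, done: five pointer-sized fields.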
  Label allocate, done_allocate;

  __ Allocate(JSIteratorResult::kSize, v0, a2, a3, &allocate,
              NO_ALLOCATION_FLAGS);
  __ jmp(&done_allocate);

  __ bind(&allocate);
  __ Push(Smi::FromInt(JSIteratorResult::kSize));
  __ CallRuntime(Runtime::kAllocateInNewSpace);

  __ bind(&done_allocate);
  __ LoadNativeContextSlot(Context::ITERATOR_RESULT_MAP_INDEX, a1);
  PopOperand(a2);
  __ LoadRoot(a3,
              done ? Heap::kTrueValueRootIndex : Heap::kFalseValueRootIndex);
  __ LoadRoot(t0, Heap::kEmptyFixedArrayRootIndex);
  __ sw(a1, FieldMemOperand(v0, HeapObject::kMapOffset));
  __ sw(t0, FieldMemOperand(v0, JSObject::kPropertiesOffset));
  __ sw(t0, FieldMemOperand(v0, JSObject::kElementsOffset));
  __ sw(a2, FieldMemOperand(v0, JSIteratorResult::kValueOffset));
  __ sw(a3, FieldMemOperand(v0, JSIteratorResult::kDoneOffset));
  STATIC_ASSERT(JSIteratorResult::kSize == 5 * kPointerSize);
}


void FullCodeGenerator::EmitInlineSmiBinaryOp(BinaryOperation* expr,
                                              Token::Value op,
                                              Expression* left_expr,
                                              Expression* right_expr) {
  Label done, smi_case, stub_call;

  Register scratch1 = a2;
  Register scratch2 = a3;

  // Get the arguments.
  Register left = a1;
  Register right = a0;
  PopOperand(left);
  __ mov(a0, result_register());

  // Perform combined smi check on both operands.
  __ Or(scratch1, left, Operand(right));
  STATIC_ASSERT(kSmiTag == 0);
  JumpPatchSite patch_site(masm_);
  patch_site.EmitJumpIfSmi(scratch1, &smi_case);

  __ bind(&stub_call);
  Handle<Code> code = CodeFactory::BinaryOpIC(isolate(), op).code();
  CallIC(code, expr->BinaryOperationFeedbackId());
  patch_site.EmitPatchInfo();
  __ jmp(&done);

  __ bind(&smi_case);
  // Smi case. This code works the same way as the smi-smi case in the
  // type-recording binary operation stub.
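  // Informal note on the overflow checks below: a smi payload is a signed
  // 31-bit value in [-2^30, 2^30 - 1]. For SHL, adding 0x40000000 maps
  // exactly that range onto the non-negative 32-bit values, so a negative
  // sum means the result cannot be re-tagged. For SHR, either of the two top
  // bits being set (mask 0xc0000000) likewise flags an untaggable result.
  // The MUL case also bails out when the product is zero but an operand is
  // negative, since -0 has no smi representation.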
  switch (op) {
    case Token::SAR:
      __ GetLeastBitsFromSmi(scratch1, right, 5);
      __ srav(right, left, scratch1);
      __ And(v0, right, Operand(~kSmiTagMask));
      break;
    case Token::SHL: {
      __ SmiUntag(scratch1, left);
      __ GetLeastBitsFromSmi(scratch2, right, 5);
      __ sllv(scratch1, scratch1, scratch2);
      __ Addu(scratch2, scratch1, Operand(0x40000000));
      __ Branch(&stub_call, lt, scratch2, Operand(zero_reg));
      __ SmiTag(v0, scratch1);
      break;
    }
    case Token::SHR: {
      __ SmiUntag(scratch1, left);
      __ GetLeastBitsFromSmi(scratch2, right, 5);
      __ srlv(scratch1, scratch1, scratch2);
      __ And(scratch2, scratch1, 0xc0000000);
      __ Branch(&stub_call, ne, scratch2, Operand(zero_reg));
      __ SmiTag(v0, scratch1);
      break;
    }
    case Token::ADD:
      __ AddBranchOvf(v0, left, Operand(right), &stub_call);
      break;
    case Token::SUB:
      __ SubBranchOvf(v0, left, Operand(right), &stub_call);
      break;
    case Token::MUL: {
      __ SmiUntag(scratch1, right);
      __ Mul(scratch2, v0, left, scratch1);
      __ sra(scratch1, v0, 31);
      __ Branch(&stub_call, ne, scratch1, Operand(scratch2));
      __ Branch(&done, ne, v0, Operand(zero_reg));
      __ Addu(scratch2, right, left);
      __ Branch(&stub_call, lt, scratch2, Operand(zero_reg));
      DCHECK(Smi::FromInt(0) == 0);
      __ mov(v0, zero_reg);
      break;
    }
    case Token::BIT_OR:
      __ Or(v0, left, Operand(right));
      break;
    case Token::BIT_AND:
      __ And(v0, left, Operand(right));
      break;
    case Token::BIT_XOR:
      __ Xor(v0, left, Operand(right));
      break;
    default:
      UNREACHABLE();
  }

  __ bind(&done);
  context()->Plug(v0);
}


void FullCodeGenerator::EmitClassDefineProperties(ClassLiteral* lit) {
  for (int i = 0; i < lit->properties()->length(); i++) {
    ObjectLiteral::Property* property = lit->properties()->at(i);
    Expression* value = property->value();

    Register scratch = a1;
    if (property->is_static()) {
      __ lw(scratch, MemOperand(sp, kPointerSize));  // constructor
    } else {
      __ lw(scratch, MemOperand(sp, 0));  // prototype
    }
    PushOperand(scratch);
    EmitPropertyKey(property, lit->GetIdForProperty(i));

    // The static prototype property is read only. We handle the non computed
    // property name case in the parser. Since this is the only case where we
    // need to check for an own read only property we special case this so we
    // do not need to do this for every property.
    if (property->is_static() && property->is_computed_name()) {
      __ CallRuntime(Runtime::kThrowIfStaticPrototype);
      __ push(v0);
    }

    VisitForStackValue(value);
    if (NeedsHomeObject(value)) {
      EmitSetHomeObject(value, 2, property->GetSlot());
    }

    switch (property->kind()) {
      case ObjectLiteral::Property::CONSTANT:
      case ObjectLiteral::Property::MATERIALIZED_LITERAL:
      case ObjectLiteral::Property::PROTOTYPE:
        UNREACHABLE();
      case ObjectLiteral::Property::COMPUTED:
        PushOperand(Smi::FromInt(DONT_ENUM));
        PushOperand(Smi::FromInt(property->NeedsSetFunctionName()));
        CallRuntimeWithOperands(Runtime::kDefineDataPropertyInLiteral);
        break;

      case ObjectLiteral::Property::GETTER:
        PushOperand(Smi::FromInt(DONT_ENUM));
        CallRuntimeWithOperands(Runtime::kDefineGetterPropertyUnchecked);
        break;

      case ObjectLiteral::Property::SETTER:
        PushOperand(Smi::FromInt(DONT_ENUM));
        CallRuntimeWithOperands(Runtime::kDefineSetterPropertyUnchecked);
        break;

      default:
        UNREACHABLE();
    }
  }
}


void FullCodeGenerator::EmitBinaryOp(BinaryOperation* expr, Token::Value op) {
  __ mov(a0, result_register());
  PopOperand(a1);
  Handle<Code> code = CodeFactory::BinaryOpIC(isolate(), op).code();
  JumpPatchSite patch_site(masm_);  // unbound, signals no inlined smi code.
  CallIC(code, expr->BinaryOperationFeedbackId());
  patch_site.EmitPatchInfo();
  context()->Plug(v0);
}


void FullCodeGenerator::EmitAssignment(Expression* expr,
                                       FeedbackVectorSlot slot) {
  DCHECK(expr->IsValidReferenceExpressionOrThis());

  Property* prop = expr->AsProperty();
  LhsKind assign_type = Property::GetAssignType(prop);

  switch (assign_type) {
    case VARIABLE: {
      Variable* var = expr->AsVariableProxy()->var();
      EffectContext context(this);
      EmitVariableAssignment(var, Token::ASSIGN, slot);
      break;
    }
    case NAMED_PROPERTY: {
      PushOperand(result_register());  // Preserve value.
      VisitForAccumulatorValue(prop->obj());
      __ mov(StoreDescriptor::ReceiverRegister(), result_register());
      PopOperand(StoreDescriptor::ValueRegister());  // Restore value.
      __ li(StoreDescriptor::NameRegister(),
            Operand(prop->key()->AsLiteral()->value()));
      EmitLoadStoreICSlot(slot);
      CallStoreIC();
      break;
    }
    case NAMED_SUPER_PROPERTY: {
      PushOperand(v0);
      VisitForStackValue(prop->obj()->AsSuperPropertyReference()->this_var());
      VisitForAccumulatorValue(
          prop->obj()->AsSuperPropertyReference()->home_object());
      // stack: value, this; v0: home_object
      Register scratch = a2;
      Register scratch2 = a3;
      __ mov(scratch, result_register());             // home_object
      __ lw(v0, MemOperand(sp, kPointerSize));        // value
      __ lw(scratch2, MemOperand(sp, 0));             // this
      __ sw(scratch2, MemOperand(sp, kPointerSize));  // this
      __ sw(scratch, MemOperand(sp, 0));              // home_object
      // stack: this, home_object; v0: value
      EmitNamedSuperPropertyStore(prop);
      break;
    }
    case KEYED_SUPER_PROPERTY: {
      PushOperand(v0);
      VisitForStackValue(prop->obj()->AsSuperPropertyReference()->this_var());
      VisitForStackValue(
          prop->obj()->AsSuperPropertyReference()->home_object());
      VisitForAccumulatorValue(prop->key());
      Register scratch = a2;
      Register scratch2 = a3;
      __ lw(scratch2, MemOperand(sp, 2 * kPointerSize));  // value
      // stack: value, this, home_object; v0: key, a3: value
      __ lw(scratch, MemOperand(sp, kPointerSize));  // this
      __ sw(scratch, MemOperand(sp, 2 * kPointerSize));
      __ lw(scratch, MemOperand(sp, 0));  // home_object
      __ sw(scratch, MemOperand(sp, kPointerSize));
      __ sw(v0, MemOperand(sp, 0));
      __ Move(v0, scratch2);
      // stack: this, home_object, key; v0: value.
      EmitKeyedSuperPropertyStore(prop);
      break;
    }
    case KEYED_PROPERTY: {
      PushOperand(result_register());  // Preserve value.
      VisitForStackValue(prop->obj());
      VisitForAccumulatorValue(prop->key());
      __ mov(StoreDescriptor::NameRegister(), result_register());
      PopOperands(StoreDescriptor::ValueRegister(),
                  StoreDescriptor::ReceiverRegister());
      EmitLoadStoreICSlot(slot);
      Handle<Code> ic =
          CodeFactory::KeyedStoreIC(isolate(), language_mode()).code();
      CallIC(ic);
      break;
    }
  }
  context()->Plug(v0);
}


void FullCodeGenerator::EmitStoreToStackLocalOrContextSlot(
    Variable* var, MemOperand location) {
  __ sw(result_register(), location);
  if (var->IsContextSlot()) {
    // RecordWrite may destroy all its register arguments.
    __ Move(a3, result_register());
    int offset = Context::SlotOffset(var->index());
    __ RecordWriteContextSlot(
        a1, offset, a3, a2, kRAHasBeenSaved, kDontSaveFPRegs);
  }
}


void FullCodeGenerator::EmitVariableAssignment(Variable* var, Token::Value op,
                                               FeedbackVectorSlot slot) {
  if (var->IsUnallocated()) {
    // Global var, const, or let.
    __ mov(StoreDescriptor::ValueRegister(), result_register());
    __ li(StoreDescriptor::NameRegister(), Operand(var->name()));
    __ LoadGlobalObject(StoreDescriptor::ReceiverRegister());
    EmitLoadStoreICSlot(slot);
    CallStoreIC();

  } else if (var->mode() == LET && op != Token::INIT) {
    // Non-initializing assignment to let variable needs a write barrier.
    DCHECK(!var->IsLookupSlot());
    DCHECK(var->IsStackAllocated() || var->IsContextSlot());
    Label assign;
    MemOperand location = VarOperand(var, a1);
    __ lw(a3, location);
    __ LoadRoot(t0, Heap::kTheHoleValueRootIndex);
    __ Branch(&assign, ne, a3, Operand(t0));
    __ li(a3, Operand(var->name()));
    __ push(a3);
    __ CallRuntime(Runtime::kThrowReferenceError);
    // Perform the assignment.
    __ bind(&assign);
    EmitStoreToStackLocalOrContextSlot(var, location);

  } else if (var->mode() == CONST && op != Token::INIT) {
    // Assignment to const variable needs a write barrier.
    DCHECK(!var->IsLookupSlot());
    DCHECK(var->IsStackAllocated() || var->IsContextSlot());
    Label const_error;
    MemOperand location = VarOperand(var, a1);
    __ lw(a3, location);
    __ LoadRoot(at, Heap::kTheHoleValueRootIndex);
    __ Branch(&const_error, ne, a3, Operand(at));
    __ li(a3, Operand(var->name()));
    __ push(a3);
    __ CallRuntime(Runtime::kThrowReferenceError);
    __ bind(&const_error);
    __ CallRuntime(Runtime::kThrowConstAssignError);

  } else if (var->is_this() && var->mode() == CONST && op == Token::INIT) {
    // Initializing assignment to const {this} needs a write barrier.
    DCHECK(var->IsStackAllocated() || var->IsContextSlot());
    Label uninitialized_this;
    MemOperand location = VarOperand(var, a1);
    __ lw(a3, location);
    __ LoadRoot(at, Heap::kTheHoleValueRootIndex);
    __ Branch(&uninitialized_this, eq, a3, Operand(at));
    __ li(a0, Operand(var->name()));
    __ Push(a0);
    __ CallRuntime(Runtime::kThrowReferenceError);
    __ bind(&uninitialized_this);
    EmitStoreToStackLocalOrContextSlot(var, location);

  } else if (!var->is_const_mode() || op == Token::INIT) {
    if (var->IsLookupSlot()) {
      // Assignment to var.
      __ Push(var->name());
      __ Push(v0);
      __ CallRuntime(is_strict(language_mode())
                         ? Runtime::kStoreLookupSlot_Strict
                         : Runtime::kStoreLookupSlot_Sloppy);
    } else {
      // Assignment to var or initializing assignment to let/const in harmony
      // mode.
      DCHECK((var->IsStackAllocated() || var->IsContextSlot()));
      MemOperand location = VarOperand(var, a1);
      if (FLAG_debug_code && var->mode() == LET && op == Token::INIT) {
        // Check for an uninitialized let binding.
        __ lw(a2, location);
        __ LoadRoot(t0, Heap::kTheHoleValueRootIndex);
        __ Check(eq, kLetBindingReInitialization, a2, Operand(t0));
      }
      EmitStoreToStackLocalOrContextSlot(var, location);
    }

  } else {
    DCHECK(var->mode() == CONST_LEGACY && op != Token::INIT);
    if (is_strict(language_mode())) {
      __ CallRuntime(Runtime::kThrowConstAssignError);
    }
    // Silently ignore store in sloppy mode.
  }
}


void FullCodeGenerator::EmitNamedPropertyAssignment(Assignment* expr) {
  // Assignment to a property, using a named store IC.
  Property* prop = expr->target()->AsProperty();
  DCHECK(prop != NULL);
  DCHECK(prop->key()->IsLiteral());

  __ mov(StoreDescriptor::ValueRegister(), result_register());
  __ li(StoreDescriptor::NameRegister(),
        Operand(prop->key()->AsLiteral()->value()));
  PopOperand(StoreDescriptor::ReceiverRegister());
  EmitLoadStoreICSlot(expr->AssignmentSlot());
  CallStoreIC();

  PrepareForBailoutForId(expr->AssignmentId(), BailoutState::TOS_REGISTER);
  context()->Plug(v0);
}


void FullCodeGenerator::EmitNamedSuperPropertyStore(Property* prop) {
  // Assignment to named property of super.
  // v0 : value
  // stack : receiver ('this'), home_object
  DCHECK(prop != NULL);
  Literal* key = prop->key()->AsLiteral();
  DCHECK(key != NULL);

  PushOperand(key->value());
  PushOperand(v0);
  CallRuntimeWithOperands(is_strict(language_mode())
                              ? Runtime::kStoreToSuper_Strict
                              : Runtime::kStoreToSuper_Sloppy);
}


void FullCodeGenerator::EmitKeyedSuperPropertyStore(Property* prop) {
  // Assignment to keyed property of super.
  // v0 : value
  // stack : receiver ('this'), home_object, key
  DCHECK(prop != NULL);

  PushOperand(v0);
  CallRuntimeWithOperands(is_strict(language_mode())
                              ? Runtime::kStoreKeyedToSuper_Strict
                              : Runtime::kStoreKeyedToSuper_Sloppy);
}


void FullCodeGenerator::EmitKeyedPropertyAssignment(Assignment* expr) {
  // Assignment to a property, using a keyed store IC.
  // Call keyed store IC.
  // The arguments are:
  // - a0 is the value,
  // - a1 is the key,
  // - a2 is the receiver.
  __ mov(StoreDescriptor::ValueRegister(), result_register());
  PopOperands(StoreDescriptor::ReceiverRegister(),
              StoreDescriptor::NameRegister());
  DCHECK(StoreDescriptor::ValueRegister().is(a0));

  Handle<Code> ic =
      CodeFactory::KeyedStoreIC(isolate(), language_mode()).code();
  EmitLoadStoreICSlot(expr->AssignmentSlot());
  CallIC(ic);

  PrepareForBailoutForId(expr->AssignmentId(), BailoutState::TOS_REGISTER);
  context()->Plug(v0);
}


void FullCodeGenerator::CallIC(Handle<Code> code,
                               TypeFeedbackId id) {
  ic_total_count_++;
  __ Call(code, RelocInfo::CODE_TARGET, id);
}


// Code common for calls using the IC.
void FullCodeGenerator::EmitCallWithLoadIC(Call* expr) {
  Expression* callee = expr->expression();

  // Get the target function.
  ConvertReceiverMode convert_mode;
  if (callee->IsVariableProxy()) {
    { StackValueContext context(this);
      EmitVariableLoad(callee->AsVariableProxy());
      PrepareForBailout(callee, BailoutState::NO_REGISTERS);
    }
    // Push undefined as receiver. This is patched in the method prologue if it
    // is a sloppy mode method.
    __ LoadRoot(at, Heap::kUndefinedValueRootIndex);
    PushOperand(at);
    convert_mode = ConvertReceiverMode::kNullOrUndefined;
  } else {
    // Load the function from the receiver.
    DCHECK(callee->IsProperty());
    DCHECK(!callee->AsProperty()->IsSuperAccess());
    __ lw(LoadDescriptor::ReceiverRegister(), MemOperand(sp, 0));
    EmitNamedPropertyLoad(callee->AsProperty());
    PrepareForBailoutForId(callee->AsProperty()->LoadId(),
                           BailoutState::TOS_REGISTER);
    // Push the target function under the receiver.
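    // (Roughly: the receiver is duplicated on top of the stack, then the
    // original slot is overwritten with the function in v0, so the receiver
    // stays on top with the callee directly beneath it.)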
    __ lw(at, MemOperand(sp, 0));
    PushOperand(at);
    __ sw(v0, MemOperand(sp, kPointerSize));
    convert_mode = ConvertReceiverMode::kNotNullOrUndefined;
  }

  EmitCall(expr, convert_mode);
}


void FullCodeGenerator::EmitSuperCallWithLoadIC(Call* expr) {
  SetExpressionPosition(expr);
  Expression* callee = expr->expression();
  DCHECK(callee->IsProperty());
  Property* prop = callee->AsProperty();
  DCHECK(prop->IsSuperAccess());

  Literal* key = prop->key()->AsLiteral();
  DCHECK(!key->value()->IsSmi());
  // Load the function from the receiver.
  const Register scratch = a1;
  SuperPropertyReference* super_ref = prop->obj()->AsSuperPropertyReference();
  VisitForAccumulatorValue(super_ref->home_object());
  __ mov(scratch, v0);
  VisitForAccumulatorValue(super_ref->this_var());
  PushOperands(scratch, v0, v0, scratch);
  PushOperand(key->value());

  // Stack here:
  //  - home_object
  //  - this (receiver)
  //  - this (receiver) <-- LoadFromSuper will pop here and below.
  //  - home_object
  //  - key
  CallRuntimeWithOperands(Runtime::kLoadFromSuper);
  PrepareForBailoutForId(prop->LoadId(), BailoutState::TOS_REGISTER);

  // Replace home_object with target function.
  __ sw(v0, MemOperand(sp, kPointerSize));

  // Stack here:
  // - target function
  // - this (receiver)
  EmitCall(expr);
}


// Code common for calls using the IC.
void FullCodeGenerator::EmitKeyedCallWithLoadIC(Call* expr,
                                                Expression* key) {
  // Load the key.
  VisitForAccumulatorValue(key);

  Expression* callee = expr->expression();

  // Load the function from the receiver.
  DCHECK(callee->IsProperty());
  __ lw(LoadDescriptor::ReceiverRegister(), MemOperand(sp, 0));
  __ Move(LoadDescriptor::NameRegister(), v0);
  EmitKeyedPropertyLoad(callee->AsProperty());
  PrepareForBailoutForId(callee->AsProperty()->LoadId(),
                         BailoutState::TOS_REGISTER);

  // Push the target function under the receiver.
  __ lw(at, MemOperand(sp, 0));
  PushOperand(at);
  __ sw(v0, MemOperand(sp, kPointerSize));

  EmitCall(expr, ConvertReceiverMode::kNotNullOrUndefined);
}


void FullCodeGenerator::EmitKeyedSuperCallWithLoadIC(Call* expr) {
  Expression* callee = expr->expression();
  DCHECK(callee->IsProperty());
  Property* prop = callee->AsProperty();
  DCHECK(prop->IsSuperAccess());

  SetExpressionPosition(prop);
  // Load the function from the receiver.
  const Register scratch = a1;
  SuperPropertyReference* super_ref = prop->obj()->AsSuperPropertyReference();
  VisitForAccumulatorValue(super_ref->home_object());
  __ Move(scratch, v0);
  VisitForAccumulatorValue(super_ref->this_var());
  PushOperands(scratch, v0, v0, scratch);
  VisitForStackValue(prop->key());

  // Stack here:
  //  - home_object
  //  - this (receiver)
  //  - this (receiver) <-- LoadKeyedFromSuper will pop here and below.
  //  - home_object
  //  - key
  CallRuntimeWithOperands(Runtime::kLoadKeyedFromSuper);
  PrepareForBailoutForId(prop->LoadId(), BailoutState::TOS_REGISTER);

  // Replace home_object with target function.
  __ sw(v0, MemOperand(sp, kPointerSize));

  // Stack here:
  // - target function
  // - this (receiver)
  EmitCall(expr);
}


void FullCodeGenerator::EmitCall(Call* expr, ConvertReceiverMode mode) {
  // Load the arguments.
  ZoneList<Expression*>* args = expr->arguments();
  int arg_count = args->length();
  for (int i = 0; i < arg_count; i++) {
    VisitForStackValue(args->at(i));
  }

  PrepareForBailoutForId(expr->CallId(), BailoutState::NO_REGISTERS);
  // Record source position of the IC call.
  SetCallPosition(expr, expr->tail_call_mode());
  if (expr->tail_call_mode() == TailCallMode::kAllow) {
    if (FLAG_trace) {
      __ CallRuntime(Runtime::kTraceTailCall);
    }
    // Update profiling counters before the tail call since we will
    // not return to this function.
    EmitProfilingCounterHandlingForReturnSequence(true);
  }
  Handle<Code> ic =
      CodeFactory::CallIC(isolate(), arg_count, mode, expr->tail_call_mode())
          .code();
  __ li(a3, Operand(SmiFromSlot(expr->CallFeedbackICSlot())));
  __ lw(a1, MemOperand(sp, (arg_count + 1) * kPointerSize));
  // Don't assign a type feedback id to the IC, since type feedback is provided
  // by the vector above.
  CallIC(ic);
  OperandStackDepthDecrement(arg_count + 1);

  RecordJSReturnSite(expr);
  RestoreContext();
  context()->DropAndPlug(1, v0);
}

void FullCodeGenerator::EmitResolvePossiblyDirectEval(Call* expr) {
  int arg_count = expr->arguments()->length();
  // t4: copy of the first argument or undefined if it doesn't exist.
  if (arg_count > 0) {
    __ lw(t4, MemOperand(sp, arg_count * kPointerSize));
  } else {
    __ LoadRoot(t4, Heap::kUndefinedValueRootIndex);
  }

  // t3: the function of the enclosing frame.
  __ lw(t3, MemOperand(fp, JavaScriptFrameConstants::kFunctionOffset));

  // t2: the language mode.
  __ li(t2, Operand(Smi::FromInt(language_mode())));

  // t1: the start position of the scope the call resides in.
  __ li(t1, Operand(Smi::FromInt(scope()->start_position())));

  // t0: the source position of the eval call.
  __ li(t0, Operand(Smi::FromInt(expr->position())));

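  // (By this point the caller, EmitPossiblyEvalCall, has already pushed a
  // copy of the callee, so together with the five values pushed below the
  // resolver runtime function receives six arguments in total.)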
  // Do the runtime call.
  __ Push(t4, t3, t2, t1, t0);
  __ CallRuntime(Runtime::kResolvePossiblyDirectEval);
}


// See http://www.ecma-international.org/ecma-262/6.0/#sec-function-calls.
void FullCodeGenerator::PushCalleeAndWithBaseObject(Call* expr) {
  VariableProxy* callee = expr->expression()->AsVariableProxy();
  if (callee->var()->IsLookupSlot()) {
    Label slow, done;

    SetExpressionPosition(callee);
    // Generate code for loading from variables potentially shadowed by
    // eval-introduced variables.
    EmitDynamicLookupFastCase(callee, NOT_INSIDE_TYPEOF, &slow, &done);

    __ bind(&slow);
    // Call the runtime to find the function to call (returned in v0)
    // and the object holding it (returned in v1).
    __ Push(callee->name());
    __ CallRuntime(Runtime::kLoadLookupSlotForCall);
    PushOperands(v0, v1);  // Function, receiver.
    PrepareForBailoutForId(expr->LookupId(), BailoutState::NO_REGISTERS);

    // If fast case code has been generated, emit code to push the
    // function and receiver and have the slow path jump around this
    // code.
    if (done.is_linked()) {
      Label call;
      __ Branch(&call);
      __ bind(&done);
      // Push function.
      __ push(v0);
      // The receiver is implicitly the global receiver. Indicate this
      // by passing undefined to the call function stub.
      __ LoadRoot(a1, Heap::kUndefinedValueRootIndex);
      __ push(a1);
      __ bind(&call);
    }
  } else {
    VisitForStackValue(callee);
    // refEnv.WithBaseObject()
    __ LoadRoot(a2, Heap::kUndefinedValueRootIndex);
    PushOperand(a2);  // Reserved receiver slot.
  }
}


void FullCodeGenerator::EmitPossiblyEvalCall(Call* expr) {
  // In a call to eval, we first call Runtime_ResolvePossiblyDirectEval
  // to resolve the function we need to call. Then we call the resolved
  // function using the given arguments.
  ZoneList<Expression*>* args = expr->arguments();
  int arg_count = args->length();
  PushCalleeAndWithBaseObject(expr);

  // Push the arguments.
  for (int i = 0; i < arg_count; i++) {
    VisitForStackValue(args->at(i));
  }

  // Push a copy of the function (found below the arguments) and
  // resolve eval.
  __ lw(a1, MemOperand(sp, (arg_count + 1) * kPointerSize));
  __ push(a1);
  EmitResolvePossiblyDirectEval(expr);

  // Touch up the stack with the resolved function.
  __ sw(v0, MemOperand(sp, (arg_count + 1) * kPointerSize));

  PrepareForBailoutForId(expr->EvalId(), BailoutState::NO_REGISTERS);
  // Record source position for debugger.
  SetCallPosition(expr);
  __ lw(a1, MemOperand(sp, (arg_count + 1) * kPointerSize));
  __ li(a0, Operand(arg_count));
  __ Call(isolate()->builtins()->Call(ConvertReceiverMode::kAny,
                                      expr->tail_call_mode()),
          RelocInfo::CODE_TARGET);
  OperandStackDepthDecrement(arg_count + 1);
  RecordJSReturnSite(expr);
  RestoreContext();
  context()->DropAndPlug(1, v0);
}


void FullCodeGenerator::VisitCallNew(CallNew* expr) {
  Comment cmnt(masm_, "[ CallNew");
  // According to ECMA-262, section 11.2.2, page 44, the function
  // expression in new calls must be evaluated before the
  // arguments.

  // Push the constructor on the stack. If it's not a function it's used as
  // the receiver for CALL_NON_FUNCTION, otherwise the value on the stack is
  // ignored.
2619 DCHECK(!expr->expression()->IsSuperPropertyReference());
2620 VisitForStackValue(expr->expression());
2621
2622 // Push the arguments ("left-to-right") on the stack.
2623 ZoneList<Expression*>* args = expr->arguments();
2624 int arg_count = args->length();
2625 for (int i = 0; i < arg_count; i++) {
2626 VisitForStackValue(args->at(i));
2627 }
2628
2629 // Call the construct call builtin that handles allocation and
2630 // constructor invocation.
2631 SetConstructCallPosition(expr);
2632
2633 // Load function and argument count into a1 and a0.
2634 __ li(a0, Operand(arg_count));
2635 __ lw(a1, MemOperand(sp, arg_count * kPointerSize));
2636
2637 // Record call targets in unoptimized code.
2638 __ EmitLoadTypeFeedbackVector(a2);
2639 __ li(a3, Operand(SmiFromSlot(expr->CallNewFeedbackSlot())));
2640
2641 CallConstructStub stub(isolate());
2642 __ Call(stub.GetCode(), RelocInfo::CODE_TARGET);
Ben Murdoch097c5b22016-05-18 11:27:45 +01002643 OperandStackDepthDecrement(arg_count + 1);
Ben Murdochc5610432016-08-08 18:44:38 +01002644 PrepareForBailoutForId(expr->ReturnId(), BailoutState::TOS_REGISTER);
2645 RestoreContext();
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00002646 context()->Plug(v0);
2647}
2648
2649
2650void FullCodeGenerator::EmitSuperConstructorCall(Call* expr) {
2651 SuperCallReference* super_call_ref =
2652 expr->expression()->AsSuperCallReference();
2653 DCHECK_NOT_NULL(super_call_ref);
2654
2655 // Push the super constructor target on the stack (may be null,
2656 // but the Construct builtin can deal with that properly).
2657 VisitForAccumulatorValue(super_call_ref->this_function_var());
2658 __ AssertFunction(result_register());
2659 __ lw(result_register(),
2660 FieldMemOperand(result_register(), HeapObject::kMapOffset));
2661 __ lw(result_register(),
2662 FieldMemOperand(result_register(), Map::kPrototypeOffset));
Ben Murdoch097c5b22016-05-18 11:27:45 +01002663 PushOperand(result_register());
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00002664
2665 // Push the arguments ("left-to-right") on the stack.
2666 ZoneList<Expression*>* args = expr->arguments();
2667 int arg_count = args->length();
2668 for (int i = 0; i < arg_count; i++) {
2669 VisitForStackValue(args->at(i));
2670 }
2671
2672 // Call the construct call builtin that handles allocation and
2673 // constructor invocation.
2674 SetConstructCallPosition(expr);
2675
2676 // Load new target into a3.
2677 VisitForAccumulatorValue(super_call_ref->new_target_var());
2678 __ mov(a3, result_register());
2679
2680 // Load function and argument count into a1 and a0.
2681 __ li(a0, Operand(arg_count));
2682 __ lw(a1, MemOperand(sp, arg_count * kPointerSize));
2683
2684 __ Call(isolate()->builtins()->Construct(), RelocInfo::CODE_TARGET);
Ben Murdoch097c5b22016-05-18 11:27:45 +01002685 OperandStackDepthDecrement(arg_count + 1);
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00002686
2687 RecordJSReturnSite(expr);
Ben Murdochc5610432016-08-08 18:44:38 +01002688 RestoreContext();
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00002689 context()->Plug(v0);
2690}
2691
2692
2693void FullCodeGenerator::EmitIsSmi(CallRuntime* expr) {
2694 ZoneList<Expression*>* args = expr->arguments();
2695 DCHECK(args->length() == 1);
2696
2697 VisitForAccumulatorValue(args->at(0));
2698
2699 Label materialize_true, materialize_false;
2700 Label* if_true = NULL;
2701 Label* if_false = NULL;
2702 Label* fall_through = NULL;
2703 context()->PrepareTest(&materialize_true, &materialize_false,
2704 &if_true, &if_false, &fall_through);
2705
2706 PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
  __ SmiTst(v0, t0);
  Split(eq, t0, Operand(zero_reg), if_true, if_false, fall_through);

  context()->Plug(if_true, if_false);
}


void FullCodeGenerator::EmitIsJSReceiver(CallRuntime* expr) {
  ZoneList<Expression*>* args = expr->arguments();
  DCHECK(args->length() == 1);

  VisitForAccumulatorValue(args->at(0));

  Label materialize_true, materialize_false;
  Label* if_true = NULL;
  Label* if_false = NULL;
  Label* fall_through = NULL;
  context()->PrepareTest(&materialize_true, &materialize_false,
                         &if_true, &if_false, &fall_through);

  __ JumpIfSmi(v0, if_false);
  __ GetObjectType(v0, a1, a1);
  PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
  Split(ge, a1, Operand(FIRST_JS_RECEIVER_TYPE),
        if_true, if_false, fall_through);

  context()->Plug(if_true, if_false);
}


void FullCodeGenerator::EmitIsArray(CallRuntime* expr) {
  ZoneList<Expression*>* args = expr->arguments();
  DCHECK(args->length() == 1);

  VisitForAccumulatorValue(args->at(0));

  Label materialize_true, materialize_false;
  Label* if_true = NULL;
  Label* if_false = NULL;
  Label* fall_through = NULL;
  context()->PrepareTest(&materialize_true, &materialize_false,
                         &if_true, &if_false, &fall_through);

  __ JumpIfSmi(v0, if_false);
  __ GetObjectType(v0, a1, a1);
  PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
  Split(eq, a1, Operand(JS_ARRAY_TYPE),
        if_true, if_false, fall_through);

  context()->Plug(if_true, if_false);
}


void FullCodeGenerator::EmitIsTypedArray(CallRuntime* expr) {
  ZoneList<Expression*>* args = expr->arguments();
  DCHECK(args->length() == 1);

  VisitForAccumulatorValue(args->at(0));

  Label materialize_true, materialize_false;
  Label* if_true = NULL;
  Label* if_false = NULL;
  Label* fall_through = NULL;
  context()->PrepareTest(&materialize_true, &materialize_false, &if_true,
                         &if_false, &fall_through);

  __ JumpIfSmi(v0, if_false);
  __ GetObjectType(v0, a1, a1);
  PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
  Split(eq, a1, Operand(JS_TYPED_ARRAY_TYPE), if_true, if_false, fall_through);

  context()->Plug(if_true, if_false);
}


void FullCodeGenerator::EmitIsRegExp(CallRuntime* expr) {
  ZoneList<Expression*>* args = expr->arguments();
  DCHECK(args->length() == 1);

  VisitForAccumulatorValue(args->at(0));

  Label materialize_true, materialize_false;
  Label* if_true = NULL;
  Label* if_false = NULL;
  Label* fall_through = NULL;
  context()->PrepareTest(&materialize_true, &materialize_false,
                         &if_true, &if_false, &fall_through);

  __ JumpIfSmi(v0, if_false);
  __ GetObjectType(v0, a1, a1);
  PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
  Split(eq, a1, Operand(JS_REGEXP_TYPE), if_true, if_false, fall_through);

  context()->Plug(if_true, if_false);
}


void FullCodeGenerator::EmitIsJSProxy(CallRuntime* expr) {
  ZoneList<Expression*>* args = expr->arguments();
  DCHECK(args->length() == 1);

  VisitForAccumulatorValue(args->at(0));

  Label materialize_true, materialize_false;
  Label* if_true = NULL;
  Label* if_false = NULL;
  Label* fall_through = NULL;
  context()->PrepareTest(&materialize_true, &materialize_false, &if_true,
                         &if_false, &fall_through);

  __ JumpIfSmi(v0, if_false);
  __ GetObjectType(v0, a1, a1);
  PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
  Split(eq, a1, Operand(JS_PROXY_TYPE), if_true, if_false, fall_through);

  context()->Plug(if_true, if_false);
}


void FullCodeGenerator::EmitClassOf(CallRuntime* expr) {
  ZoneList<Expression*>* args = expr->arguments();
  DCHECK(args->length() == 1);
  Label done, null, function, non_function_constructor;

  VisitForAccumulatorValue(args->at(0));

  // If the object is not a JSReceiver, we return null.
  __ JumpIfSmi(v0, &null);
  STATIC_ASSERT(LAST_JS_RECEIVER_TYPE == LAST_TYPE);
  __ GetObjectType(v0, v0, a1);  // Map is now in v0.
  __ Branch(&null, lt, a1, Operand(FIRST_JS_RECEIVER_TYPE));

  // Return 'Function' for JSFunction and JSBoundFunction objects.
  STATIC_ASSERT(LAST_FUNCTION_TYPE == LAST_TYPE);
  __ Branch(&function, hs, a1, Operand(FIRST_FUNCTION_TYPE));

  // Check if the constructor in the map is a JS function.
  Register instance_type = a2;
  __ GetMapConstructor(v0, v0, a1, instance_type);
  __ Branch(&non_function_constructor, ne, instance_type,
            Operand(JS_FUNCTION_TYPE));

  // v0 now contains the constructor function. Grab the
  // instance class name from there.
  __ lw(v0, FieldMemOperand(v0, JSFunction::kSharedFunctionInfoOffset));
  __ lw(v0, FieldMemOperand(v0, SharedFunctionInfo::kInstanceClassNameOffset));
  __ Branch(&done);

  // Functions have class 'Function'.
  __ bind(&function);
  __ LoadRoot(v0, Heap::kFunction_stringRootIndex);
  __ jmp(&done);

  // Objects with a non-function constructor have class 'Object'.
  __ bind(&non_function_constructor);
  __ LoadRoot(v0, Heap::kObject_stringRootIndex);
  __ jmp(&done);

  // Non-JS objects have class null.
  __ bind(&null);
  __ LoadRoot(v0, Heap::kNullValueRootIndex);

  // All done.
  __ bind(&done);

  context()->Plug(v0);
}


void FullCodeGenerator::EmitValueOf(CallRuntime* expr) {
  ZoneList<Expression*>* args = expr->arguments();
  DCHECK(args->length() == 1);

  VisitForAccumulatorValue(args->at(0));  // Load the object.

  Label done;
  // If the object is a smi return the object.
  __ JumpIfSmi(v0, &done);
  // If the object is not a value type, return the object.
  __ GetObjectType(v0, a1, a1);
  __ Branch(&done, ne, a1, Operand(JS_VALUE_TYPE));

  __ lw(v0, FieldMemOperand(v0, JSValue::kValueOffset));

  __ bind(&done);
  context()->Plug(v0);
}


void FullCodeGenerator::EmitStringCharFromCode(CallRuntime* expr) {
  ZoneList<Expression*>* args = expr->arguments();
  DCHECK(args->length() == 1);

  VisitForAccumulatorValue(args->at(0));

  Label done;
  StringCharFromCodeGenerator generator(v0, a1);
  generator.GenerateFast(masm_);
  __ jmp(&done);

  NopRuntimeCallHelper call_helper;
  generator.GenerateSlow(masm_, call_helper);

  __ bind(&done);
  context()->Plug(a1);
}


void FullCodeGenerator::EmitStringCharCodeAt(CallRuntime* expr) {
  ZoneList<Expression*>* args = expr->arguments();
  DCHECK(args->length() == 2);

  VisitForStackValue(args->at(0));
  VisitForAccumulatorValue(args->at(1));
  __ mov(a0, result_register());

  Register object = a1;
  Register index = a0;
  Register result = v0;

  PopOperand(object);

  Label need_conversion;
  Label index_out_of_range;
  Label done;
  StringCharCodeAtGenerator generator(object, index, result, &need_conversion,
                                      &need_conversion, &index_out_of_range);
  generator.GenerateFast(masm_);
  __ jmp(&done);

  __ bind(&index_out_of_range);
  // When the index is out of range, the spec requires us to return
  // NaN.
  __ LoadRoot(result, Heap::kNanValueRootIndex);
  __ jmp(&done);

  __ bind(&need_conversion);
  // Load the undefined value into the result register, which will
  // trigger conversion.
  __ LoadRoot(result, Heap::kUndefinedValueRootIndex);
  __ jmp(&done);

  NopRuntimeCallHelper call_helper;
  generator.GenerateSlow(masm_, NOT_PART_OF_IC_HANDLER, call_helper);

  __ bind(&done);
  context()->Plug(result);
}


void FullCodeGenerator::EmitCall(CallRuntime* expr) {
  ZoneList<Expression*>* args = expr->arguments();
  DCHECK_LE(2, args->length());
  // Push target, receiver and arguments onto the stack.
  for (Expression* const arg : *args) {
    VisitForStackValue(arg);
  }
  PrepareForBailoutForId(expr->CallId(), BailoutState::NO_REGISTERS);
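  // The stack now holds the target, the receiver and the arguments, with the
  // target deepest at sp + (argc + 1) * kPointerSize.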
  // Move target to a1.
  int const argc = args->length() - 2;
  __ lw(a1, MemOperand(sp, (argc + 1) * kPointerSize));
  // Call the target.
  __ li(a0, Operand(argc));
  __ Call(isolate()->builtins()->Call(), RelocInfo::CODE_TARGET);
  OperandStackDepthDecrement(argc + 1);
  RestoreContext();
  // Discard the function left on TOS.
  context()->DropAndPlug(1, v0);
}


void FullCodeGenerator::EmitHasCachedArrayIndex(CallRuntime* expr) {
  ZoneList<Expression*>* args = expr->arguments();
  VisitForAccumulatorValue(args->at(0));

  Label materialize_true, materialize_false;
  Label* if_true = NULL;
  Label* if_false = NULL;
  Label* fall_through = NULL;
  context()->PrepareTest(&materialize_true, &materialize_false,
                         &if_true, &if_false, &fall_through);

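  // The masked bits of the hash field are all clear iff the field holds a
  // cached array index rather than a string hash.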
  __ lw(a0, FieldMemOperand(v0, String::kHashFieldOffset));
  __ And(a0, a0, Operand(String::kContainsCachedArrayIndexMask));

  PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
  Split(eq, a0, Operand(zero_reg), if_true, if_false, fall_through);

  context()->Plug(if_true, if_false);
}


void FullCodeGenerator::EmitGetCachedArrayIndex(CallRuntime* expr) {
  ZoneList<Expression*>* args = expr->arguments();
  DCHECK(args->length() == 1);
  VisitForAccumulatorValue(args->at(0));

  __ AssertString(v0);

  __ lw(v0, FieldMemOperand(v0, String::kHashFieldOffset));
  __ IndexFromHash(v0, v0);

  context()->Plug(v0);
}


void FullCodeGenerator::EmitGetSuperConstructor(CallRuntime* expr) {
  ZoneList<Expression*>* args = expr->arguments();
  DCHECK_EQ(1, args->length());
  VisitForAccumulatorValue(args->at(0));
  __ AssertFunction(v0);
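  // The super constructor is the [[Prototype]] of the derived constructor,
  // read out of its map.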
  __ lw(v0, FieldMemOperand(v0, HeapObject::kMapOffset));
  __ lw(v0, FieldMemOperand(v0, Map::kPrototypeOffset));
  context()->Plug(v0);
}

void FullCodeGenerator::EmitDebugIsActive(CallRuntime* expr) {
  DCHECK(expr->arguments()->length() == 0);
  ExternalReference debug_is_active =
      ExternalReference::debug_is_active_address(isolate());
  __ li(at, Operand(debug_is_active));
  __ lb(v0, MemOperand(at));
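  // The flag is a single byte (0 or 1); tag it so the JS caller sees a Smi.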
  __ SmiTag(v0);
  context()->Plug(v0);
}


void FullCodeGenerator::EmitCreateIterResultObject(CallRuntime* expr) {
  ZoneList<Expression*>* args = expr->arguments();
  DCHECK_EQ(2, args->length());
  VisitForStackValue(args->at(0));
  VisitForStackValue(args->at(1));

  Label runtime, done;

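  // Try to allocate a JSIteratorResult ({value, done}) in new space and
  // initialize it inline; fall back to the runtime on allocation failure.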
  __ Allocate(JSIteratorResult::kSize, v0, a2, a3, &runtime,
              NO_ALLOCATION_FLAGS);
  __ LoadNativeContextSlot(Context::ITERATOR_RESULT_MAP_INDEX, a1);
  __ Pop(a2, a3);
  __ LoadRoot(t0, Heap::kEmptyFixedArrayRootIndex);
  __ sw(a1, FieldMemOperand(v0, HeapObject::kMapOffset));
  __ sw(t0, FieldMemOperand(v0, JSObject::kPropertiesOffset));
  __ sw(t0, FieldMemOperand(v0, JSObject::kElementsOffset));
  __ sw(a2, FieldMemOperand(v0, JSIteratorResult::kValueOffset));
  __ sw(a3, FieldMemOperand(v0, JSIteratorResult::kDoneOffset));
  STATIC_ASSERT(JSIteratorResult::kSize == 5 * kPointerSize);
  __ jmp(&done);

  __ bind(&runtime);
  CallRuntimeWithOperands(Runtime::kCreateIterResultObject);

  __ bind(&done);
  context()->Plug(v0);
}


void FullCodeGenerator::EmitLoadJSRuntimeFunction(CallRuntime* expr) {
  // Push function.
  __ LoadNativeContextSlot(expr->context_index(), v0);
  PushOperand(v0);

  // Push undefined as the receiver.
  __ LoadRoot(v0, Heap::kUndefinedValueRootIndex);
  PushOperand(v0);
}


void FullCodeGenerator::EmitCallJSRuntimeFunction(CallRuntime* expr) {
  ZoneList<Expression*>* args = expr->arguments();
  int arg_count = args->length();

  SetCallPosition(expr);
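  // The function was pushed below the receiver and the arguments, so it sits
  // at depth arg_count + 1; load it into a1.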
  __ lw(a1, MemOperand(sp, (arg_count + 1) * kPointerSize));
  __ li(a0, Operand(arg_count));
  __ Call(isolate()->builtins()->Call(ConvertReceiverMode::kNullOrUndefined),
          RelocInfo::CODE_TARGET);
  OperandStackDepthDecrement(arg_count + 1);
  RestoreContext();
}


void FullCodeGenerator::VisitUnaryOperation(UnaryOperation* expr) {
  switch (expr->op()) {
    case Token::DELETE: {
      Comment cmnt(masm_, "[ UnaryOperation (DELETE)");
      Property* property = expr->expression()->AsProperty();
      VariableProxy* proxy = expr->expression()->AsVariableProxy();

      if (property != NULL) {
        VisitForStackValue(property->obj());
        VisitForStackValue(property->key());
        CallRuntimeWithOperands(is_strict(language_mode())
                                    ? Runtime::kDeleteProperty_Strict
                                    : Runtime::kDeleteProperty_Sloppy);
        context()->Plug(v0);
      } else if (proxy != NULL) {
        Variable* var = proxy->var();
        // Delete of an unqualified identifier is disallowed in strict mode but
        // "delete this" is allowed.
        bool is_this = var->HasThisName(isolate());
        DCHECK(is_sloppy(language_mode()) || is_this);
        if (var->IsUnallocatedOrGlobalSlot()) {
          __ LoadGlobalObject(a2);
          __ li(a1, Operand(var->name()));
          __ Push(a2, a1);
          __ CallRuntime(Runtime::kDeleteProperty_Sloppy);
          context()->Plug(v0);
        } else if (var->IsStackAllocated() || var->IsContextSlot()) {
          // Result of deleting non-global, non-dynamic variables is false.
          // The subexpression does not have side effects.
          context()->Plug(is_this);
        } else {
          // Non-global variable. Call the runtime to try to delete from the
          // context where the variable was introduced.
          __ Push(var->name());
          __ CallRuntime(Runtime::kDeleteLookupSlot);
          context()->Plug(v0);
        }
      } else {
        // Result of deleting non-property, non-variable reference is true.
        // The subexpression may have side effects.
        VisitForEffect(expr->expression());
        context()->Plug(true);
      }
      break;
    }

    case Token::VOID: {
      Comment cmnt(masm_, "[ UnaryOperation (VOID)");
      VisitForEffect(expr->expression());
      context()->Plug(Heap::kUndefinedValueRootIndex);
      break;
    }

    case Token::NOT: {
      Comment cmnt(masm_, "[ UnaryOperation (NOT)");
      if (context()->IsEffect()) {
        // Unary NOT has no side effects so it's only necessary to visit the
        // subexpression. Match the optimizing compiler by not branching.
        VisitForEffect(expr->expression());
      } else if (context()->IsTest()) {
        const TestContext* test = TestContext::cast(context());
        // The labels are swapped for the recursive call.
        VisitForControl(expr->expression(),
                        test->false_label(),
                        test->true_label(),
                        test->fall_through());
        context()->Plug(test->true_label(), test->false_label());
      } else {
        // We handle value contexts explicitly rather than simply visiting
        // for control and plugging the control flow into the context,
        // because we need to prepare a pair of extra administrative AST ids
        // for the optimizing compiler.
        DCHECK(context()->IsAccumulatorValue() || context()->IsStackValue());
        Label materialize_true, materialize_false, done;
        VisitForControl(expr->expression(),
                        &materialize_false,
                        &materialize_true,
                        &materialize_true);
        if (!context()->IsAccumulatorValue()) OperandStackDepthIncrement(1);
        __ bind(&materialize_true);
        PrepareForBailoutForId(expr->MaterializeTrueId(),
                               BailoutState::NO_REGISTERS);
        __ LoadRoot(v0, Heap::kTrueValueRootIndex);
        if (context()->IsStackValue()) __ push(v0);
        __ jmp(&done);
        __ bind(&materialize_false);
        PrepareForBailoutForId(expr->MaterializeFalseId(),
                               BailoutState::NO_REGISTERS);
        __ LoadRoot(v0, Heap::kFalseValueRootIndex);
        if (context()->IsStackValue()) __ push(v0);
        __ bind(&done);
      }
      break;
    }

    case Token::TYPEOF: {
      Comment cmnt(masm_, "[ UnaryOperation (TYPEOF)");
      {
        AccumulatorValueContext context(this);
        VisitForTypeofValue(expr->expression());
      }
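      // Move the operand into the Typeof stub's argument register.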
      __ mov(a3, v0);
      TypeofStub typeof_stub(isolate());
      __ CallStub(&typeof_stub);
      context()->Plug(v0);
      break;
    }

    default:
      UNREACHABLE();
  }
}


void FullCodeGenerator::VisitCountOperation(CountOperation* expr) {
  DCHECK(expr->expression()->IsValidReferenceExpressionOrThis());

  Comment cmnt(masm_, "[ CountOperation");

  Property* prop = expr->expression()->AsProperty();
  LhsKind assign_type = Property::GetAssignType(prop);

  // Evaluate expression and get value.
  if (assign_type == VARIABLE) {
    DCHECK(expr->expression()->AsVariableProxy()->var() != NULL);
    AccumulatorValueContext context(this);
    EmitVariableLoad(expr->expression()->AsVariableProxy());
  } else {
    // Reserve space for result of postfix operation.
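    // For a postfix operation in a value context, e.g. 'a.b++', the old
    // value is later stored into this slot.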
    if (expr->is_postfix() && !context()->IsEffect()) {
      __ li(at, Operand(Smi::FromInt(0)));
      PushOperand(at);
    }
    switch (assign_type) {
      case NAMED_PROPERTY: {
        // Put the object both on the stack and in the register.
        VisitForStackValue(prop->obj());
        __ lw(LoadDescriptor::ReceiverRegister(), MemOperand(sp, 0));
        EmitNamedPropertyLoad(prop);
        break;
      }

      case NAMED_SUPER_PROPERTY: {
        VisitForStackValue(prop->obj()->AsSuperPropertyReference()->this_var());
        VisitForAccumulatorValue(
            prop->obj()->AsSuperPropertyReference()->home_object());
        PushOperand(result_register());
        const Register scratch = a1;
        __ lw(scratch, MemOperand(sp, kPointerSize));
        PushOperands(scratch, result_register());
        EmitNamedSuperPropertyLoad(prop);
        break;
      }

      case KEYED_SUPER_PROPERTY: {
        VisitForStackValue(prop->obj()->AsSuperPropertyReference()->this_var());
        VisitForAccumulatorValue(
            prop->obj()->AsSuperPropertyReference()->home_object());
        const Register scratch = a1;
        const Register scratch1 = t0;
        __ Move(scratch, result_register());
        VisitForAccumulatorValue(prop->key());
        PushOperands(scratch, result_register());
        __ lw(scratch1, MemOperand(sp, 2 * kPointerSize));
        PushOperands(scratch1, scratch, result_register());
        EmitKeyedSuperPropertyLoad(prop);
        break;
      }

      case KEYED_PROPERTY: {
        VisitForStackValue(prop->obj());
        VisitForStackValue(prop->key());
        __ lw(LoadDescriptor::ReceiverRegister(),
              MemOperand(sp, 1 * kPointerSize));
        __ lw(LoadDescriptor::NameRegister(), MemOperand(sp, 0));
        EmitKeyedPropertyLoad(prop);
        break;
      }

      case VARIABLE:
        UNREACHABLE();
    }
  }

  // We need a second deoptimization point after loading the value
  // in case evaluating the property load may have a side effect.
  if (assign_type == VARIABLE) {
    PrepareForBailout(expr->expression(), BailoutState::TOS_REGISTER);
  } else {
    PrepareForBailoutForId(prop->LoadId(), BailoutState::TOS_REGISTER);
  }

  // Inline smi case if we are in a loop.
  Label stub_call, done;
  JumpPatchSite patch_site(masm_);

  int count_value = expr->op() == Token::INC ? 1 : -1;
  __ mov(a0, v0);
  if (ShouldInlineSmiCase(expr->op())) {
    Label slow;
    patch_site.EmitJumpIfNotSmi(v0, &slow);

    // Save result for postfix expressions.
    if (expr->is_postfix()) {
      if (!context()->IsEffect()) {
        // Save the result on the stack. If we have a named or keyed property
        // we store the result under the receiver that is currently on top
        // of the stack.
        switch (assign_type) {
          case VARIABLE:
            __ push(v0);
            break;
          case NAMED_PROPERTY:
            __ sw(v0, MemOperand(sp, kPointerSize));
            break;
          case NAMED_SUPER_PROPERTY:
            __ sw(v0, MemOperand(sp, 2 * kPointerSize));
            break;
          case KEYED_PROPERTY:
            __ sw(v0, MemOperand(sp, 2 * kPointerSize));
            break;
          case KEYED_SUPER_PROPERTY:
            __ sw(v0, MemOperand(sp, 3 * kPointerSize));
            break;
        }
      }
    }

    Register scratch1 = a1;
    __ li(scratch1, Operand(Smi::FromInt(count_value)));
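    // Try the increment in Smi arithmetic; branch straight to done unless
    // the addition overflows.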
    __ AddBranchNoOvf(v0, v0, Operand(scratch1), &done);
    // Call stub. Undo operation first.
    __ Move(v0, a0);
    __ jmp(&stub_call);
    __ bind(&slow);
  }

  // Convert old value into a number.
  __ Call(isolate()->builtins()->ToNumber(), RelocInfo::CODE_TARGET);
  PrepareForBailoutForId(expr->ToNumberId(), BailoutState::TOS_REGISTER);

  // Save result for postfix expressions.
  if (expr->is_postfix()) {
    if (!context()->IsEffect()) {
      // Save the result on the stack. If we have a named or keyed property
      // we store the result under the receiver that is currently on top
      // of the stack.
      switch (assign_type) {
        case VARIABLE:
          PushOperand(v0);
          break;
        case NAMED_PROPERTY:
          __ sw(v0, MemOperand(sp, kPointerSize));
          break;
        case NAMED_SUPER_PROPERTY:
          __ sw(v0, MemOperand(sp, 2 * kPointerSize));
          break;
        case KEYED_PROPERTY:
          __ sw(v0, MemOperand(sp, 2 * kPointerSize));
          break;
        case KEYED_SUPER_PROPERTY:
          __ sw(v0, MemOperand(sp, 3 * kPointerSize));
          break;
      }
    }
  }

  __ bind(&stub_call);
  __ mov(a1, v0);
  __ li(a0, Operand(Smi::FromInt(count_value)));

  SetExpressionPosition(expr);

  Handle<Code> code = CodeFactory::BinaryOpIC(isolate(), Token::ADD).code();
  CallIC(code, expr->CountBinOpFeedbackId());
  patch_site.EmitPatchInfo();
  __ bind(&done);

  // Store the value returned in v0.
  switch (assign_type) {
    case VARIABLE:
      if (expr->is_postfix()) {
        { EffectContext context(this);
          EmitVariableAssignment(expr->expression()->AsVariableProxy()->var(),
                                 Token::ASSIGN, expr->CountSlot());
          PrepareForBailoutForId(expr->AssignmentId(),
                                 BailoutState::TOS_REGISTER);
          context.Plug(v0);
        }
        // For all contexts except EffectContext we have the result on
        // top of the stack.
        if (!context()->IsEffect()) {
          context()->PlugTOS();
        }
      } else {
        EmitVariableAssignment(expr->expression()->AsVariableProxy()->var(),
                               Token::ASSIGN, expr->CountSlot());
        PrepareForBailoutForId(expr->AssignmentId(),
                               BailoutState::TOS_REGISTER);
        context()->Plug(v0);
      }
      break;
    case NAMED_PROPERTY: {
      __ mov(StoreDescriptor::ValueRegister(), result_register());
      __ li(StoreDescriptor::NameRegister(),
            Operand(prop->key()->AsLiteral()->value()));
      PopOperand(StoreDescriptor::ReceiverRegister());
      EmitLoadStoreICSlot(expr->CountSlot());
      CallStoreIC();
      PrepareForBailoutForId(expr->AssignmentId(), BailoutState::TOS_REGISTER);
      if (expr->is_postfix()) {
        if (!context()->IsEffect()) {
          context()->PlugTOS();
        }
      } else {
        context()->Plug(v0);
      }
      break;
    }
    case NAMED_SUPER_PROPERTY: {
      EmitNamedSuperPropertyStore(prop);
      if (expr->is_postfix()) {
        if (!context()->IsEffect()) {
          context()->PlugTOS();
        }
      } else {
        context()->Plug(v0);
      }
      break;
    }
    case KEYED_SUPER_PROPERTY: {
      EmitKeyedSuperPropertyStore(prop);
      if (expr->is_postfix()) {
        if (!context()->IsEffect()) {
          context()->PlugTOS();
        }
      } else {
        context()->Plug(v0);
      }
      break;
    }
    case KEYED_PROPERTY: {
      __ mov(StoreDescriptor::ValueRegister(), result_register());
      PopOperands(StoreDescriptor::ReceiverRegister(),
                  StoreDescriptor::NameRegister());
      Handle<Code> ic =
          CodeFactory::KeyedStoreIC(isolate(), language_mode()).code();
      EmitLoadStoreICSlot(expr->CountSlot());
      CallIC(ic);
      PrepareForBailoutForId(expr->AssignmentId(), BailoutState::TOS_REGISTER);
      if (expr->is_postfix()) {
        if (!context()->IsEffect()) {
          context()->PlugTOS();
        }
      } else {
        context()->Plug(v0);
      }
      break;
    }
  }
}


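// Emits a fast type check for 'typeof <sub_expr> == <check>' comparisons,
// e.g. 'typeof x == "number"', without materializing the typeof string.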
void FullCodeGenerator::EmitLiteralCompareTypeof(Expression* expr,
                                                 Expression* sub_expr,
                                                 Handle<String> check) {
  Label materialize_true, materialize_false;
  Label* if_true = NULL;
  Label* if_false = NULL;
  Label* fall_through = NULL;
  context()->PrepareTest(&materialize_true, &materialize_false,
                         &if_true, &if_false, &fall_through);

  { AccumulatorValueContext context(this);
    VisitForTypeofValue(sub_expr);
  }
  PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);

  Factory* factory = isolate()->factory();
  if (String::Equals(check, factory->number_string())) {
    __ JumpIfSmi(v0, if_true);
    __ lw(v0, FieldMemOperand(v0, HeapObject::kMapOffset));
    __ LoadRoot(at, Heap::kHeapNumberMapRootIndex);
    Split(eq, v0, Operand(at), if_true, if_false, fall_through);
  } else if (String::Equals(check, factory->string_string())) {
    __ JumpIfSmi(v0, if_false);
    __ GetObjectType(v0, v0, a1);
    Split(lt, a1, Operand(FIRST_NONSTRING_TYPE), if_true, if_false,
          fall_through);
  } else if (String::Equals(check, factory->symbol_string())) {
    __ JumpIfSmi(v0, if_false);
    __ GetObjectType(v0, v0, a1);
    Split(eq, a1, Operand(SYMBOL_TYPE), if_true, if_false, fall_through);
  } else if (String::Equals(check, factory->boolean_string())) {
    __ LoadRoot(at, Heap::kTrueValueRootIndex);
    __ Branch(if_true, eq, v0, Operand(at));
    __ LoadRoot(at, Heap::kFalseValueRootIndex);
    Split(eq, v0, Operand(at), if_true, if_false, fall_through);
  } else if (String::Equals(check, factory->undefined_string())) {
    __ LoadRoot(at, Heap::kNullValueRootIndex);
    __ Branch(if_false, eq, v0, Operand(at));
    __ JumpIfSmi(v0, if_false);
    // Check for undetectable objects => true.
    __ lw(v0, FieldMemOperand(v0, HeapObject::kMapOffset));
    __ lbu(a1, FieldMemOperand(v0, Map::kBitFieldOffset));
    __ And(a1, a1, Operand(1 << Map::kIsUndetectable));
    Split(ne, a1, Operand(zero_reg), if_true, if_false, fall_through);
  } else if (String::Equals(check, factory->function_string())) {
    __ JumpIfSmi(v0, if_false);
    __ lw(v0, FieldMemOperand(v0, HeapObject::kMapOffset));
    __ lbu(a1, FieldMemOperand(v0, Map::kBitFieldOffset));
    __ And(a1, a1,
           Operand((1 << Map::kIsCallable) | (1 << Map::kIsUndetectable)));
    Split(eq, a1, Operand(1 << Map::kIsCallable), if_true, if_false,
          fall_through);
  } else if (String::Equals(check, factory->object_string())) {
    __ JumpIfSmi(v0, if_false);
    __ LoadRoot(at, Heap::kNullValueRootIndex);
    __ Branch(if_true, eq, v0, Operand(at));
    STATIC_ASSERT(LAST_JS_RECEIVER_TYPE == LAST_TYPE);
    __ GetObjectType(v0, v0, a1);
    __ Branch(if_false, lt, a1, Operand(FIRST_JS_RECEIVER_TYPE));
    // Check for callable or undetectable objects => false.
    __ lbu(a1, FieldMemOperand(v0, Map::kBitFieldOffset));
    __ And(a1, a1,
           Operand((1 << Map::kIsCallable) | (1 << Map::kIsUndetectable)));
    Split(eq, a1, Operand(zero_reg), if_true, if_false, fall_through);
// clang-format off
#define SIMD128_TYPE(TYPE, Type, type, lane_count, lane_type)    \
  } else if (String::Equals(check, factory->type##_string())) {  \
    __ JumpIfSmi(v0, if_false);                                  \
    __ lw(v0, FieldMemOperand(v0, HeapObject::kMapOffset));      \
    __ LoadRoot(at, Heap::k##Type##MapRootIndex);                \
    Split(eq, v0, Operand(at), if_true, if_false, fall_through);
  SIMD128_TYPES(SIMD128_TYPE)
#undef SIMD128_TYPE
  // clang-format on
  } else {
    if (if_false != fall_through) __ jmp(if_false);
  }
  context()->Plug(if_true, if_false);
}


void FullCodeGenerator::VisitCompareOperation(CompareOperation* expr) {
  Comment cmnt(masm_, "[ CompareOperation");

  // First we try a fast inlined version of the compare when one of
  // the operands is a literal.
  if (TryLiteralCompare(expr)) return;

  // Always perform the comparison for its control flow. Pack the result
  // into the expression's context after the comparison is performed.
  Label materialize_true, materialize_false;
  Label* if_true = NULL;
  Label* if_false = NULL;
  Label* fall_through = NULL;
  context()->PrepareTest(&materialize_true, &materialize_false,
                         &if_true, &if_false, &fall_through);

  Token::Value op = expr->op();
  VisitForStackValue(expr->left());
  switch (op) {
    case Token::IN:
      VisitForStackValue(expr->right());
      SetExpressionPosition(expr);
      EmitHasProperty();
      PrepareForBailoutBeforeSplit(expr, false, NULL, NULL);
      __ LoadRoot(t0, Heap::kTrueValueRootIndex);
      Split(eq, v0, Operand(t0), if_true, if_false, fall_through);
      break;

    case Token::INSTANCEOF: {
      VisitForAccumulatorValue(expr->right());
      SetExpressionPosition(expr);
      __ mov(a0, result_register());
      PopOperand(a1);
      InstanceOfStub stub(isolate());
      __ CallStub(&stub);
      PrepareForBailoutBeforeSplit(expr, false, NULL, NULL);
      __ LoadRoot(at, Heap::kTrueValueRootIndex);
      Split(eq, v0, Operand(at), if_true, if_false, fall_through);
      break;
    }

    default: {
      VisitForAccumulatorValue(expr->right());
      SetExpressionPosition(expr);
      Condition cc = CompareIC::ComputeCondition(op);
      __ mov(a0, result_register());
      PopOperand(a1);

      bool inline_smi_code = ShouldInlineSmiCase(op);
      JumpPatchSite patch_site(masm_);
      if (inline_smi_code) {
        Label slow_case;
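        // The Or of two Smis is itself a Smi, so a single tag test covers
        // both operands.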
        __ Or(a2, a0, Operand(a1));
        patch_site.EmitJumpIfNotSmi(a2, &slow_case);
        Split(cc, a1, Operand(a0), if_true, if_false, NULL);
        __ bind(&slow_case);
      }

      Handle<Code> ic = CodeFactory::CompareIC(isolate(), op).code();
      CallIC(ic, expr->CompareOperationFeedbackId());
      patch_site.EmitPatchInfo();
      PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
      Split(cc, v0, Operand(zero_reg), if_true, if_false, fall_through);
    }
  }

  // Convert the result of the comparison into one expected for this
  // expression's context.
  context()->Plug(if_true, if_false);
}


void FullCodeGenerator::EmitLiteralCompareNil(CompareOperation* expr,
                                              Expression* sub_expr,
                                              NilValue nil) {
  Label materialize_true, materialize_false;
  Label* if_true = NULL;
  Label* if_false = NULL;
  Label* fall_through = NULL;
  context()->PrepareTest(&materialize_true, &materialize_false,
                         &if_true, &if_false, &fall_through);

  VisitForAccumulatorValue(sub_expr);
  PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
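  // For '==' anything carrying the undetectable map bit (including the null
  // and undefined oddballs) compares equal to nil.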
  if (expr->op() == Token::EQ_STRICT) {
    Heap::RootListIndex nil_value = nil == kNullValue ?
        Heap::kNullValueRootIndex :
        Heap::kUndefinedValueRootIndex;
    __ LoadRoot(a1, nil_value);
    Split(eq, v0, Operand(a1), if_true, if_false, fall_through);
  } else {
    __ JumpIfSmi(v0, if_false);
    __ lw(v0, FieldMemOperand(v0, HeapObject::kMapOffset));
    __ lbu(a1, FieldMemOperand(v0, Map::kBitFieldOffset));
    __ And(a1, a1, Operand(1 << Map::kIsUndetectable));
    Split(ne, a1, Operand(zero_reg), if_true, if_false, fall_through);
  }
  context()->Plug(if_true, if_false);
}


Register FullCodeGenerator::result_register() {
  return v0;
}


Register FullCodeGenerator::context_register() {
  return cp;
}

void FullCodeGenerator::LoadFromFrameField(int frame_offset, Register value) {
  DCHECK_EQ(POINTER_SIZE_ALIGN(frame_offset), frame_offset);
  __ lw(value, MemOperand(fp, frame_offset));
}

void FullCodeGenerator::StoreToFrameField(int frame_offset, Register value) {
  DCHECK_EQ(POINTER_SIZE_ALIGN(frame_offset), frame_offset);
  __ sw(value, MemOperand(fp, frame_offset));
}


void FullCodeGenerator::LoadContextField(Register dst, int context_index) {
  __ lw(dst, ContextMemOperand(cp, context_index));
}


void FullCodeGenerator::PushFunctionArgumentForContextAllocation() {
  Scope* closure_scope = scope()->ClosureScope();
  if (closure_scope->is_script_scope() ||
      closure_scope->is_module_scope()) {
    // Contexts nested in the native context have a canonical empty function
    // as their closure, not the anonymous closure containing the global
    // code.
    __ LoadNativeContextSlot(Context::CLOSURE_INDEX, at);
  } else if (closure_scope->is_eval_scope()) {
    // Contexts created by a call to eval have the same closure as the
    // context calling eval, not the anonymous closure containing the eval
    // code. Fetch it from the context.
    __ lw(at, ContextMemOperand(cp, Context::CLOSURE_INDEX));
  } else {
    DCHECK(closure_scope->is_function_scope());
    __ lw(at, MemOperand(fp, JavaScriptFrameConstants::kFunctionOffset));
  }
  PushOperand(at);
}


// ----------------------------------------------------------------------------
// Non-local control flow support.

void FullCodeGenerator::EnterFinallyBlock() {
  DCHECK(!result_register().is(a1));
  // Store pending message while executing finally block.
  ExternalReference pending_message_obj =
      ExternalReference::address_of_pending_message_obj(isolate());
  __ li(at, Operand(pending_message_obj));
  __ lw(a1, MemOperand(at));
  PushOperand(a1);

  ClearPendingMessage();
}


void FullCodeGenerator::ExitFinallyBlock() {
  DCHECK(!result_register().is(a1));
  // Restore pending message from stack.
  PopOperand(a1);
  ExternalReference pending_message_obj =
      ExternalReference::address_of_pending_message_obj(isolate());
  __ li(at, Operand(pending_message_obj));
  __ sw(a1, MemOperand(at));
}


void FullCodeGenerator::ClearPendingMessage() {
  DCHECK(!result_register().is(a1));
  ExternalReference pending_message_obj =
      ExternalReference::address_of_pending_message_obj(isolate());
  __ LoadRoot(a1, Heap::kTheHoleValueRootIndex);
  __ li(at, Operand(pending_message_obj));
  __ sw(a1, MemOperand(at));
}


void FullCodeGenerator::DeferredCommands::EmitCommands() {
  DCHECK(!result_register().is(a1));
  __ Pop(result_register());  // Restore the accumulator.
  __ Pop(a1);  // Get the token.
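  // Dispatch on the token: each deferred command (break, continue, return,
  // throw) recorded inside the try block is guarded by its own token value.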
  for (DeferredCommand cmd : commands_) {
    Label skip;
    __ li(at, Operand(Smi::FromInt(cmd.token)));
    __ Branch(&skip, ne, a1, Operand(at));
    switch (cmd.command) {
      case kReturn:
        codegen_->EmitUnwindAndReturn();
        break;
      case kThrow:
        __ Push(result_register());
        __ CallRuntime(Runtime::kReThrow);
        break;
      case kContinue:
        codegen_->EmitContinue(cmd.target);
        break;
      case kBreak:
        codegen_->EmitBreak(cmd.target);
        break;
    }
    __ bind(&skip);
  }
}

#undef __


void BackEdgeTable::PatchAt(Code* unoptimized_code,
                            Address pc,
                            BackEdgeState target_state,
                            Code* replacement_code) {
  static const int kInstrSize = Assembler::kInstrSize;
  Address pc_immediate_load_address =
      Assembler::target_address_from_return_address(pc);
  Address branch_address = pc_immediate_load_address - 2 * kInstrSize;
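  // branch_address points at the slt/addiu that feeds the beq one
  // instruction later (see the patched sequences below).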
  Isolate* isolate = unoptimized_code->GetIsolate();
  CodePatcher patcher(isolate, branch_address, 1);

  switch (target_state) {
    case INTERRUPT:
      // slt  at, a3, zero_reg (in case of count based interrupts)
      // beq  at, zero_reg, ok
      // lui  t9, <interrupt stub address> upper
      // ori  t9, <interrupt stub address> lower
      // jalr t9
      // nop
      // ok-label ----- pc_after points here
      patcher.masm()->slt(at, a3, zero_reg);
      break;
    case ON_STACK_REPLACEMENT:
      // addiu at, zero_reg, 1
      // beq   at, zero_reg, ok  ;; Not changed
      // lui   t9, <on-stack replacement address> upper
      // ori   t9, <on-stack replacement address> lower
      // jalr  t9  ;; Not changed
      // nop  ;; Not changed
      // ok-label ----- pc_after points here
      patcher.masm()->addiu(at, zero_reg, 1);
      break;
  }
  // Replace the stack check address in the load-immediate (lui/ori pair)
  // with the entry address of the replacement code.
  Assembler::set_target_address_at(isolate, pc_immediate_load_address,
                                   replacement_code->entry());

  unoptimized_code->GetHeap()->incremental_marking()->RecordCodeTargetPatch(
      unoptimized_code, pc_immediate_load_address, replacement_code);
}


BackEdgeTable::BackEdgeState BackEdgeTable::GetBackEdgeState(
    Isolate* isolate,
    Code* unoptimized_code,
    Address pc) {
  static const int kInstrSize = Assembler::kInstrSize;
  Address pc_immediate_load_address =
      Assembler::target_address_from_return_address(pc);
  Address branch_address = pc_immediate_load_address - 2 * kInstrSize;

  DCHECK(Assembler::IsBeq(Assembler::instr_at(branch_address + kInstrSize)));
  if (!Assembler::IsAddImmediate(Assembler::instr_at(branch_address))) {
    DCHECK(reinterpret_cast<uint32_t>(
               Assembler::target_address_at(pc_immediate_load_address)) ==
           reinterpret_cast<uint32_t>(
               isolate->builtins()->InterruptCheck()->entry()));
    return INTERRUPT;
  }

  DCHECK(Assembler::IsAddImmediate(Assembler::instr_at(branch_address)));

  DCHECK(reinterpret_cast<uint32_t>(
             Assembler::target_address_at(pc_immediate_load_address)) ==
         reinterpret_cast<uint32_t>(
             isolate->builtins()->OnStackReplacement()->entry()));
  return ON_STACK_REPLACEMENT;
}


}  // namespace internal
}  // namespace v8

#endif  // V8_TARGET_ARCH_MIPS