// Copyright 2012 the V8 project authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.

#if V8_TARGET_ARCH_MIPS64

// Note on Mips implementation:
//
// The result_register() for mips is the 'v0' register, which is defined
// by the ABI to contain function return values. However, the first
// parameter to a function is defined to be 'a0'. So there are many
// places where we have to move a previous result in v0 to a0 for the
// next call: mov(a0, v0). This is not needed on the other architectures.
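//
// For example, DoTest() below moves the accumulator (v0) into a0 before
// calling the ToBoolean IC, which takes its argument there; a sketch of the
// recurring pattern (names after the arrow are illustrative, not new code):
//   __ mov(a0, result_register());  // result_register() is v0 on mips64.
//   CallIC(ic, ...);                // ic expects its input in a0.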

#include "src/ast/scopes.h"
#include "src/code-factory.h"
#include "src/code-stubs.h"
#include "src/codegen.h"
#include "src/debug/debug.h"
#include "src/full-codegen/full-codegen.h"
#include "src/ic/ic.h"
#include "src/parsing/parser.h"

#include "src/mips64/code-stubs-mips64.h"
#include "src/mips64/macro-assembler-mips64.h"

namespace v8 {
namespace internal {

#define __ ACCESS_MASM(masm())

// A patch site is a location in the code which it is possible to patch. This
// class has a number of methods to emit the code which is patchable and the
// method EmitPatchInfo to record a marker back to the patchable code. This
// marker is an andi zero_reg, rx, #yyyy instruction, and rx * 0x0000ffff + yyyy
// (the raw 16-bit immediate value is used) is the delta from the pc to the
// first instruction of the patchable code.
// The marker instruction is effectively a NOP (dest is zero_reg) and will
// never be emitted by normal code.
class JumpPatchSite BASE_EMBEDDED {
 public:
  explicit JumpPatchSite(MacroAssembler* masm) : masm_(masm) {
#ifdef DEBUG
    info_emitted_ = false;
#endif
  }

  ~JumpPatchSite() {
    DCHECK(patch_site_.is_bound() == info_emitted_);
  }

  // When initially emitting this ensure that a jump is always generated to skip
  // the inlined smi code.
  void EmitJumpIfNotSmi(Register reg, Label* target) {
    DCHECK(!patch_site_.is_bound() && !info_emitted_);
    Assembler::BlockTrampolinePoolScope block_trampoline_pool(masm_);
    __ bind(&patch_site_);
    __ andi(at, reg, 0);
    // Always taken before patched.
    __ BranchShort(target, eq, at, Operand(zero_reg));
  }

  // When initially emitting this ensure that a jump is never generated to skip
  // the inlined smi code.
  void EmitJumpIfSmi(Register reg, Label* target) {
    Assembler::BlockTrampolinePoolScope block_trampoline_pool(masm_);
    DCHECK(!patch_site_.is_bound() && !info_emitted_);
    __ bind(&patch_site_);
    __ andi(at, reg, 0);
    // Never taken before patched.
    __ BranchShort(target, ne, at, Operand(zero_reg));
  }

  void EmitPatchInfo() {
    if (patch_site_.is_bound()) {
      int delta_to_patch_site = masm_->InstructionsGeneratedSince(&patch_site_);
      Register reg = Register::from_code(delta_to_patch_site / kImm16Mask);
      __ andi(zero_reg, reg, delta_to_patch_site % kImm16Mask);
#ifdef DEBUG
      info_emitted_ = true;
#endif
    } else {
      __ nop();  // Signals no inlined code.
    }
  }

 private:
  MacroAssembler* masm() { return masm_; }
  MacroAssembler* masm_;
  Label patch_site_;
#ifdef DEBUG
  bool info_emitted_;
#endif
};
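
// Illustrative sketch only (not emitted code): a patcher that locates the
// marker instruction can recover the patch site roughly as follows, assuming
// kImm16Mask == 0xffff and fixed-width instructions of Assembler::kInstrSize
// bytes; the names marker_pc/patch_start are hypothetical:
//   int delta = rx.code() * kImm16Mask + yyyy;   // decoded from the marker
//   Address patch_start = marker_pc - delta * Assembler::kInstrSize;
// This mirrors the encoding performed by EmitPatchInfo() above.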


// Generate code for a JS function. On entry to the function the receiver
// and arguments have been pushed on the stack left to right. The actual
// argument count matches the formal parameter count expected by the
// function.
//
// The live registers are:
//   o a1: the JS function object being called (i.e. ourselves)
//   o a3: the new target value
//   o cp: our context
//   o fp: our caller's frame pointer
//   o sp: stack pointer
//   o ra: return address
//
// The function builds a JS frame. Please see JavaScriptFrameConstants in
// frames-mips.h for its layout.
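//
// A rough sketch of that layout, using only the conventions visible in this
// file (the exact offsets come from the frame constants, not this comment):
//   above fp : return address and caller's fp, then the receiver and
//              arguments (reached via StandardFrameConstants::kCallerSPOffset)
//   below fp : the context (StandardFrameConstants::kContextOffset), the
//              function (JavaScriptFrameConstants::kFunctionOffset), and the
//              stack-allocated locals (kLocal0Offset and downwards)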
void FullCodeGenerator::Generate() {
  CompilationInfo* info = info_;
  profiling_counter_ = isolate()->factory()->NewCell(
      Handle<Smi>(Smi::FromInt(FLAG_interrupt_budget), isolate()));
  SetFunctionPosition(literal());
  Comment cmnt(masm_, "[ function compiled by full code generator");

  ProfileEntryHookStub::MaybeCallEntryHook(masm_);

  if (FLAG_debug_code && info->ExpectsJSReceiverAsReceiver()) {
    int receiver_offset = info->scope()->num_parameters() * kPointerSize;
    __ ld(a2, MemOperand(sp, receiver_offset));
    __ AssertNotSmi(a2);
    __ GetObjectType(a2, a2, a2);
    __ Check(ge, kSloppyFunctionExpectsJSReceiverReceiver, a2,
             Operand(FIRST_JS_RECEIVER_TYPE));
  }

  // Open a frame scope to indicate that there is a frame on the stack. The
  // MANUAL indicates that the scope shouldn't actually generate code to set up
  // the frame (that is done below).
  FrameScope frame_scope(masm_, StackFrame::MANUAL);
  info->set_prologue_offset(masm_->pc_offset());
  __ Prologue(info->GeneratePreagedPrologue());

  { Comment cmnt(masm_, "[ Allocate locals");
    int locals_count = info->scope()->num_stack_slots();
    // Generators allocate locals, if any, in context slots.
    DCHECK(!IsGeneratorFunction(info->literal()->kind()) || locals_count == 0);
    OperandStackDepthIncrement(locals_count);
    if (locals_count > 0) {
      if (locals_count >= 128) {
        Label ok;
        __ Dsubu(t1, sp, Operand(locals_count * kPointerSize));
        __ LoadRoot(a2, Heap::kRealStackLimitRootIndex);
        __ Branch(&ok, hs, t1, Operand(a2));
        __ CallRuntime(Runtime::kThrowStackOverflow);
        __ bind(&ok);
      }
      __ LoadRoot(t1, Heap::kUndefinedValueRootIndex);
      int kMaxPushes = FLAG_optimize_for_size ? 4 : 32;
      if (locals_count >= kMaxPushes) {
        int loop_iterations = locals_count / kMaxPushes;
        __ li(a2, Operand(loop_iterations));
        Label loop_header;
        __ bind(&loop_header);
        // Do pushes.
        __ Dsubu(sp, sp, Operand(kMaxPushes * kPointerSize));
        for (int i = 0; i < kMaxPushes; i++) {
          __ sd(t1, MemOperand(sp, i * kPointerSize));
        }
        // Continue loop if not done.
        __ Dsubu(a2, a2, Operand(1));
        __ Branch(&loop_header, ne, a2, Operand(zero_reg));
      }
      int remaining = locals_count % kMaxPushes;
      // Emit the remaining pushes.
      __ Dsubu(sp, sp, Operand(remaining * kPointerSize));
      for (int i = 0; i < remaining; i++) {
        __ sd(t1, MemOperand(sp, i * kPointerSize));
      }
    }
  }
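  // For example, with the non-size-optimized kMaxPushes of 32, a function
  // with 70 stack locals emits a two-iteration loop of 32 stores each,
  // followed by 6 straight-line stores for the remainder (70 = 2 * 32 + 6).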

  bool function_in_register_a1 = true;

  // Possibly allocate a local context.
  if (info->scope()->num_heap_slots() > 0) {
    Comment cmnt(masm_, "[ Allocate context");
    // Argument to NewContext is the function, which is still in a1.
    bool need_write_barrier = true;
    int slots = info->scope()->num_heap_slots() - Context::MIN_CONTEXT_SLOTS;
    if (info->scope()->is_script_scope()) {
      __ push(a1);
      __ Push(info->scope()->GetScopeInfo(info->isolate()));
      __ CallRuntime(Runtime::kNewScriptContext);
      PrepareForBailoutForId(BailoutId::ScriptContext(), TOS_REG);
      // The new target value is not used, clobbering is safe.
      DCHECK_NULL(info->scope()->new_target_var());
    } else {
      if (info->scope()->new_target_var() != nullptr) {
        __ push(a3);  // Preserve new target.
      }
      if (slots <= FastNewContextStub::kMaximumSlots) {
        FastNewContextStub stub(isolate(), slots);
        __ CallStub(&stub);
        // Result of FastNewContextStub is always in new space.
        need_write_barrier = false;
      } else {
        __ push(a1);
        __ CallRuntime(Runtime::kNewFunctionContext);
      }
      if (info->scope()->new_target_var() != nullptr) {
        __ pop(a3);  // Restore new target.
      }
    }
    function_in_register_a1 = false;
    // Context is returned in v0. It replaces the context passed to us.
    // It's saved in the stack and kept live in cp.
    __ mov(cp, v0);
    __ sd(v0, MemOperand(fp, StandardFrameConstants::kContextOffset));
    // Copy any necessary parameters into the context.
    int num_parameters = info->scope()->num_parameters();
    int first_parameter = info->scope()->has_this_declaration() ? -1 : 0;
    for (int i = first_parameter; i < num_parameters; i++) {
      Variable* var = (i == -1) ? scope()->receiver() : scope()->parameter(i);
      if (var->IsContextSlot()) {
        int parameter_offset = StandardFrameConstants::kCallerSPOffset +
                               (num_parameters - 1 - i) * kPointerSize;
        // Load parameter from stack.
        __ ld(a0, MemOperand(fp, parameter_offset));
        // Store it in the context.
        MemOperand target = ContextMemOperand(cp, var->index());
        __ sd(a0, target);

        // Update the write barrier.
        if (need_write_barrier) {
          __ RecordWriteContextSlot(cp, target.offset(), a0, a2,
                                    kRAHasBeenSaved, kDontSaveFPRegs);
        } else if (FLAG_debug_code) {
          Label done;
          __ JumpIfInNewSpace(cp, a0, &done);
          __ Abort(kExpectedNewSpaceObject);
          __ bind(&done);
        }
      }
    }
  }

  // Register holding this function and new target are both trashed in case we
  // bailout here. But since that can happen only when new target is not used
  // and we allocate a context, the value of |function_in_register| is correct.
  PrepareForBailoutForId(BailoutId::FunctionContext(), NO_REGISTERS);

  // Possibly set up a local binding to the this function which is used in
  // derived constructors with super calls.
  Variable* this_function_var = scope()->this_function_var();
  if (this_function_var != nullptr) {
    Comment cmnt(masm_, "[ This function");
    if (!function_in_register_a1) {
      __ ld(a1, MemOperand(fp, JavaScriptFrameConstants::kFunctionOffset));
      // The write barrier clobbers register again, keep it marked as such.
    }
    SetVar(this_function_var, a1, a0, a2);
  }

  Variable* new_target_var = scope()->new_target_var();
  if (new_target_var != nullptr) {
    Comment cmnt(masm_, "[ new.target");
    SetVar(new_target_var, a3, a0, a2);
  }

  // Possibly allocate RestParameters
  int rest_index;
  Variable* rest_param = scope()->rest_parameter(&rest_index);
  if (rest_param) {
    Comment cmnt(masm_, "[ Allocate rest parameter array");
    if (!function_in_register_a1) {
      __ ld(a1, MemOperand(fp, JavaScriptFrameConstants::kFunctionOffset));
    }
    FastNewRestParameterStub stub(isolate());
    __ CallStub(&stub);
    function_in_register_a1 = false;
    SetVar(rest_param, v0, a1, a2);
  }

  Variable* arguments = scope()->arguments();
  if (arguments != NULL) {
    // Function uses arguments object.
    Comment cmnt(masm_, "[ Allocate arguments object");
    if (!function_in_register_a1) {
      // Load this again, if it's used by the local context below.
      __ ld(a1, MemOperand(fp, JavaScriptFrameConstants::kFunctionOffset));
    }
    if (is_strict(language_mode()) || !has_simple_parameters()) {
      FastNewStrictArgumentsStub stub(isolate());
      __ CallStub(&stub);
    } else if (literal()->has_duplicate_parameters()) {
      __ Push(a1);
      __ CallRuntime(Runtime::kNewSloppyArguments_Generic);
    } else {
      FastNewSloppyArgumentsStub stub(isolate());
      __ CallStub(&stub);
    }

    SetVar(arguments, v0, a1, a2);
  }

  if (FLAG_trace) {
    __ CallRuntime(Runtime::kTraceEnter);
  }

  // Visit the declarations and body.
  PrepareForBailoutForId(BailoutId::FunctionEntry(), NO_REGISTERS);
  {
    Comment cmnt(masm_, "[ Declarations");
    VisitDeclarations(scope()->declarations());
  }

  // Assert that the declarations do not use ICs. Otherwise the debugger
  // won't be able to redirect a PC at an IC to the correct IC in newly
  // recompiled code.
  DCHECK_EQ(0, ic_total_count_);

  {
    Comment cmnt(masm_, "[ Stack check");
    PrepareForBailoutForId(BailoutId::Declarations(), NO_REGISTERS);
    Label ok;
    __ LoadRoot(at, Heap::kStackLimitRootIndex);
    __ Branch(&ok, hs, sp, Operand(at));
    Handle<Code> stack_check = isolate()->builtins()->StackCheck();
    PredictableCodeSizeScope predictable(
        masm_, masm_->CallSize(stack_check, RelocInfo::CODE_TARGET));
    __ Call(stack_check, RelocInfo::CODE_TARGET);
    __ bind(&ok);
  }

  {
    Comment cmnt(masm_, "[ Body");
    DCHECK(loop_depth() == 0);

    VisitStatements(literal()->body());

    DCHECK(loop_depth() == 0);
  }

  // Always emit a 'return undefined' in case control fell off the end of
  // the body.
  { Comment cmnt(masm_, "[ return <undefined>;");
    __ LoadRoot(v0, Heap::kUndefinedValueRootIndex);
  }
  EmitReturnSequence();
}


void FullCodeGenerator::ClearAccumulator() {
  DCHECK(Smi::FromInt(0) == 0);
  __ mov(v0, zero_reg);
}


void FullCodeGenerator::EmitProfilingCounterDecrement(int delta) {
  __ li(a2, Operand(profiling_counter_));
  __ ld(a3, FieldMemOperand(a2, Cell::kValueOffset));
  __ Dsubu(a3, a3, Operand(Smi::FromInt(delta)));
  __ sd(a3, FieldMemOperand(a2, Cell::kValueOffset));
}


void FullCodeGenerator::EmitProfilingCounterReset() {
  int reset_value = FLAG_interrupt_budget;
  if (info_->is_debug()) {
    // Detect debug break requests as soon as possible.
    reset_value = FLAG_interrupt_budget >> 4;
  }
  __ li(a2, Operand(profiling_counter_));
  __ li(a3, Operand(Smi::FromInt(reset_value)));
  __ sd(a3, FieldMemOperand(a2, Cell::kValueOffset));
}


void FullCodeGenerator::EmitBackEdgeBookkeeping(IterationStatement* stmt,
                                                Label* back_edge_target) {
  // The generated code is used in Deoptimizer::PatchStackCheckCodeAt so we need
  // to make sure it is constant. Branch may emit a skip-or-jump sequence
  // instead of the normal Branch. It seems that the "skip" part of that
  // sequence is about as long as this Branch would be so it is safe to ignore
  // that.
  Assembler::BlockTrampolinePoolScope block_trampoline_pool(masm_);
  Comment cmnt(masm_, "[ Back edge bookkeeping");
  Label ok;
  DCHECK(back_edge_target->is_bound());
  int distance = masm_->SizeOfCodeGeneratedSince(back_edge_target);
  int weight = Min(kMaxBackEdgeWeight,
                   Max(1, distance / kCodeSizeMultiplier));
  EmitProfilingCounterDecrement(weight);
  __ slt(at, a3, zero_reg);
  __ beq(at, zero_reg, &ok);
  // Call will emit a li t9 first, so it is safe to use the delay slot.
  __ Call(isolate()->builtins()->InterruptCheck(), RelocInfo::CODE_TARGET);
  // Record a mapping of this PC offset to the OSR id. This is used to find
  // the AST id from the unoptimized code in order to use it as a key into
  // the deoptimization input data found in the optimized code.
  RecordBackEdge(stmt->OsrEntryId());
  EmitProfilingCounterReset();

  __ bind(&ok);
  PrepareForBailoutForId(stmt->EntryId(), NO_REGISTERS);
  // Record a mapping of the OSR id to this PC. This is used if the OSR
  // entry becomes the target of a bailout. We don't expect it to be, but
  // we want it to work if it is.
  PrepareForBailoutForId(stmt->OsrEntryId(), NO_REGISTERS);
}
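
// As a worked example of the weight computation above: a back edge whose loop
// body spans roughly 10 * kCodeSizeMultiplier bytes of generated code
// decrements the profiling counter by about 10 per iteration (clamped to
// kMaxBackEdgeWeight), so larger loop bodies exhaust the interrupt budget in
// fewer iterations than tight ones.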

void FullCodeGenerator::EmitProfilingCounterHandlingForReturnSequence(
    bool is_tail_call) {
  // Pretend that the exit is a backwards jump to the entry.
  int weight = 1;
  if (info_->ShouldSelfOptimize()) {
    weight = FLAG_interrupt_budget / FLAG_self_opt_count;
  } else {
    int distance = masm_->pc_offset();
    weight = Min(kMaxBackEdgeWeight, Max(1, distance / kCodeSizeMultiplier));
  }
  EmitProfilingCounterDecrement(weight);
  Label ok;
  __ Branch(&ok, ge, a3, Operand(zero_reg));
  // Don't need to save result register if we are going to do a tail call.
  if (!is_tail_call) {
    __ push(v0);
  }
  __ Call(isolate()->builtins()->InterruptCheck(), RelocInfo::CODE_TARGET);
  if (!is_tail_call) {
    __ pop(v0);
  }
  EmitProfilingCounterReset();
  __ bind(&ok);
}

void FullCodeGenerator::EmitReturnSequence() {
  Comment cmnt(masm_, "[ Return sequence");
  if (return_label_.is_bound()) {
    __ Branch(&return_label_);
  } else {
    __ bind(&return_label_);
    if (FLAG_trace) {
      // Push the return value on the stack as the parameter.
      // Runtime::TraceExit returns its parameter in v0.
      __ push(v0);
      __ CallRuntime(Runtime::kTraceExit);
    }
    EmitProfilingCounterHandlingForReturnSequence(false);

    // Make sure that the constant pool is not emitted inside of the return
    // sequence.
    { Assembler::BlockTrampolinePoolScope block_trampoline_pool(masm_);
      // Here we use masm_-> instead of the __ macro to prevent the code
      // coverage tool from instrumenting this sequence, as we rely on the
      // code size here.
      int32_t arg_count = info_->scope()->num_parameters() + 1;
      int32_t sp_delta = arg_count * kPointerSize;
      SetReturnPosition(literal());
      masm_->mov(sp, fp);
      masm_->MultiPop(static_cast<RegList>(fp.bit() | ra.bit()));
      masm_->Daddu(sp, sp, Operand(sp_delta));
      masm_->Jump(ra);
    }
  }
}
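
// For instance, a function declared with two parameters returns with
// sp_delta = (2 + 1) * kPointerSize = 24 bytes on mips64, dropping the
// receiver and both arguments after the caller's fp and ra are restored.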


void FullCodeGenerator::StackValueContext::Plug(Variable* var) const {
  DCHECK(var->IsStackAllocated() || var->IsContextSlot());
  codegen()->GetVar(result_register(), var);
  codegen()->PushOperand(result_register());
}


void FullCodeGenerator::EffectContext::Plug(Heap::RootListIndex index) const {
}


void FullCodeGenerator::AccumulatorValueContext::Plug(
    Heap::RootListIndex index) const {
  __ LoadRoot(result_register(), index);
}


void FullCodeGenerator::StackValueContext::Plug(
    Heap::RootListIndex index) const {
  __ LoadRoot(result_register(), index);
  codegen()->PushOperand(result_register());
}


void FullCodeGenerator::TestContext::Plug(Heap::RootListIndex index) const {
  codegen()->PrepareForBailoutBeforeSplit(condition(),
                                          true,
                                          true_label_,
                                          false_label_);
  if (index == Heap::kUndefinedValueRootIndex ||
      index == Heap::kNullValueRootIndex ||
      index == Heap::kFalseValueRootIndex) {
    if (false_label_ != fall_through_) __ Branch(false_label_);
  } else if (index == Heap::kTrueValueRootIndex) {
    if (true_label_ != fall_through_) __ Branch(true_label_);
  } else {
    __ LoadRoot(result_register(), index);
    codegen()->DoTest(this);
  }
}


void FullCodeGenerator::EffectContext::Plug(Handle<Object> lit) const {
}


void FullCodeGenerator::AccumulatorValueContext::Plug(
    Handle<Object> lit) const {
  __ li(result_register(), Operand(lit));
}


void FullCodeGenerator::StackValueContext::Plug(Handle<Object> lit) const {
  // Immediates cannot be pushed directly.
  __ li(result_register(), Operand(lit));
  codegen()->PushOperand(result_register());
}


void FullCodeGenerator::TestContext::Plug(Handle<Object> lit) const {
  codegen()->PrepareForBailoutBeforeSplit(condition(),
                                          true,
                                          true_label_,
                                          false_label_);
  DCHECK(lit->IsNull() || lit->IsUndefined() || !lit->IsUndetectable());
  if (lit->IsUndefined() || lit->IsNull() || lit->IsFalse()) {
    if (false_label_ != fall_through_) __ Branch(false_label_);
  } else if (lit->IsTrue() || lit->IsJSObject()) {
    if (true_label_ != fall_through_) __ Branch(true_label_);
  } else if (lit->IsString()) {
    if (String::cast(*lit)->length() == 0) {
      if (false_label_ != fall_through_) __ Branch(false_label_);
    } else {
      if (true_label_ != fall_through_) __ Branch(true_label_);
    }
  } else if (lit->IsSmi()) {
    if (Smi::cast(*lit)->value() == 0) {
      if (false_label_ != fall_through_) __ Branch(false_label_);
    } else {
      if (true_label_ != fall_through_) __ Branch(true_label_);
    }
  } else {
    // For simplicity we always test the accumulator register.
    __ li(result_register(), Operand(lit));
    codegen()->DoTest(this);
  }
}


void FullCodeGenerator::StackValueContext::DropAndPlug(int count,
                                                       Register reg) const {
  DCHECK(count > 0);
  if (count > 1) codegen()->DropOperands(count - 1);
  __ sd(reg, MemOperand(sp, 0));
}


void FullCodeGenerator::EffectContext::Plug(Label* materialize_true,
                                            Label* materialize_false) const {
  DCHECK(materialize_true == materialize_false);
  __ bind(materialize_true);
}


void FullCodeGenerator::AccumulatorValueContext::Plug(
    Label* materialize_true,
    Label* materialize_false) const {
  Label done;
  __ bind(materialize_true);
  __ LoadRoot(result_register(), Heap::kTrueValueRootIndex);
  __ Branch(&done);
  __ bind(materialize_false);
  __ LoadRoot(result_register(), Heap::kFalseValueRootIndex);
  __ bind(&done);
}


void FullCodeGenerator::StackValueContext::Plug(
    Label* materialize_true,
    Label* materialize_false) const {
  codegen()->OperandStackDepthIncrement(1);
  Label done;
  __ bind(materialize_true);
  __ LoadRoot(at, Heap::kTrueValueRootIndex);
  // Push the value as the following branch can clobber at in long branch mode.
  __ push(at);
  __ Branch(&done);
  __ bind(materialize_false);
  __ LoadRoot(at, Heap::kFalseValueRootIndex);
  __ push(at);
  __ bind(&done);
}


void FullCodeGenerator::TestContext::Plug(Label* materialize_true,
                                          Label* materialize_false) const {
  DCHECK(materialize_true == true_label_);
  DCHECK(materialize_false == false_label_);
}


void FullCodeGenerator::AccumulatorValueContext::Plug(bool flag) const {
  Heap::RootListIndex value_root_index =
      flag ? Heap::kTrueValueRootIndex : Heap::kFalseValueRootIndex;
  __ LoadRoot(result_register(), value_root_index);
}


void FullCodeGenerator::StackValueContext::Plug(bool flag) const {
  Heap::RootListIndex value_root_index =
      flag ? Heap::kTrueValueRootIndex : Heap::kFalseValueRootIndex;
  __ LoadRoot(at, value_root_index);
  codegen()->PushOperand(at);
}


void FullCodeGenerator::TestContext::Plug(bool flag) const {
  codegen()->PrepareForBailoutBeforeSplit(condition(),
                                          true,
                                          true_label_,
                                          false_label_);
  if (flag) {
    if (true_label_ != fall_through_) __ Branch(true_label_);
  } else {
    if (false_label_ != fall_through_) __ Branch(false_label_);
  }
}


void FullCodeGenerator::DoTest(Expression* condition,
                               Label* if_true,
                               Label* if_false,
                               Label* fall_through) {
  __ mov(a0, result_register());
  Handle<Code> ic = ToBooleanICStub::GetUninitialized(isolate());
  CallIC(ic, condition->test_id());
  __ LoadRoot(at, Heap::kTrueValueRootIndex);
  Split(eq, result_register(), Operand(at), if_true, if_false, fall_through);
}


void FullCodeGenerator::Split(Condition cc,
                              Register lhs,
                              const Operand& rhs,
                              Label* if_true,
                              Label* if_false,
                              Label* fall_through) {
  if (if_false == fall_through) {
    __ Branch(if_true, cc, lhs, rhs);
  } else if (if_true == fall_through) {
    __ Branch(if_false, NegateCondition(cc), lhs, rhs);
  } else {
    __ Branch(if_true, cc, lhs, rhs);
    __ Branch(if_false);
  }
}


MemOperand FullCodeGenerator::StackOperand(Variable* var) {
  DCHECK(var->IsStackAllocated());
  // Offset is negative because higher indexes are at lower addresses.
  int offset = -var->index() * kPointerSize;
  // Adjust by a (parameter or local) base offset.
  if (var->IsParameter()) {
    offset += (info_->scope()->num_parameters() + 1) * kPointerSize;
  } else {
    offset += JavaScriptFrameConstants::kLocal0Offset;
  }
  return MemOperand(fp, offset);
}


MemOperand FullCodeGenerator::VarOperand(Variable* var, Register scratch) {
  DCHECK(var->IsContextSlot() || var->IsStackAllocated());
  if (var->IsContextSlot()) {
    int context_chain_length = scope()->ContextChainLength(var->scope());
    __ LoadContext(scratch, context_chain_length);
    return ContextMemOperand(scratch, var->index());
  } else {
    return StackOperand(var);
  }
}


void FullCodeGenerator::GetVar(Register dest, Variable* var) {
  // Use destination as scratch.
  MemOperand location = VarOperand(var, dest);
  __ ld(dest, location);
}


void FullCodeGenerator::SetVar(Variable* var,
                               Register src,
                               Register scratch0,
                               Register scratch1) {
  DCHECK(var->IsContextSlot() || var->IsStackAllocated());
  DCHECK(!scratch0.is(src));
  DCHECK(!scratch0.is(scratch1));
  DCHECK(!scratch1.is(src));
  MemOperand location = VarOperand(var, scratch0);
  __ sd(src, location);
  // Emit the write barrier code if the location is in the heap.
  if (var->IsContextSlot()) {
    __ RecordWriteContextSlot(scratch0,
                              location.offset(),
                              src,
                              scratch1,
                              kRAHasBeenSaved,
                              kDontSaveFPRegs);
  }
}


void FullCodeGenerator::PrepareForBailoutBeforeSplit(Expression* expr,
                                                     bool should_normalize,
                                                     Label* if_true,
                                                     Label* if_false) {
  // Only prepare for bailouts before splits if we're in a test
  // context. Otherwise, we let the Visit function deal with the
  // preparation to avoid preparing with the same AST id twice.
  if (!context()->IsTest()) return;

  Label skip;
  if (should_normalize) __ Branch(&skip);
  PrepareForBailout(expr, TOS_REG);
  if (should_normalize) {
    __ LoadRoot(a4, Heap::kTrueValueRootIndex);
    Split(eq, a0, Operand(a4), if_true, if_false, NULL);
    __ bind(&skip);
  }
}


void FullCodeGenerator::EmitDebugCheckDeclarationContext(Variable* variable) {
  // The variable in the declaration always resides in the current function
  // context.
  DCHECK_EQ(0, scope()->ContextChainLength(variable->scope()));
  if (FLAG_debug_code) {
    // Check that we're not inside a with or catch context.
    __ ld(a1, FieldMemOperand(cp, HeapObject::kMapOffset));
    __ LoadRoot(a4, Heap::kWithContextMapRootIndex);
    __ Check(ne, kDeclarationInWithContext,
             a1, Operand(a4));
    __ LoadRoot(a4, Heap::kCatchContextMapRootIndex);
    __ Check(ne, kDeclarationInCatchContext,
             a1, Operand(a4));
  }
}


void FullCodeGenerator::VisitVariableDeclaration(
    VariableDeclaration* declaration) {
  // If it was not possible to allocate the variable at compile time, we
  // need to "declare" it at runtime to make sure it actually exists in the
  // local context.
  VariableProxy* proxy = declaration->proxy();
  VariableMode mode = declaration->mode();
  Variable* variable = proxy->var();
  bool hole_init = mode == LET || mode == CONST || mode == CONST_LEGACY;
  switch (variable->location()) {
    case VariableLocation::GLOBAL:
    case VariableLocation::UNALLOCATED:
      globals_->Add(variable->name(), zone());
      globals_->Add(variable->binding_needs_init()
                        ? isolate()->factory()->the_hole_value()
                        : isolate()->factory()->undefined_value(),
                    zone());
      break;

    case VariableLocation::PARAMETER:
    case VariableLocation::LOCAL:
      if (hole_init) {
        Comment cmnt(masm_, "[ VariableDeclaration");
        __ LoadRoot(a4, Heap::kTheHoleValueRootIndex);
        __ sd(a4, StackOperand(variable));
      }
      break;

    case VariableLocation::CONTEXT:
      if (hole_init) {
        Comment cmnt(masm_, "[ VariableDeclaration");
        EmitDebugCheckDeclarationContext(variable);
        __ LoadRoot(at, Heap::kTheHoleValueRootIndex);
        __ sd(at, ContextMemOperand(cp, variable->index()));
        // No write barrier since the_hole_value is in old space.
        PrepareForBailoutForId(proxy->id(), NO_REGISTERS);
      }
      break;

    case VariableLocation::LOOKUP: {
      Comment cmnt(masm_, "[ VariableDeclaration");
      __ li(a2, Operand(variable->name()));
      // Declaration nodes are always introduced in one of four modes.
      DCHECK(IsDeclaredVariableMode(mode));
      // Push initial value, if any.
      // Note: For variables we must not push an initial value (such as
      // 'undefined') because we may have a (legal) redeclaration and we
      // must not destroy the current value.
      if (hole_init) {
        __ LoadRoot(a0, Heap::kTheHoleValueRootIndex);
      } else {
        DCHECK(Smi::FromInt(0) == 0);
        __ mov(a0, zero_reg);  // Smi::FromInt(0) indicates no initial value.
      }
      __ Push(a2, a0);
      __ Push(Smi::FromInt(variable->DeclarationPropertyAttributes()));
      __ CallRuntime(Runtime::kDeclareLookupSlot);
      break;
    }
  }
}


void FullCodeGenerator::VisitFunctionDeclaration(
    FunctionDeclaration* declaration) {
  VariableProxy* proxy = declaration->proxy();
  Variable* variable = proxy->var();
  switch (variable->location()) {
    case VariableLocation::GLOBAL:
    case VariableLocation::UNALLOCATED: {
      globals_->Add(variable->name(), zone());
      Handle<SharedFunctionInfo> function =
          Compiler::GetSharedFunctionInfo(declaration->fun(), script(), info_);
      // Check for stack-overflow exception.
      if (function.is_null()) return SetStackOverflow();
      globals_->Add(function, zone());
      break;
    }

    case VariableLocation::PARAMETER:
    case VariableLocation::LOCAL: {
      Comment cmnt(masm_, "[ FunctionDeclaration");
      VisitForAccumulatorValue(declaration->fun());
      __ sd(result_register(), StackOperand(variable));
      break;
    }

    case VariableLocation::CONTEXT: {
      Comment cmnt(masm_, "[ FunctionDeclaration");
      EmitDebugCheckDeclarationContext(variable);
      VisitForAccumulatorValue(declaration->fun());
      __ sd(result_register(), ContextMemOperand(cp, variable->index()));
      int offset = Context::SlotOffset(variable->index());
      // We know that we have written a function, which is not a smi.
      __ RecordWriteContextSlot(cp,
                                offset,
                                result_register(),
                                a2,
                                kRAHasBeenSaved,
                                kDontSaveFPRegs,
                                EMIT_REMEMBERED_SET,
                                OMIT_SMI_CHECK);
      PrepareForBailoutForId(proxy->id(), NO_REGISTERS);
      break;
    }

    case VariableLocation::LOOKUP: {
      Comment cmnt(masm_, "[ FunctionDeclaration");
      __ li(a2, Operand(variable->name()));
      PushOperand(a2);
      // Push initial value for function declaration.
      VisitForStackValue(declaration->fun());
      PushOperand(Smi::FromInt(variable->DeclarationPropertyAttributes()));
      CallRuntimeWithOperands(Runtime::kDeclareLookupSlot);
      break;
    }
  }
}


void FullCodeGenerator::DeclareGlobals(Handle<FixedArray> pairs) {
  // Call the runtime to declare the globals.
  __ li(a1, Operand(pairs));
  __ li(a0, Operand(Smi::FromInt(DeclareGlobalsFlags())));
  __ Push(a1, a0);
  __ CallRuntime(Runtime::kDeclareGlobals);
  // Return value is ignored.
}


void FullCodeGenerator::DeclareModules(Handle<FixedArray> descriptions) {
  // Call the runtime to declare the modules.
  __ Push(descriptions);
  __ CallRuntime(Runtime::kDeclareModules);
  // Return value is ignored.
}


void FullCodeGenerator::VisitSwitchStatement(SwitchStatement* stmt) {
  Comment cmnt(masm_, "[ SwitchStatement");
  Breakable nested_statement(this, stmt);
  SetStatementPosition(stmt);

  // Keep the switch value on the stack until a case matches.
  VisitForStackValue(stmt->tag());
  PrepareForBailoutForId(stmt->EntryId(), NO_REGISTERS);

  ZoneList<CaseClause*>* clauses = stmt->cases();
  CaseClause* default_clause = NULL;  // Can occur anywhere in the list.

  Label next_test;  // Recycled for each test.
  // Compile all the tests with branches to their bodies.
  for (int i = 0; i < clauses->length(); i++) {
    CaseClause* clause = clauses->at(i);
    clause->body_target()->Unuse();

    // The default is not a test, but remember it as final fall through.
    if (clause->is_default()) {
      default_clause = clause;
      continue;
    }

    Comment cmnt(masm_, "[ Case comparison");
    __ bind(&next_test);
    next_test.Unuse();

    // Compile the label expression.
    VisitForAccumulatorValue(clause->label());
    __ mov(a0, result_register());  // CompareStub requires args in a0, a1.

    // Perform the comparison as if via '==='.
    __ ld(a1, MemOperand(sp, 0));  // Switch value.
    bool inline_smi_code = ShouldInlineSmiCase(Token::EQ_STRICT);
    JumpPatchSite patch_site(masm_);
    if (inline_smi_code) {
      Label slow_case;
      __ or_(a2, a1, a0);
      patch_site.EmitJumpIfNotSmi(a2, &slow_case);

      __ Branch(&next_test, ne, a1, Operand(a0));
      __ Drop(1);  // Switch value is no longer needed.
      __ Branch(clause->body_target());

      __ bind(&slow_case);
    }

    // Record position before stub call for type feedback.
    SetExpressionPosition(clause);
    Handle<Code> ic =
        CodeFactory::CompareIC(isolate(), Token::EQ_STRICT).code();
    CallIC(ic, clause->CompareId());
    patch_site.EmitPatchInfo();

    Label skip;
    __ Branch(&skip);
    PrepareForBailout(clause, TOS_REG);
    __ LoadRoot(at, Heap::kTrueValueRootIndex);
    __ Branch(&next_test, ne, v0, Operand(at));
    __ Drop(1);
    __ Branch(clause->body_target());
    __ bind(&skip);

    __ Branch(&next_test, ne, v0, Operand(zero_reg));
    __ Drop(1);  // Switch value is no longer needed.
    __ Branch(clause->body_target());
  }

  // Discard the test value and jump to the default if present, otherwise to
  // the end of the statement.
  __ bind(&next_test);
  DropOperands(1);  // Switch value is no longer needed.
  if (default_clause == NULL) {
    __ Branch(nested_statement.break_label());
  } else {
    __ Branch(default_clause->body_target());
  }

  // Compile all the case bodies.
  for (int i = 0; i < clauses->length(); i++) {
    Comment cmnt(masm_, "[ Case body");
    CaseClause* clause = clauses->at(i);
    __ bind(clause->body_target());
    PrepareForBailoutForId(clause->EntryId(), NO_REGISTERS);
    VisitStatements(clause->statements());
  }

  __ bind(nested_statement.break_label());
  PrepareForBailoutForId(stmt->ExitId(), NO_REGISTERS);
}


void FullCodeGenerator::VisitForInStatement(ForInStatement* stmt) {
  Comment cmnt(masm_, "[ ForInStatement");
  SetStatementPosition(stmt, SKIP_BREAK);

  FeedbackVectorSlot slot = stmt->ForInFeedbackSlot();

  // Get the object to enumerate over. If the object is null or undefined, skip
  // over the loop. See ECMA-262 version 5, section 12.6.4.
  SetExpressionAsStatementPosition(stmt->enumerable());
  VisitForAccumulatorValue(stmt->enumerable());
  __ mov(a0, result_register());
  OperandStackDepthIncrement(5);

  Label loop, exit;
  Iteration loop_statement(this, stmt);
  increment_loop_depth();

  // If the object is null or undefined, skip over the loop, otherwise convert
  // it to a JS receiver. See ECMA-262 version 5, section 12.6.4.
  Label convert, done_convert;
  __ JumpIfSmi(a0, &convert);
  __ GetObjectType(a0, a1, a1);
  __ Branch(USE_DELAY_SLOT, &done_convert, ge, a1,
            Operand(FIRST_JS_RECEIVER_TYPE));
  __ LoadRoot(at, Heap::kNullValueRootIndex);  // In delay slot.
  __ Branch(USE_DELAY_SLOT, &exit, eq, a0, Operand(at));
  __ LoadRoot(at, Heap::kUndefinedValueRootIndex);  // In delay slot.
  __ Branch(&exit, eq, a0, Operand(at));
  __ bind(&convert);
  ToObjectStub stub(isolate());
  __ CallStub(&stub);
  __ mov(a0, v0);
  __ bind(&done_convert);
  PrepareForBailoutForId(stmt->ToObjectId(), TOS_REG);
  __ push(a0);

  // Check cache validity in generated code. This is a fast case for
  // the JSObject::IsSimpleEnum cache validity checks. If we cannot
  // guarantee cache validity, call the runtime system to check cache
  // validity or get the property names in a fixed array.
  // Note: Proxies never have an enum cache, so will always take the
  // slow path.
  Label call_runtime;
  __ CheckEnumCache(&call_runtime);

  // The enum cache is valid. Load the map of the object being
  // iterated over and use the cache for the iteration.
  Label use_cache;
  __ ld(v0, FieldMemOperand(a0, HeapObject::kMapOffset));
  __ Branch(&use_cache);

  // Get the set of properties to enumerate.
  __ bind(&call_runtime);
  __ push(a0);  // Duplicate the enumerable object on the stack.
  __ CallRuntime(Runtime::kForInEnumerate);
  PrepareForBailoutForId(stmt->EnumId(), TOS_REG);

  // If we got a map from the runtime call, we can do a fast
  // modification check. Otherwise, we got a fixed array, and we have
  // to do a slow check.
  Label fixed_array;
  __ ld(a2, FieldMemOperand(v0, HeapObject::kMapOffset));
  __ LoadRoot(at, Heap::kMetaMapRootIndex);
  __ Branch(&fixed_array, ne, a2, Operand(at));

  // We got a map in register v0. Get the enumeration cache from it.
  Label no_descriptors;
  __ bind(&use_cache);

  __ EnumLength(a1, v0);
  __ Branch(&no_descriptors, eq, a1, Operand(Smi::FromInt(0)));

  __ LoadInstanceDescriptors(v0, a2);
  __ ld(a2, FieldMemOperand(a2, DescriptorArray::kEnumCacheOffset));
  __ ld(a2, FieldMemOperand(a2, DescriptorArray::kEnumCacheBridgeCacheOffset));

  // Set up the four remaining stack slots.
  __ li(a0, Operand(Smi::FromInt(0)));
  // Push map, enumeration cache, enumeration cache length (as smi) and zero.
  __ Push(v0, a2, a1, a0);
  __ jmp(&loop);

  __ bind(&no_descriptors);
  __ Drop(1);
  __ jmp(&exit);

  // We got a fixed array in register v0. Iterate through that.
  __ bind(&fixed_array);

  __ li(a1, Operand(Smi::FromInt(1)));  // Smi(1) indicates slow check
  __ Push(a1, v0);  // Smi and array
  __ ld(a1, FieldMemOperand(v0, FixedArray::kLengthOffset));
  __ Push(a1);  // Fixed array length (as smi).
  PrepareForBailoutForId(stmt->PrepareId(), NO_REGISTERS);
  __ li(a0, Operand(Smi::FromInt(0)));
  __ Push(a0);  // Initial index.
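
  // At this point the for-in state occupies five stack slots (accounted for
  // by OperandStackDepthIncrement(5) above); reading upwards from sp:
  //   sp[0 * kPointerSize] : current index (smi)
  //   sp[1 * kPointerSize] : length of the enum cache / fixed array (smi)
  //   sp[2 * kPointerSize] : enum cache array or fixed array of keys
  //   sp[3 * kPointerSize] : expected map, or Smi(1) on the slow path
  //   sp[4 * kPointerSize] : the enumerable object itself
  // The loop below reads exactly these slots.
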
  // Generate code for doing the condition check.
  __ bind(&loop);
  SetExpressionAsStatementPosition(stmt->each());

  // Load the current count to a0, load the length to a1.
  __ ld(a0, MemOperand(sp, 0 * kPointerSize));
  __ ld(a1, MemOperand(sp, 1 * kPointerSize));
  __ Branch(loop_statement.break_label(), hs, a0, Operand(a1));

  // Get the current entry of the array into register a3.
  __ ld(a2, MemOperand(sp, 2 * kPointerSize));
  __ Daddu(a2, a2, Operand(FixedArray::kHeaderSize - kHeapObjectTag));
  __ SmiScale(a4, a0, kPointerSizeLog2);
  __ daddu(a4, a2, a4);  // Array base + scaled (smi) index.
  __ ld(a3, MemOperand(a4));  // Current entry.

  // Get the expected map from the stack or a smi in the
  // permanent slow case into register a2.
  __ ld(a2, MemOperand(sp, 3 * kPointerSize));

  // Check if the expected map still matches that of the enumerable.
  // If not, we may have to filter the key.
  Label update_each;
  __ ld(a1, MemOperand(sp, 4 * kPointerSize));
  __ ld(a4, FieldMemOperand(a1, HeapObject::kMapOffset));
  __ Branch(&update_each, eq, a4, Operand(a2));

  // We need to filter the key, record slow-path here.
  int const vector_index = SmiFromSlot(slot)->value();
  __ EmitLoadTypeFeedbackVector(a0);
  __ li(a2, Operand(TypeFeedbackVector::MegamorphicSentinel(isolate())));
  __ sd(a2, FieldMemOperand(a0, FixedArray::OffsetOfElementAt(vector_index)));

  // Convert the entry to a string or (smi) 0 if it isn't a property
  // any more. If the property has been removed while iterating, we
  // just skip it.
  __ Push(a1, a3);  // Enumerable and current entry.
  __ CallRuntime(Runtime::kForInFilter);
  PrepareForBailoutForId(stmt->FilterId(), TOS_REG);
  __ mov(a3, result_register());
  __ LoadRoot(at, Heap::kUndefinedValueRootIndex);
  __ Branch(loop_statement.continue_label(), eq, a3, Operand(at));

  // Update the 'each' property or variable from the possibly filtered
  // entry in register a3.
  __ bind(&update_each);
  __ mov(result_register(), a3);
  // Perform the assignment as if via '='.
  { EffectContext context(this);
    EmitAssignment(stmt->each(), stmt->EachFeedbackSlot());
    PrepareForBailoutForId(stmt->AssignmentId(), NO_REGISTERS);
  }

  // Both Crankshaft and Turbofan expect BodyId to be right before stmt->body().
  PrepareForBailoutForId(stmt->BodyId(), NO_REGISTERS);
  // Generate code for the body of the loop.
  Visit(stmt->body());

  // Generate code for going to the next element by incrementing
  // the index (smi) stored on top of the stack.
  __ bind(loop_statement.continue_label());
  __ pop(a0);
  __ Daddu(a0, a0, Operand(Smi::FromInt(1)));
  __ push(a0);

  EmitBackEdgeBookkeeping(stmt, &loop);
  __ Branch(&loop);

  // Remove the pointers stored on the stack.
  __ bind(loop_statement.break_label());
  DropOperands(5);

  // Exit and decrement the loop depth.
  PrepareForBailoutForId(stmt->ExitId(), NO_REGISTERS);
  __ bind(&exit);
  decrement_loop_depth();
}


void FullCodeGenerator::EmitSetHomeObject(Expression* initializer, int offset,
                                          FeedbackVectorSlot slot) {
  DCHECK(NeedsHomeObject(initializer));
  __ ld(StoreDescriptor::ReceiverRegister(), MemOperand(sp));
  __ li(StoreDescriptor::NameRegister(),
        Operand(isolate()->factory()->home_object_symbol()));
  __ ld(StoreDescriptor::ValueRegister(),
        MemOperand(sp, offset * kPointerSize));
  EmitLoadStoreICSlot(slot);
  CallStoreIC();
}


void FullCodeGenerator::EmitSetHomeObjectAccumulator(Expression* initializer,
                                                     int offset,
                                                     FeedbackVectorSlot slot) {
  DCHECK(NeedsHomeObject(initializer));
  __ Move(StoreDescriptor::ReceiverRegister(), v0);
  __ li(StoreDescriptor::NameRegister(),
        Operand(isolate()->factory()->home_object_symbol()));
  __ ld(StoreDescriptor::ValueRegister(),
        MemOperand(sp, offset * kPointerSize));
  EmitLoadStoreICSlot(slot);
  CallStoreIC();
}


void FullCodeGenerator::EmitLoadGlobalCheckExtensions(VariableProxy* proxy,
                                                      TypeofMode typeof_mode,
                                                      Label* slow) {
  Register current = cp;
  Register next = a1;
  Register temp = a2;

  Scope* s = scope();
  while (s != NULL) {
    if (s->num_heap_slots() > 0) {
      if (s->calls_sloppy_eval()) {
        // Check that extension is "the hole".
        __ ld(temp, ContextMemOperand(current, Context::EXTENSION_INDEX));
        __ JumpIfNotRoot(temp, Heap::kTheHoleValueRootIndex, slow);
      }
      // Load next context in chain.
      __ ld(next, ContextMemOperand(current, Context::PREVIOUS_INDEX));
      // Walk the rest of the chain without clobbering cp.
      current = next;
    }
    // If no outer scope calls eval, we do not need to check more
    // context extensions.
    if (!s->outer_scope_calls_sloppy_eval() || s->is_eval_scope()) break;
    s = s->outer_scope();
  }

  if (s->is_eval_scope()) {
    Label loop, fast;
    if (!current.is(next)) {
      __ Move(next, current);
    }
    __ bind(&loop);
    // Terminate at native context.
    __ ld(temp, FieldMemOperand(next, HeapObject::kMapOffset));
    __ LoadRoot(a4, Heap::kNativeContextMapRootIndex);
    __ Branch(&fast, eq, temp, Operand(a4));
    // Check that extension is "the hole".
    __ ld(temp, ContextMemOperand(next, Context::EXTENSION_INDEX));
    __ JumpIfNotRoot(temp, Heap::kTheHoleValueRootIndex, slow);
    // Load next context in chain.
    __ ld(next, ContextMemOperand(next, Context::PREVIOUS_INDEX));
    __ Branch(&loop);
    __ bind(&fast);
  }

  // All extension objects were empty and it is safe to use a normal global
  // load machinery.
  EmitGlobalVariableLoad(proxy, typeof_mode);
}


MemOperand FullCodeGenerator::ContextSlotOperandCheckExtensions(Variable* var,
                                                                Label* slow) {
  DCHECK(var->IsContextSlot());
  Register context = cp;
  Register next = a3;
  Register temp = a4;

  for (Scope* s = scope(); s != var->scope(); s = s->outer_scope()) {
    if (s->num_heap_slots() > 0) {
      if (s->calls_sloppy_eval()) {
        // Check that extension is "the hole".
        __ ld(temp, ContextMemOperand(context, Context::EXTENSION_INDEX));
        __ JumpIfNotRoot(temp, Heap::kTheHoleValueRootIndex, slow);
      }
      __ ld(next, ContextMemOperand(context, Context::PREVIOUS_INDEX));
      // Walk the rest of the chain without clobbering cp.
      context = next;
    }
  }
  // Check that last extension is "the hole".
  __ ld(temp, ContextMemOperand(context, Context::EXTENSION_INDEX));
  __ JumpIfNotRoot(temp, Heap::kTheHoleValueRootIndex, slow);

  // This function is used only for loads, not stores, so it's safe to
  // return a cp-based operand (the write barrier cannot be allowed to
  // destroy the cp register).
  return ContextMemOperand(context, var->index());
}


void FullCodeGenerator::EmitDynamicLookupFastCase(VariableProxy* proxy,
                                                  TypeofMode typeof_mode,
                                                  Label* slow, Label* done) {
  // Generate fast-case code for variables that might be shadowed by
  // eval-introduced variables. Eval is used a lot without
  // introducing variables. In those cases, we do not want to
  // perform a runtime call for all variables in the scope
  // containing the eval.
  Variable* var = proxy->var();
  if (var->mode() == DYNAMIC_GLOBAL) {
    EmitLoadGlobalCheckExtensions(proxy, typeof_mode, slow);
    __ Branch(done);
  } else if (var->mode() == DYNAMIC_LOCAL) {
    Variable* local = var->local_if_not_shadowed();
    __ ld(v0, ContextSlotOperandCheckExtensions(local, slow));
    if (local->mode() == LET || local->mode() == CONST ||
        local->mode() == CONST_LEGACY) {
      __ LoadRoot(at, Heap::kTheHoleValueRootIndex);
      __ dsubu(at, v0, at);  // Sub as compare: at == 0 on eq.
      if (local->mode() == CONST_LEGACY) {
        __ LoadRoot(a0, Heap::kUndefinedValueRootIndex);
        __ Movz(v0, a0, at);  // Conditional move: return Undefined if TheHole.
      } else {  // LET || CONST
        __ Branch(done, ne, at, Operand(zero_reg));
        __ li(a0, Operand(var->name()));
        __ push(a0);
        __ CallRuntime(Runtime::kThrowReferenceError);
      }
    }
    __ Branch(done);
  }
}


void FullCodeGenerator::EmitGlobalVariableLoad(VariableProxy* proxy,
                                               TypeofMode typeof_mode) {
  Variable* var = proxy->var();
  DCHECK(var->IsUnallocatedOrGlobalSlot() ||
         (var->IsLookupSlot() && var->mode() == DYNAMIC_GLOBAL));
  __ LoadGlobalObject(LoadDescriptor::ReceiverRegister());
  __ li(LoadDescriptor::NameRegister(), Operand(var->name()));
  __ li(LoadDescriptor::SlotRegister(),
        Operand(SmiFromSlot(proxy->VariableFeedbackSlot())));
  CallLoadIC(typeof_mode);
}


void FullCodeGenerator::EmitVariableLoad(VariableProxy* proxy,
                                         TypeofMode typeof_mode) {
  // Record position before possible IC call.
  SetExpressionPosition(proxy);
  PrepareForBailoutForId(proxy->BeforeId(), NO_REGISTERS);
  Variable* var = proxy->var();

  // Three cases: global variables, lookup variables, and all other types of
  // variables.
  switch (var->location()) {
    case VariableLocation::GLOBAL:
    case VariableLocation::UNALLOCATED: {
      Comment cmnt(masm_, "[ Global variable");
      EmitGlobalVariableLoad(proxy, typeof_mode);
      context()->Plug(v0);
      break;
    }

    case VariableLocation::PARAMETER:
    case VariableLocation::LOCAL:
    case VariableLocation::CONTEXT: {
      DCHECK_EQ(NOT_INSIDE_TYPEOF, typeof_mode);
      Comment cmnt(masm_, var->IsContextSlot() ? "[ Context variable"
                                               : "[ Stack variable");
      if (NeedsHoleCheckForLoad(proxy)) {
        // Let and const need a read barrier.
        GetVar(v0, var);
        __ LoadRoot(at, Heap::kTheHoleValueRootIndex);
        __ dsubu(at, v0, at);  // Sub as compare: at == 0 on eq.
        if (var->mode() == LET || var->mode() == CONST) {
          // Throw a reference error when using an uninitialized let/const
          // binding in harmony mode.
          Label done;
          __ Branch(&done, ne, at, Operand(zero_reg));
          __ li(a0, Operand(var->name()));
          __ push(a0);
          __ CallRuntime(Runtime::kThrowReferenceError);
          __ bind(&done);
        } else {
          // Uninitialized legacy const bindings are unholed.
          DCHECK(var->mode() == CONST_LEGACY);
          __ LoadRoot(a0, Heap::kUndefinedValueRootIndex);
          __ Movz(v0, a0, at);  // Conditional move: Undefined if TheHole.
        }
        context()->Plug(v0);
        break;
      }
      context()->Plug(var);
      break;
    }

    case VariableLocation::LOOKUP: {
      Comment cmnt(masm_, "[ Lookup variable");
      Label done, slow;
      // Generate code for loading from variables potentially shadowed
      // by eval-introduced variables.
      EmitDynamicLookupFastCase(proxy, typeof_mode, &slow, &done);
      __ bind(&slow);
      __ Push(var->name());
      Runtime::FunctionId function_id =
          typeof_mode == NOT_INSIDE_TYPEOF
              ? Runtime::kLoadLookupSlot
              : Runtime::kLoadLookupSlotInsideTypeof;
      __ CallRuntime(function_id);
      __ bind(&done);
      context()->Plug(v0);
    }
  }
}
1383
1384
1385void FullCodeGenerator::VisitRegExpLiteral(RegExpLiteral* expr) {
1386 Comment cmnt(masm_, "[ RegExpLiteral");
1387 __ ld(a3, MemOperand(fp, JavaScriptFrameConstants::kFunctionOffset));
1388 __ li(a2, Operand(Smi::FromInt(expr->literal_index())));
1389 __ li(a1, Operand(expr->pattern()));
1390 __ li(a0, Operand(Smi::FromInt(expr->flags())));
1391 FastCloneRegExpStub stub(isolate());
1392 __ CallStub(&stub);
1393 context()->Plug(v0);
1394}
1395
1396
1397void FullCodeGenerator::EmitAccessor(ObjectLiteralProperty* property) {
1398 Expression* expression = (property == NULL) ? NULL : property->value();
1399 if (expression == NULL) {
1400 __ LoadRoot(a1, Heap::kNullValueRootIndex);
Ben Murdoch097c5b22016-05-18 11:27:45 +01001401 PushOperand(a1);
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00001402 } else {
1403 VisitForStackValue(expression);
1404 if (NeedsHomeObject(expression)) {
1405 DCHECK(property->kind() == ObjectLiteral::Property::GETTER ||
1406 property->kind() == ObjectLiteral::Property::SETTER);
1407 int offset = property->kind() == ObjectLiteral::Property::GETTER ? 2 : 3;
1408 EmitSetHomeObject(expression, offset, property->GetSlot());
1409 }
1410 }
1411}
1412
1413
1414void FullCodeGenerator::VisitObjectLiteral(ObjectLiteral* expr) {
1415 Comment cmnt(masm_, "[ ObjectLiteral");
1416
1417 Handle<FixedArray> constant_properties = expr->constant_properties();
1418 __ ld(a3, MemOperand(fp, JavaScriptFrameConstants::kFunctionOffset));
1419 __ li(a2, Operand(Smi::FromInt(expr->literal_index())));
1420 __ li(a1, Operand(constant_properties));
1421 __ li(a0, Operand(Smi::FromInt(expr->ComputeFlags())));
1422 if (MustCreateObjectLiteralWithRuntime(expr)) {
1423 __ Push(a3, a2, a1, a0);
1424 __ CallRuntime(Runtime::kCreateObjectLiteral);
1425 } else {
1426 FastCloneShallowObjectStub stub(isolate(), expr->properties_count());
1427 __ CallStub(&stub);
1428 }
1429 PrepareForBailoutForId(expr->CreateLiteralId(), TOS_REG);
1430
1431 // If result_saved is true the result is on top of the stack. If
1432 // result_saved is false the result is in v0.
1433 bool result_saved = false;
1434
1435 AccessorTable accessor_table(zone());
1436 int property_index = 0;
1437 for (; property_index < expr->properties()->length(); property_index++) {
1438 ObjectLiteral::Property* property = expr->properties()->at(property_index);
1439 if (property->is_computed_name()) break;
1440 if (property->IsCompileTimeValue()) continue;
1441
1442 Literal* key = property->key()->AsLiteral();
1443 Expression* value = property->value();
1444 if (!result_saved) {
Ben Murdoch097c5b22016-05-18 11:27:45 +01001445 PushOperand(v0); // Save result on stack.
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00001446 result_saved = true;
1447 }
1448 switch (property->kind()) {
1449 case ObjectLiteral::Property::CONSTANT:
1450 UNREACHABLE();
1451 case ObjectLiteral::Property::MATERIALIZED_LITERAL:
1452 DCHECK(!CompileTimeValue::IsCompileTimeValue(property->value()));
1453 // Fall through.
1454 case ObjectLiteral::Property::COMPUTED:
1455 // It is safe to use [[Put]] here because the boilerplate already
1456 // contains computed properties with an uninitialized value.
1457 if (key->value()->IsInternalizedString()) {
1458 if (property->emit_store()) {
1459 VisitForAccumulatorValue(value);
1460 __ mov(StoreDescriptor::ValueRegister(), result_register());
1461 DCHECK(StoreDescriptor::ValueRegister().is(a0));
1462 __ li(StoreDescriptor::NameRegister(), Operand(key->value()));
1463 __ ld(StoreDescriptor::ReceiverRegister(), MemOperand(sp));
1464 EmitLoadStoreICSlot(property->GetSlot(0));
1465 CallStoreIC();
1466 PrepareForBailoutForId(key->id(), NO_REGISTERS);
1467
1468 if (NeedsHomeObject(value)) {
1469 EmitSetHomeObjectAccumulator(value, 0, property->GetSlot(1));
1470 }
1471 } else {
1472 VisitForEffect(value);
1473 }
1474 break;
1475 }
1476 // Duplicate receiver on stack.
1477 __ ld(a0, MemOperand(sp));
Ben Murdoch097c5b22016-05-18 11:27:45 +01001478 PushOperand(a0);
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00001479 VisitForStackValue(key);
1480 VisitForStackValue(value);
1481 if (property->emit_store()) {
1482 if (NeedsHomeObject(value)) {
1483 EmitSetHomeObject(value, 2, property->GetSlot());
1484 }
1485 __ li(a0, Operand(Smi::FromInt(SLOPPY))); // PropertyAttributes.
Ben Murdoch097c5b22016-05-18 11:27:45 +01001486 PushOperand(a0);
1487 CallRuntimeWithOperands(Runtime::kSetProperty);
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00001488 } else {
Ben Murdoch097c5b22016-05-18 11:27:45 +01001489 DropOperands(3);
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00001490 }
1491 break;
1492 case ObjectLiteral::Property::PROTOTYPE:
1493 // Duplicate receiver on stack.
1494 __ ld(a0, MemOperand(sp));
Ben Murdoch097c5b22016-05-18 11:27:45 +01001495 PushOperand(a0);
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00001496 VisitForStackValue(value);
1497 DCHECK(property->emit_store());
Ben Murdoch097c5b22016-05-18 11:27:45 +01001498 CallRuntimeWithOperands(Runtime::kInternalSetPrototype);
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00001499 PrepareForBailoutForId(expr->GetIdForPropertySet(property_index),
1500 NO_REGISTERS);
1501 break;
1502 case ObjectLiteral::Property::GETTER:
1503 if (property->emit_store()) {
1504 accessor_table.lookup(key)->second->getter = property;
1505 }
1506 break;
1507 case ObjectLiteral::Property::SETTER:
1508 if (property->emit_store()) {
1509 accessor_table.lookup(key)->second->setter = property;
1510 }
1511 break;
1512 }
1513 }
1514
1515 // Emit code to define accessors, using only a single call to the runtime for
1516 // each pair of corresponding getters and setters.
1517 for (AccessorTable::Iterator it = accessor_table.begin();
1518 it != accessor_table.end();
1519 ++it) {
1520 __ ld(a0, MemOperand(sp)); // Duplicate receiver.
Ben Murdoch097c5b22016-05-18 11:27:45 +01001521 PushOperand(a0);
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00001522 VisitForStackValue(it->first);
1523 EmitAccessor(it->second->getter);
1524 EmitAccessor(it->second->setter);
1525 __ li(a0, Operand(Smi::FromInt(NONE)));
Ben Murdoch097c5b22016-05-18 11:27:45 +01001526 PushOperand(a0);
1527 CallRuntimeWithOperands(Runtime::kDefineAccessorPropertyUnchecked);
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00001528 }
1529
1530 // Object literals have two parts. The "static" part on the left contains no
1531 // computed property names, and so we can compute its map ahead of time; see
1532 // runtime.cc::CreateObjectLiteralBoilerplate. The second "dynamic" part
1533 // starts with the first computed property name, and continues with all
1534 // properties to its right. All the code from above initializes the static
1535 // component of the object literal, and arranges for the map of the result to
1536 // reflect the static order in which the keys appear. For the dynamic
1537 // properties, we compile them into a series of "SetOwnProperty" runtime
1538 // calls. This will preserve insertion order.
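  // Illustrative example (hypothetical literal, not taken from the source):
  // in { a: 1, [f()]: 2, b: 3 } the property 'a' belongs to the static part
  // handled above, while [f()] and everything after it, including 'b', is
  // defined by the per-property runtime calls emitted below.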
1539 for (; property_index < expr->properties()->length(); property_index++) {
1540 ObjectLiteral::Property* property = expr->properties()->at(property_index);
1541
1542 Expression* value = property->value();
1543 if (!result_saved) {
Ben Murdoch097c5b22016-05-18 11:27:45 +01001544 PushOperand(v0); // Save result on the stack
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00001545 result_saved = true;
1546 }
1547
1548 __ ld(a0, MemOperand(sp)); // Duplicate receiver.
Ben Murdoch097c5b22016-05-18 11:27:45 +01001549 PushOperand(a0);
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00001550
1551 if (property->kind() == ObjectLiteral::Property::PROTOTYPE) {
1552 DCHECK(!property->is_computed_name());
1553 VisitForStackValue(value);
1554 DCHECK(property->emit_store());
Ben Murdoch097c5b22016-05-18 11:27:45 +01001555 CallRuntimeWithOperands(Runtime::kInternalSetPrototype);
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00001556 PrepareForBailoutForId(expr->GetIdForPropertySet(property_index),
1557 NO_REGISTERS);
1558 } else {
1559 EmitPropertyKey(property, expr->GetIdForPropertyName(property_index));
1560 VisitForStackValue(value);
1561 if (NeedsHomeObject(value)) {
1562 EmitSetHomeObject(value, 2, property->GetSlot());
1563 }
1564
1565 switch (property->kind()) {
1566 case ObjectLiteral::Property::CONSTANT:
1567 case ObjectLiteral::Property::MATERIALIZED_LITERAL:
1568 case ObjectLiteral::Property::COMPUTED:
1569 if (property->emit_store()) {
Ben Murdoch097c5b22016-05-18 11:27:45 +01001570 PushOperand(Smi::FromInt(NONE));
1571 PushOperand(Smi::FromInt(property->NeedsSetFunctionName()));
1572 CallRuntimeWithOperands(Runtime::kDefineDataPropertyInLiteral);
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00001573 } else {
Ben Murdoch097c5b22016-05-18 11:27:45 +01001574 DropOperands(3);
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00001575 }
1576 break;
1577
1578 case ObjectLiteral::Property::PROTOTYPE:
1579 UNREACHABLE();
1580 break;
1581
1582 case ObjectLiteral::Property::GETTER:
Ben Murdoch097c5b22016-05-18 11:27:45 +01001583 PushOperand(Smi::FromInt(NONE));
1584 CallRuntimeWithOperands(Runtime::kDefineGetterPropertyUnchecked);
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00001585 break;
1586
1587 case ObjectLiteral::Property::SETTER:
Ben Murdoch097c5b22016-05-18 11:27:45 +01001588 PushOperand(Smi::FromInt(NONE));
1589 CallRuntimeWithOperands(Runtime::kDefineSetterPropertyUnchecked);
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00001590 break;
1591 }
1592 }
1593 }
1594
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00001595 if (result_saved) {
1596 context()->PlugTOS();
1597 } else {
1598 context()->Plug(v0);
1599 }
1600}
1601
1602
1603void FullCodeGenerator::VisitArrayLiteral(ArrayLiteral* expr) {
1604 Comment cmnt(masm_, "[ ArrayLiteral");
1605
1606 Handle<FixedArray> constant_elements = expr->constant_elements();
1607 bool has_fast_elements =
1608 IsFastObjectElementsKind(expr->constant_elements_kind());
1609
1610 AllocationSiteMode allocation_site_mode = TRACK_ALLOCATION_SITE;
1611 if (has_fast_elements && !FLAG_allocation_site_pretenuring) {
1612 // If the only customer of allocation sites is transitioning, then
1613 // we can turn it off if we don't have anywhere else to transition to.
1614 allocation_site_mode = DONT_TRACK_ALLOCATION_SITE;
1615 }
1616
1617 __ mov(a0, result_register());
1618 __ ld(a3, MemOperand(fp, JavaScriptFrameConstants::kFunctionOffset));
1619 __ li(a2, Operand(Smi::FromInt(expr->literal_index())));
1620 __ li(a1, Operand(constant_elements));
1621 if (MustCreateArrayLiteralWithRuntime(expr)) {
1622 __ li(a0, Operand(Smi::FromInt(expr->ComputeFlags())));
1623 __ Push(a3, a2, a1, a0);
1624 __ CallRuntime(Runtime::kCreateArrayLiteral);
1625 } else {
1626 FastCloneShallowArrayStub stub(isolate(), allocation_site_mode);
1627 __ CallStub(&stub);
1628 }
1629 PrepareForBailoutForId(expr->CreateLiteralId(), TOS_REG);
1630
1631 bool result_saved = false; // Is the result saved to the stack?
1632 ZoneList<Expression*>* subexprs = expr->values();
1633 int length = subexprs->length();
1634
1635 // Emit code to evaluate all the non-constant subexpressions and to store
1636 // them into the newly cloned array.
1637 int array_index = 0;
1638 for (; array_index < length; array_index++) {
1639 Expression* subexpr = subexprs->at(array_index);
Ben Murdoch097c5b22016-05-18 11:27:45 +01001640 DCHECK(!subexpr->IsSpread());
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00001641
1642 // If the subexpression is a literal or a simple materialized literal it
1643 // is already set in the cloned array.
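    // For example (hypothetical literal): in [1, 2, x] the constants 1 and 2
    // are already part of the boilerplate, so only x needs the keyed store
    // emitted below.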
1644 if (CompileTimeValue::IsCompileTimeValue(subexpr)) continue;
1645
1646 if (!result_saved) {
Ben Murdoch097c5b22016-05-18 11:27:45 +01001647 PushOperand(v0); // array literal
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00001648 result_saved = true;
1649 }
1650
1651 VisitForAccumulatorValue(subexpr);
1652
1653 __ li(StoreDescriptor::NameRegister(), Operand(Smi::FromInt(array_index)));
1654 __ ld(StoreDescriptor::ReceiverRegister(), MemOperand(sp, 0));
1655 __ mov(StoreDescriptor::ValueRegister(), result_register());
1656 EmitLoadStoreICSlot(expr->LiteralFeedbackSlot());
1657 Handle<Code> ic =
1658 CodeFactory::KeyedStoreIC(isolate(), language_mode()).code();
1659 CallIC(ic);
1660
1661 PrepareForBailoutForId(expr->GetIdForElement(array_index), NO_REGISTERS);
1662 }
1663
1664 // In case the array literal contains spread expressions it has two parts.
1665 // The first part is the "static" array with a literal index; it was handled
1666 // above. The second part starts at the first spread expression (inclusive),
1667 // and its elements get appended to the array one by one. Note that the
1668 // number of elements an iterable produces is unknown ahead of time.
1669 if (array_index < length && result_saved) {
Ben Murdoch097c5b22016-05-18 11:27:45 +01001670 PopOperand(v0);
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00001671 result_saved = false;
1672 }
1673 for (; array_index < length; array_index++) {
1674 Expression* subexpr = subexprs->at(array_index);
1675
Ben Murdoch097c5b22016-05-18 11:27:45 +01001676 PushOperand(v0);
1677 DCHECK(!subexpr->IsSpread());
1678 VisitForStackValue(subexpr);
1679 CallRuntimeWithOperands(Runtime::kAppendElement);
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00001680
1681 PrepareForBailoutForId(expr->GetIdForElement(array_index), NO_REGISTERS);
1682 }
1683
1684 if (result_saved) {
1685 context()->PlugTOS();
1686 } else {
1687 context()->Plug(v0);
1688 }
1689}
1690
1691
1692void FullCodeGenerator::VisitAssignment(Assignment* expr) {
1693 DCHECK(expr->target()->IsValidReferenceExpressionOrThis());
1694
1695 Comment cmnt(masm_, "[ Assignment");
1696 SetExpressionPosition(expr, INSERT_BREAK);
1697
1698 Property* property = expr->target()->AsProperty();
1699 LhsKind assign_type = Property::GetAssignType(property);
1700
1701 // Evaluate LHS expression.
1702 switch (assign_type) {
1703 case VARIABLE:
1704 // Nothing to do here.
1705 break;
1706 case NAMED_PROPERTY:
1707 if (expr->is_compound()) {
1708 // We need the receiver both on the stack and in the register.
1709 VisitForStackValue(property->obj());
1710 __ ld(LoadDescriptor::ReceiverRegister(), MemOperand(sp, 0));
1711 } else {
1712 VisitForStackValue(property->obj());
1713 }
1714 break;
1715 case NAMED_SUPER_PROPERTY:
1716 VisitForStackValue(
1717 property->obj()->AsSuperPropertyReference()->this_var());
1718 VisitForAccumulatorValue(
1719 property->obj()->AsSuperPropertyReference()->home_object());
Ben Murdoch097c5b22016-05-18 11:27:45 +01001720 PushOperand(result_register());
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00001721 if (expr->is_compound()) {
1722 const Register scratch = a1;
1723 __ ld(scratch, MemOperand(sp, kPointerSize));
Ben Murdoch097c5b22016-05-18 11:27:45 +01001724 PushOperands(scratch, result_register());
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00001725 }
1726 break;
1727 case KEYED_SUPER_PROPERTY: {
1728 const Register scratch = a1;
1729 VisitForStackValue(
1730 property->obj()->AsSuperPropertyReference()->this_var());
1731 VisitForAccumulatorValue(
1732 property->obj()->AsSuperPropertyReference()->home_object());
1733 __ Move(scratch, result_register());
1734 VisitForAccumulatorValue(property->key());
Ben Murdoch097c5b22016-05-18 11:27:45 +01001735 PushOperands(scratch, result_register());
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00001736 if (expr->is_compound()) {
1737 const Register scratch1 = a4;
1738 __ ld(scratch1, MemOperand(sp, 2 * kPointerSize));
Ben Murdoch097c5b22016-05-18 11:27:45 +01001739 PushOperands(scratch1, scratch, result_register());
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00001740 }
1741 break;
1742 }
1743 case KEYED_PROPERTY:
1744 // We need the key and receiver on both the stack and in v0 and a1.
1745 if (expr->is_compound()) {
1746 VisitForStackValue(property->obj());
1747 VisitForStackValue(property->key());
1748 __ ld(LoadDescriptor::ReceiverRegister(),
1749 MemOperand(sp, 1 * kPointerSize));
1750 __ ld(LoadDescriptor::NameRegister(), MemOperand(sp, 0));
1751 } else {
1752 VisitForStackValue(property->obj());
1753 VisitForStackValue(property->key());
1754 }
1755 break;
1756 }
1757
1758 // For compound assignments we need another deoptimization point after the
1759 // variable/property load.
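  // For example (hypothetical source): for a compound assignment like
  // "o.x += 1" the current value of o.x is loaded first (with its own bailout
  // point), the binary operation is performed below, and only then is the
  // result stored back by the code following this block.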
1760 if (expr->is_compound()) {
1761 { AccumulatorValueContext context(this);
1762 switch (assign_type) {
1763 case VARIABLE:
1764 EmitVariableLoad(expr->target()->AsVariableProxy());
1765 PrepareForBailout(expr->target(), TOS_REG);
1766 break;
1767 case NAMED_PROPERTY:
1768 EmitNamedPropertyLoad(property);
1769 PrepareForBailoutForId(property->LoadId(), TOS_REG);
1770 break;
1771 case NAMED_SUPER_PROPERTY:
1772 EmitNamedSuperPropertyLoad(property);
1773 PrepareForBailoutForId(property->LoadId(), TOS_REG);
1774 break;
1775 case KEYED_SUPER_PROPERTY:
1776 EmitKeyedSuperPropertyLoad(property);
1777 PrepareForBailoutForId(property->LoadId(), TOS_REG);
1778 break;
1779 case KEYED_PROPERTY:
1780 EmitKeyedPropertyLoad(property);
1781 PrepareForBailoutForId(property->LoadId(), TOS_REG);
1782 break;
1783 }
1784 }
1785
1786 Token::Value op = expr->binary_op();
Ben Murdoch097c5b22016-05-18 11:27:45 +01001787 PushOperand(v0); // Left operand goes on the stack.
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00001788 VisitForAccumulatorValue(expr->value());
1789
1790 AccumulatorValueContext context(this);
1791 if (ShouldInlineSmiCase(op)) {
1792 EmitInlineSmiBinaryOp(expr->binary_operation(),
1793 op,
1794 expr->target(),
1795 expr->value());
1796 } else {
1797 EmitBinaryOp(expr->binary_operation(), op);
1798 }
1799
1800 // Deoptimization point in case the binary operation may have side effects.
1801 PrepareForBailout(expr->binary_operation(), TOS_REG);
1802 } else {
1803 VisitForAccumulatorValue(expr->value());
1804 }
1805
1806 SetExpressionPosition(expr);
1807
1808 // Store the value.
1809 switch (assign_type) {
1810 case VARIABLE:
1811 EmitVariableAssignment(expr->target()->AsVariableProxy()->var(),
1812 expr->op(), expr->AssignmentSlot());
1813 PrepareForBailoutForId(expr->AssignmentId(), TOS_REG);
1814 context()->Plug(v0);
1815 break;
1816 case NAMED_PROPERTY:
1817 EmitNamedPropertyAssignment(expr);
1818 break;
1819 case NAMED_SUPER_PROPERTY:
1820 EmitNamedSuperPropertyStore(property);
1821 context()->Plug(v0);
1822 break;
1823 case KEYED_SUPER_PROPERTY:
1824 EmitKeyedSuperPropertyStore(property);
1825 context()->Plug(v0);
1826 break;
1827 case KEYED_PROPERTY:
1828 EmitKeyedPropertyAssignment(expr);
1829 break;
1830 }
1831}
1832
1833
1834void FullCodeGenerator::VisitYield(Yield* expr) {
1835 Comment cmnt(masm_, "[ Yield");
1836 SetExpressionPosition(expr);
1837
1838 // Evaluate yielded value first; the initial iterator definition depends on
1839 // this. It stays on the stack while we update the iterator.
1840 VisitForStackValue(expr->expression());
1841
Ben Murdochda12d292016-06-02 14:46:10 +01001842 Label suspend, continuation, post_runtime, resume;
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00001843
Ben Murdochda12d292016-06-02 14:46:10 +01001844 __ jmp(&suspend);
1845 __ bind(&continuation);
1846 // When we arrive here, the stack top is the resume mode and
1847 // result_register() holds the input value (the argument given to the
1848 // respective resume operation).
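  // The code below branches to the resume label unless the resume mode is
  // RETURN; in the RETURN case the input value is wrapped in an iterator
  // result object and the frame is unwound.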
1849 __ RecordGeneratorContinuation();
1850 __ pop(a1);
1851 __ Branch(&resume, ne, a1, Operand(Smi::FromInt(JSGeneratorObject::RETURN)));
1852 __ push(result_register());
1853 EmitCreateIteratorResult(true);
1854 EmitUnwindAndReturn();
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00001855
Ben Murdochda12d292016-06-02 14:46:10 +01001856 __ bind(&suspend);
1857 OperandStackDepthIncrement(1); // Not popped on this path.
1858 VisitForAccumulatorValue(expr->generator_object());
1859 DCHECK(continuation.pos() > 0 && Smi::IsValid(continuation.pos()));
1860 __ li(a1, Operand(Smi::FromInt(continuation.pos())));
1861 __ sd(a1, FieldMemOperand(v0, JSGeneratorObject::kContinuationOffset));
1862 __ sd(cp, FieldMemOperand(v0, JSGeneratorObject::kContextOffset));
1863 __ mov(a1, cp);
1864 __ RecordWriteField(v0, JSGeneratorObject::kContextOffset, a1, a2,
1865 kRAHasBeenSaved, kDontSaveFPRegs);
1866 __ Daddu(a1, fp, Operand(StandardFrameConstants::kExpressionsOffset));
1867 __ Branch(&post_runtime, eq, sp, Operand(a1));
1868 __ push(v0); // generator object
1869 __ CallRuntime(Runtime::kSuspendJSGeneratorObject, 1);
1870 __ ld(cp, MemOperand(fp, StandardFrameConstants::kContextOffset));
1871 __ bind(&post_runtime);
1872 PopOperand(result_register());
1873 EmitReturnSequence();
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00001874
Ben Murdochda12d292016-06-02 14:46:10 +01001875 __ bind(&resume);
1876 context()->Plug(result_register());
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00001877}
1878
1879
1880void FullCodeGenerator::EmitGeneratorResume(Expression *generator,
1881 Expression *value,
1882 JSGeneratorObject::ResumeMode resume_mode) {
1883 // The value ends up in result_register() (v0), and is ultimately read by the resumed
1884 // generator, as if CallRuntime(Runtime::kSuspendJSGeneratorObject) returned it. Or it
1885 // is read to throw the value when the resumed generator is already closed.
1886 // a1 will hold the generator object until the activation has been resumed.
1887 VisitForStackValue(generator);
1888 VisitForAccumulatorValue(value);
Ben Murdoch097c5b22016-05-18 11:27:45 +01001889 PopOperand(a1);
1890
1891 // Store input value into generator object.
1892 __ sd(result_register(),
1893 FieldMemOperand(a1, JSGeneratorObject::kInputOffset));
1894 __ mov(a2, result_register());
1895 __ RecordWriteField(a1, JSGeneratorObject::kInputOffset, a2, a3,
1896 kRAHasBeenSaved, kDontSaveFPRegs);
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00001897
1898 // Load suspended function and context.
1899 __ ld(cp, FieldMemOperand(a1, JSGeneratorObject::kContextOffset));
1900 __ ld(a4, FieldMemOperand(a1, JSGeneratorObject::kFunctionOffset));
1901
1902 // Load receiver and store as the first argument.
1903 __ ld(a2, FieldMemOperand(a1, JSGeneratorObject::kReceiverOffset));
1904 __ push(a2);
1905
Ben Murdochda12d292016-06-02 14:46:10 +01001906 // Push holes for arguments to generator function. Since the parser forced
1907 // context allocation for any variables in generators, the actual argument
1908 // values have already been copied into the context and these dummy values
1909 // will never be used.
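  // The loop below counts a3 down from the formal parameter count and pushes
  // one hole per iteration, exiting once the counter goes negative.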
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00001910 __ ld(a3, FieldMemOperand(a4, JSFunction::kSharedFunctionInfoOffset));
1911 // The argument count is stored as int32_t on 64-bit platforms.
1912 // TODO(plind): Smi on 32-bit platforms.
1913 __ lw(a3,
1914 FieldMemOperand(a3, SharedFunctionInfo::kFormalParameterCountOffset));
1915 __ LoadRoot(a2, Heap::kTheHoleValueRootIndex);
1916 Label push_argument_holes, push_frame;
1917 __ bind(&push_argument_holes);
1918 __ Dsubu(a3, a3, Operand(1));
1919 __ Branch(&push_frame, lt, a3, Operand(zero_reg));
1920 __ push(a2);
1921 __ jmp(&push_argument_holes);
1922
1923 // Enter a new JavaScript frame, and initialize its slots as they were when
1924 // the generator was suspended.
1925 Label resume_frame, done;
1926 __ bind(&push_frame);
1927 __ Call(&resume_frame);
1928 __ jmp(&done);
1929 __ bind(&resume_frame);
1930 // ra = return address.
1931 // fp = caller's frame pointer.
1932 // cp = callee's context,
1933 // a4 = callee's JS function.
Ben Murdochda12d292016-06-02 14:46:10 +01001934 __ PushStandardFrame(a4);
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00001935
1936 // Load the operand stack size.
1937 __ ld(a3, FieldMemOperand(a1, JSGeneratorObject::kOperandStackOffset));
1938 __ ld(a3, FieldMemOperand(a3, FixedArray::kLengthOffset));
1939 __ SmiUntag(a3);
1940
1941 // If we are sending a value and there is no operand stack, we can jump back
1942 // in directly.
1943 if (resume_mode == JSGeneratorObject::NEXT) {
1944 Label slow_resume;
1945 __ Branch(&slow_resume, ne, a3, Operand(zero_reg));
1946 __ ld(a3, FieldMemOperand(a4, JSFunction::kCodeEntryOffset));
1947 __ ld(a2, FieldMemOperand(a1, JSGeneratorObject::kContinuationOffset));
1948 __ SmiUntag(a2);
1949 __ Daddu(a3, a3, Operand(a2));
1950 __ li(a2, Operand(Smi::FromInt(JSGeneratorObject::kGeneratorExecuting)));
1951 __ sd(a2, FieldMemOperand(a1, JSGeneratorObject::kContinuationOffset));
Ben Murdoch097c5b22016-05-18 11:27:45 +01001952 __ Push(Smi::FromInt(resume_mode)); // Consumed in continuation.
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00001953 __ Jump(a3);
1954 __ bind(&slow_resume);
1955 }
1956
1957 // Otherwise, we push holes for the operand stack and call the runtime to fix
1958 // up the stack and the handlers.
1959 Label push_operand_holes, call_resume;
1960 __ bind(&push_operand_holes);
1961 __ Dsubu(a3, a3, Operand(1));
1962 __ Branch(&call_resume, lt, a3, Operand(zero_reg));
1963 __ push(a2);
1964 __ Branch(&push_operand_holes);
1965 __ bind(&call_resume);
Ben Murdoch097c5b22016-05-18 11:27:45 +01001966 __ Push(Smi::FromInt(resume_mode)); // Consumed in continuation.
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00001967 DCHECK(!result_register().is(a1));
1968 __ Push(a1, result_register());
1969 __ Push(Smi::FromInt(resume_mode));
1970 __ CallRuntime(Runtime::kResumeJSGeneratorObject);
1971 // Not reached: the runtime call returns elsewhere.
1972 __ stop("not-reached");
1973
1974 __ bind(&done);
1975 context()->Plug(result_register());
1976}
1977
Ben Murdoch097c5b22016-05-18 11:27:45 +01001978void FullCodeGenerator::PushOperands(Register reg1, Register reg2) {
1979 OperandStackDepthIncrement(2);
1980 __ Push(reg1, reg2);
1981}
1982
1983void FullCodeGenerator::PushOperands(Register reg1, Register reg2,
1984 Register reg3) {
1985 OperandStackDepthIncrement(3);
1986 __ Push(reg1, reg2, reg3);
1987}
1988
1989void FullCodeGenerator::PushOperands(Register reg1, Register reg2,
1990 Register reg3, Register reg4) {
1991 OperandStackDepthIncrement(4);
1992 __ Push(reg1, reg2, reg3, reg4);
1993}
1994
1995void FullCodeGenerator::PopOperands(Register reg1, Register reg2) {
1996 OperandStackDepthDecrement(2);
1997 __ Pop(reg1, reg2);
1998}
1999
2000void FullCodeGenerator::EmitOperandStackDepthCheck() {
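  // Verify the bookkeeping done by the PushOperand*/PopOperand* helpers
  // above: the distance between fp and sp must equal the fixed frame size
  // plus one pointer-sized slot per tracked operand.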
2001 if (FLAG_debug_code) {
2002 int expected_diff = StandardFrameConstants::kFixedFrameSizeFromFp +
2003 operand_stack_depth_ * kPointerSize;
2004 __ Dsubu(v0, fp, sp);
2005 __ Assert(eq, kUnexpectedStackDepth, v0, Operand(expected_diff));
2006 }
2007}
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00002008
2009void FullCodeGenerator::EmitCreateIteratorResult(bool done) {
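  // Materialize a JSIteratorResult ({value, done}) object: 'value' is popped
  // from the operand stack and 'done' is the compile-time constant passed in.
  // Allocation falls back to the runtime if in-line allocation fails.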
2010 Label allocate, done_allocate;
2011
2012 __ Allocate(JSIteratorResult::kSize, v0, a2, a3, &allocate, TAG_OBJECT);
2013 __ jmp(&done_allocate);
2014
2015 __ bind(&allocate);
2016 __ Push(Smi::FromInt(JSIteratorResult::kSize));
2017 __ CallRuntime(Runtime::kAllocateInNewSpace);
2018
2019 __ bind(&done_allocate);
2020 __ LoadNativeContextSlot(Context::ITERATOR_RESULT_MAP_INDEX, a1);
Ben Murdochda12d292016-06-02 14:46:10 +01002021 PopOperand(a2);
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00002022 __ LoadRoot(a3,
2023 done ? Heap::kTrueValueRootIndex : Heap::kFalseValueRootIndex);
2024 __ LoadRoot(a4, Heap::kEmptyFixedArrayRootIndex);
2025 __ sd(a1, FieldMemOperand(v0, HeapObject::kMapOffset));
2026 __ sd(a4, FieldMemOperand(v0, JSObject::kPropertiesOffset));
2027 __ sd(a4, FieldMemOperand(v0, JSObject::kElementsOffset));
2028 __ sd(a2, FieldMemOperand(v0, JSIteratorResult::kValueOffset));
2029 __ sd(a3, FieldMemOperand(v0, JSIteratorResult::kDoneOffset));
2030 STATIC_ASSERT(JSIteratorResult::kSize == 5 * kPointerSize);
2031}
2032
2033
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00002034void FullCodeGenerator::EmitInlineSmiBinaryOp(BinaryOperation* expr,
2035 Token::Value op,
2036 Expression* left_expr,
2037 Expression* right_expr) {
2038 Label done, smi_case, stub_call;
2039
2040 Register scratch1 = a2;
2041 Register scratch2 = a3;
2042
2043 // Get the arguments.
2044 Register left = a1;
2045 Register right = a0;
Ben Murdoch097c5b22016-05-18 11:27:45 +01002046 PopOperand(left);
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00002047 __ mov(a0, result_register());
2048
2049 // Perform combined smi check on both operands.
2050 __ Or(scratch1, left, Operand(right));
2051 STATIC_ASSERT(kSmiTag == 0);
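  // Since the smi tag is 0, or-ing the two tagged operands leaves the tag bit
  // set iff at least one of them is a heap object, so a single smi test on
  // scratch1 covers both operands.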
2052 JumpPatchSite patch_site(masm_);
2053 patch_site.EmitJumpIfSmi(scratch1, &smi_case);
2054
2055 __ bind(&stub_call);
Ben Murdoch097c5b22016-05-18 11:27:45 +01002056 Handle<Code> code = CodeFactory::BinaryOpIC(isolate(), op).code();
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00002057 CallIC(code, expr->BinaryOperationFeedbackId());
2058 patch_site.EmitPatchInfo();
2059 __ jmp(&done);
2060
2061 __ bind(&smi_case);
2062 // Smi case. This code works the same way as the smi-smi case in the
2063 // type-recording binary operation stub.
2064 switch (op) {
2065 case Token::SAR:
2066 __ GetLeastBitsFromSmi(scratch1, right, 5);
2067 __ dsrav(right, left, scratch1);
2068 __ And(v0, right, Operand(0xffffffff00000000L));
2069 break;
2070 case Token::SHL: {
2071 __ SmiUntag(scratch1, left);
2072 __ GetLeastBitsFromSmi(scratch2, right, 5);
2073 __ dsllv(scratch1, scratch1, scratch2);
2074 __ SmiTag(v0, scratch1);
2075 break;
2076 }
2077 case Token::SHR: {
2078 __ SmiUntag(scratch1, left);
2079 __ GetLeastBitsFromSmi(scratch2, right, 5);
2080 __ dsrlv(scratch1, scratch1, scratch2);
2081 __ And(scratch2, scratch1, 0x80000000);
2082 __ Branch(&stub_call, ne, scratch2, Operand(zero_reg));
2083 __ SmiTag(v0, scratch1);
2084 break;
2085 }
2086 case Token::ADD:
Ben Murdochda12d292016-06-02 14:46:10 +01002087 __ DaddBranchOvf(v0, left, Operand(right), &stub_call);
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00002088 break;
2089 case Token::SUB:
Ben Murdochda12d292016-06-02 14:46:10 +01002090 __ DsubBranchOvf(v0, left, Operand(right), &stub_call);
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00002091 break;
2092 case Token::MUL: {
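      // Sketch of the checks below (assuming 32-bit smis stored in the upper
      // word): Dmulh leaves the untagged 64-bit product in v0; it fits in a
      // smi only if sign-extending its low 32 bits reproduces the high 32
      // bits. A zero product additionally checks the operand signs so that a
      // result of -0 is handled by the stub instead.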
2093 __ Dmulh(v0, left, right);
2094 __ dsra32(scratch2, v0, 0);
2095 __ sra(scratch1, v0, 31);
2096 __ Branch(USE_DELAY_SLOT, &stub_call, ne, scratch2, Operand(scratch1));
2097 __ SmiTag(v0);
2098 __ Branch(USE_DELAY_SLOT, &done, ne, v0, Operand(zero_reg));
2099 __ Daddu(scratch2, right, left);
2100 __ Branch(&stub_call, lt, scratch2, Operand(zero_reg));
2101 DCHECK(Smi::FromInt(0) == 0);
2102 __ mov(v0, zero_reg);
2103 break;
2104 }
2105 case Token::BIT_OR:
2106 __ Or(v0, left, Operand(right));
2107 break;
2108 case Token::BIT_AND:
2109 __ And(v0, left, Operand(right));
2110 break;
2111 case Token::BIT_XOR:
2112 __ Xor(v0, left, Operand(right));
2113 break;
2114 default:
2115 UNREACHABLE();
2116 }
2117
2118 __ bind(&done);
2119 context()->Plug(v0);
2120}
2121
2122
2123void FullCodeGenerator::EmitClassDefineProperties(ClassLiteral* lit) {
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00002124 for (int i = 0; i < lit->properties()->length(); i++) {
2125 ObjectLiteral::Property* property = lit->properties()->at(i);
2126 Expression* value = property->value();
2127
Ben Murdoch097c5b22016-05-18 11:27:45 +01002128 Register scratch = a1;
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00002129 if (property->is_static()) {
2130 __ ld(scratch, MemOperand(sp, kPointerSize)); // constructor
2131 } else {
2132 __ ld(scratch, MemOperand(sp, 0)); // prototype
2133 }
Ben Murdoch097c5b22016-05-18 11:27:45 +01002134 PushOperand(scratch);
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00002135 EmitPropertyKey(property, lit->GetIdForProperty(i));
2136
2137 // The static "prototype" property is read-only. The non-computed property
2138 // name case is handled in the parser. Since this is the only case where we
2139 // need to check for an own read-only property, we special-case it here so
2140 // we do not have to perform the check for every property.
2141 if (property->is_static() && property->is_computed_name()) {
2142 __ CallRuntime(Runtime::kThrowIfStaticPrototype);
2143 __ push(v0);
2144 }
2145
2146 VisitForStackValue(value);
2147 if (NeedsHomeObject(value)) {
2148 EmitSetHomeObject(value, 2, property->GetSlot());
2149 }
2150
2151 switch (property->kind()) {
2152 case ObjectLiteral::Property::CONSTANT:
2153 case ObjectLiteral::Property::MATERIALIZED_LITERAL:
2154 case ObjectLiteral::Property::PROTOTYPE:
2155 UNREACHABLE();
2156 case ObjectLiteral::Property::COMPUTED:
Ben Murdoch097c5b22016-05-18 11:27:45 +01002157 PushOperand(Smi::FromInt(DONT_ENUM));
2158 PushOperand(Smi::FromInt(property->NeedsSetFunctionName()));
2159 CallRuntimeWithOperands(Runtime::kDefineDataPropertyInLiteral);
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00002160 break;
2161
2162 case ObjectLiteral::Property::GETTER:
Ben Murdoch097c5b22016-05-18 11:27:45 +01002163 PushOperand(Smi::FromInt(DONT_ENUM));
2164 CallRuntimeWithOperands(Runtime::kDefineGetterPropertyUnchecked);
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00002165 break;
2166
2167 case ObjectLiteral::Property::SETTER:
Ben Murdoch097c5b22016-05-18 11:27:45 +01002168 PushOperand(Smi::FromInt(DONT_ENUM));
2169 CallRuntimeWithOperands(Runtime::kDefineSetterPropertyUnchecked);
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00002170 break;
2171
2172 default:
2173 UNREACHABLE();
2174 }
2175 }
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00002176}
2177
2178
2179void FullCodeGenerator::EmitBinaryOp(BinaryOperation* expr, Token::Value op) {
2180 __ mov(a0, result_register());
Ben Murdoch097c5b22016-05-18 11:27:45 +01002181 PopOperand(a1);
2182 Handle<Code> code = CodeFactory::BinaryOpIC(isolate(), op).code();
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00002183 JumpPatchSite patch_site(masm_); // unbound, signals no inlined smi code.
2184 CallIC(code, expr->BinaryOperationFeedbackId());
2185 patch_site.EmitPatchInfo();
2186 context()->Plug(v0);
2187}
2188
2189
2190void FullCodeGenerator::EmitAssignment(Expression* expr,
2191 FeedbackVectorSlot slot) {
2192 DCHECK(expr->IsValidReferenceExpressionOrThis());
2193
2194 Property* prop = expr->AsProperty();
2195 LhsKind assign_type = Property::GetAssignType(prop);
2196
2197 switch (assign_type) {
2198 case VARIABLE: {
2199 Variable* var = expr->AsVariableProxy()->var();
2200 EffectContext context(this);
2201 EmitVariableAssignment(var, Token::ASSIGN, slot);
2202 break;
2203 }
2204 case NAMED_PROPERTY: {
Ben Murdoch097c5b22016-05-18 11:27:45 +01002205 PushOperand(result_register()); // Preserve value.
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00002206 VisitForAccumulatorValue(prop->obj());
2207 __ mov(StoreDescriptor::ReceiverRegister(), result_register());
Ben Murdoch097c5b22016-05-18 11:27:45 +01002208 PopOperand(StoreDescriptor::ValueRegister()); // Restore value.
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00002209 __ li(StoreDescriptor::NameRegister(),
2210 Operand(prop->key()->AsLiteral()->value()));
2211 EmitLoadStoreICSlot(slot);
2212 CallStoreIC();
2213 break;
2214 }
2215 case NAMED_SUPER_PROPERTY: {
Ben Murdoch097c5b22016-05-18 11:27:45 +01002216 PushOperand(v0);
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00002217 VisitForStackValue(prop->obj()->AsSuperPropertyReference()->this_var());
2218 VisitForAccumulatorValue(
2219 prop->obj()->AsSuperPropertyReference()->home_object());
2220 // stack: value, this; v0: home_object
2221 Register scratch = a2;
2222 Register scratch2 = a3;
2223 __ mov(scratch, result_register()); // home_object
2224 __ ld(v0, MemOperand(sp, kPointerSize)); // value
2225 __ ld(scratch2, MemOperand(sp, 0)); // this
2226 __ sd(scratch2, MemOperand(sp, kPointerSize)); // this
2227 __ sd(scratch, MemOperand(sp, 0)); // home_object
2228 // stack: this, home_object; v0: value
2229 EmitNamedSuperPropertyStore(prop);
2230 break;
2231 }
2232 case KEYED_SUPER_PROPERTY: {
Ben Murdoch097c5b22016-05-18 11:27:45 +01002233 PushOperand(v0);
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00002234 VisitForStackValue(prop->obj()->AsSuperPropertyReference()->this_var());
2235 VisitForStackValue(
2236 prop->obj()->AsSuperPropertyReference()->home_object());
2237 VisitForAccumulatorValue(prop->key());
2238 Register scratch = a2;
2239 Register scratch2 = a3;
2240 __ ld(scratch2, MemOperand(sp, 2 * kPointerSize)); // value
2241 // stack: value, this, home_object; v0: key, a3: value
2242 __ ld(scratch, MemOperand(sp, kPointerSize)); // this
2243 __ sd(scratch, MemOperand(sp, 2 * kPointerSize));
2244 __ ld(scratch, MemOperand(sp, 0)); // home_object
2245 __ sd(scratch, MemOperand(sp, kPointerSize));
2246 __ sd(v0, MemOperand(sp, 0));
2247 __ Move(v0, scratch2);
2248 // stack: this, home_object, key; v0: value.
2249 EmitKeyedSuperPropertyStore(prop);
2250 break;
2251 }
2252 case KEYED_PROPERTY: {
Ben Murdoch097c5b22016-05-18 11:27:45 +01002253 PushOperand(result_register()); // Preserve value.
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00002254 VisitForStackValue(prop->obj());
2255 VisitForAccumulatorValue(prop->key());
2256 __ Move(StoreDescriptor::NameRegister(), result_register());
Ben Murdoch097c5b22016-05-18 11:27:45 +01002257 PopOperands(StoreDescriptor::ValueRegister(),
2258 StoreDescriptor::ReceiverRegister());
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00002259 EmitLoadStoreICSlot(slot);
2260 Handle<Code> ic =
2261 CodeFactory::KeyedStoreIC(isolate(), language_mode()).code();
2262 CallIC(ic);
2263 break;
2264 }
2265 }
2266 context()->Plug(v0);
2267}
2268
2269
2270void FullCodeGenerator::EmitStoreToStackLocalOrContextSlot(
2271 Variable* var, MemOperand location) {
2272 __ sd(result_register(), location);
2273 if (var->IsContextSlot()) {
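    // Only context slots need the write barrier: the stack is scanned as a
    // root on every GC, whereas a context is a heap object whose updated
    // field must be recorded for the incremental/generational collector.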
2274 // RecordWrite may destroy all its register arguments.
2275 __ Move(a3, result_register());
2276 int offset = Context::SlotOffset(var->index());
2277 __ RecordWriteContextSlot(
2278 a1, offset, a3, a2, kRAHasBeenSaved, kDontSaveFPRegs);
2279 }
2280}
2281
2282
2283void FullCodeGenerator::EmitVariableAssignment(Variable* var, Token::Value op,
2284 FeedbackVectorSlot slot) {
2285 if (var->IsUnallocated()) {
2286 // Global var, const, or let.
2287 __ mov(StoreDescriptor::ValueRegister(), result_register());
2288 __ li(StoreDescriptor::NameRegister(), Operand(var->name()));
2289 __ LoadGlobalObject(StoreDescriptor::ReceiverRegister());
2290 EmitLoadStoreICSlot(slot);
2291 CallStoreIC();
2292
2293 } else if (var->mode() == LET && op != Token::INIT) {
2294 // Non-initializing assignment to let variable needs a write barrier.
2295 DCHECK(!var->IsLookupSlot());
2296 DCHECK(var->IsStackAllocated() || var->IsContextSlot());
2297 Label assign;
2298 MemOperand location = VarOperand(var, a1);
2299 __ ld(a3, location);
2300 __ LoadRoot(a4, Heap::kTheHoleValueRootIndex);
2301 __ Branch(&assign, ne, a3, Operand(a4));
2302 __ li(a3, Operand(var->name()));
2303 __ push(a3);
2304 __ CallRuntime(Runtime::kThrowReferenceError);
2305 // Perform the assignment.
2306 __ bind(&assign);
2307 EmitStoreToStackLocalOrContextSlot(var, location);
2308
2309 } else if (var->mode() == CONST && op != Token::INIT) {
2310 // Assignment to const variable needs a write barrier.
2311 DCHECK(!var->IsLookupSlot());
2312 DCHECK(var->IsStackAllocated() || var->IsContextSlot());
2313 Label const_error;
2314 MemOperand location = VarOperand(var, a1);
2315 __ ld(a3, location);
2316 __ LoadRoot(at, Heap::kTheHoleValueRootIndex);
2317 __ Branch(&const_error, ne, a3, Operand(at));
2318 __ li(a3, Operand(var->name()));
2319 __ push(a3);
2320 __ CallRuntime(Runtime::kThrowReferenceError);
2321 __ bind(&const_error);
2322 __ CallRuntime(Runtime::kThrowConstAssignError);
2323
2324 } else if (var->is_this() && var->mode() == CONST && op == Token::INIT) {
2325 // Initializing assignment to const {this} needs a write barrier.
2326 DCHECK(var->IsStackAllocated() || var->IsContextSlot());
2327 Label uninitialized_this;
2328 MemOperand location = VarOperand(var, a1);
2329 __ ld(a3, location);
2330 __ LoadRoot(at, Heap::kTheHoleValueRootIndex);
2331 __ Branch(&uninitialized_this, eq, a3, Operand(at));
2332 __ li(a0, Operand(var->name()));
2333 __ Push(a0);
2334 __ CallRuntime(Runtime::kThrowReferenceError);
2335 __ bind(&uninitialized_this);
2336 EmitStoreToStackLocalOrContextSlot(var, location);
2337
2338 } else if (!var->is_const_mode() ||
2339 (var->mode() == CONST && op == Token::INIT)) {
2340 if (var->IsLookupSlot()) {
Ben Murdoch097c5b22016-05-18 11:27:45 +01002341 __ Push(var->name());
2342 __ Push(v0);
2343 __ CallRuntime(is_strict(language_mode())
2344 ? Runtime::kStoreLookupSlot_Strict
2345 : Runtime::kStoreLookupSlot_Sloppy);
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00002346 } else {
2347 // Assignment to var or initializing assignment to let/const in harmony
2348 // mode.
2349 DCHECK((var->IsStackAllocated() || var->IsContextSlot()));
2350 MemOperand location = VarOperand(var, a1);
Ben Murdoch097c5b22016-05-18 11:27:45 +01002351 if (FLAG_debug_code && var->mode() == LET && op == Token::INIT) {
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00002352 // Check for an uninitialized let binding.
2353 __ ld(a2, location);
2354 __ LoadRoot(a4, Heap::kTheHoleValueRootIndex);
2355 __ Check(eq, kLetBindingReInitialization, a2, Operand(a4));
2356 }
2357 EmitStoreToStackLocalOrContextSlot(var, location);
2358 }
2359
2360 } else if (var->mode() == CONST_LEGACY && op == Token::INIT) {
2361 // Const initializers need a write barrier.
2362 DCHECK(!var->IsParameter()); // No const parameters.
2363 if (var->IsLookupSlot()) {
2364 __ li(a0, Operand(var->name()));
2365 __ Push(v0, cp, a0); // Context and name.
2366 __ CallRuntime(Runtime::kInitializeLegacyConstLookupSlot);
2367 } else {
2368 DCHECK(var->IsStackAllocated() || var->IsContextSlot());
2369 Label skip;
2370 MemOperand location = VarOperand(var, a1);
2371 __ ld(a2, location);
2372 __ LoadRoot(at, Heap::kTheHoleValueRootIndex);
2373 __ Branch(&skip, ne, a2, Operand(at));
2374 EmitStoreToStackLocalOrContextSlot(var, location);
2375 __ bind(&skip);
2376 }
2377
2378 } else {
2379 DCHECK(var->mode() == CONST_LEGACY && op != Token::INIT);
2380 if (is_strict(language_mode())) {
2381 __ CallRuntime(Runtime::kThrowConstAssignError);
2382 }
2383 // Silently ignore store in sloppy mode.
2384 }
2385}
2386
2387
2388void FullCodeGenerator::EmitNamedPropertyAssignment(Assignment* expr) {
2389 // Assignment to a property, using a named store IC.
2390 Property* prop = expr->target()->AsProperty();
2391 DCHECK(prop != NULL);
2392 DCHECK(prop->key()->IsLiteral());
2393
2394 __ mov(StoreDescriptor::ValueRegister(), result_register());
2395 __ li(StoreDescriptor::NameRegister(),
2396 Operand(prop->key()->AsLiteral()->value()));
Ben Murdoch097c5b22016-05-18 11:27:45 +01002397 PopOperand(StoreDescriptor::ReceiverRegister());
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00002398 EmitLoadStoreICSlot(expr->AssignmentSlot());
2399 CallStoreIC();
2400
2401 PrepareForBailoutForId(expr->AssignmentId(), TOS_REG);
2402 context()->Plug(v0);
2403}
2404
2405
2406void FullCodeGenerator::EmitNamedSuperPropertyStore(Property* prop) {
2407 // Assignment to named property of super.
2408 // v0 : value
2409 // stack : receiver ('this'), home_object
2410 DCHECK(prop != NULL);
2411 Literal* key = prop->key()->AsLiteral();
2412 DCHECK(key != NULL);
2413
Ben Murdoch097c5b22016-05-18 11:27:45 +01002414 PushOperand(key->value());
2415 PushOperand(v0);
2416 CallRuntimeWithOperands(is_strict(language_mode())
2417 ? Runtime::kStoreToSuper_Strict
2418 : Runtime::kStoreToSuper_Sloppy);
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00002419}
2420
2421
2422void FullCodeGenerator::EmitKeyedSuperPropertyStore(Property* prop) {
2423 // Assignment to keyed property of super.
2424 // v0 : value
2425 // stack : receiver ('this'), home_object, key
2426 DCHECK(prop != NULL);
2427
Ben Murdoch097c5b22016-05-18 11:27:45 +01002428 PushOperand(v0);
2429 CallRuntimeWithOperands(is_strict(language_mode())
2430 ? Runtime::kStoreKeyedToSuper_Strict
2431 : Runtime::kStoreKeyedToSuper_Sloppy);
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00002432}
2433
2434
2435void FullCodeGenerator::EmitKeyedPropertyAssignment(Assignment* expr) {
2436 // Assignment to a property, using a keyed store IC.
2437 // Call keyed store IC.
2438 // The arguments are:
2439 // - a0 is the value,
2440 // - a1 is the key,
2441 // - a2 is the receiver.
2442 __ mov(StoreDescriptor::ValueRegister(), result_register());
Ben Murdoch097c5b22016-05-18 11:27:45 +01002443 PopOperands(StoreDescriptor::ReceiverRegister(),
2444 StoreDescriptor::NameRegister());
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00002445 DCHECK(StoreDescriptor::ValueRegister().is(a0));
2446
2447 Handle<Code> ic =
2448 CodeFactory::KeyedStoreIC(isolate(), language_mode()).code();
2449 EmitLoadStoreICSlot(expr->AssignmentSlot());
2450 CallIC(ic);
2451
2452 PrepareForBailoutForId(expr->AssignmentId(), TOS_REG);
2453 context()->Plug(v0);
2454}
2455
2456
2457void FullCodeGenerator::VisitProperty(Property* expr) {
2458 Comment cmnt(masm_, "[ Property");
2459 SetExpressionPosition(expr);
2460
2461 Expression* key = expr->key();
2462
2463 if (key->IsPropertyName()) {
2464 if (!expr->IsSuperAccess()) {
2465 VisitForAccumulatorValue(expr->obj());
2466 __ Move(LoadDescriptor::ReceiverRegister(), v0);
2467 EmitNamedPropertyLoad(expr);
2468 } else {
2469 VisitForStackValue(expr->obj()->AsSuperPropertyReference()->this_var());
2470 VisitForStackValue(
2471 expr->obj()->AsSuperPropertyReference()->home_object());
2472 EmitNamedSuperPropertyLoad(expr);
2473 }
2474 } else {
2475 if (!expr->IsSuperAccess()) {
2476 VisitForStackValue(expr->obj());
2477 VisitForAccumulatorValue(expr->key());
2478 __ Move(LoadDescriptor::NameRegister(), v0);
Ben Murdoch097c5b22016-05-18 11:27:45 +01002479 PopOperand(LoadDescriptor::ReceiverRegister());
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00002480 EmitKeyedPropertyLoad(expr);
2481 } else {
2482 VisitForStackValue(expr->obj()->AsSuperPropertyReference()->this_var());
2483 VisitForStackValue(
2484 expr->obj()->AsSuperPropertyReference()->home_object());
2485 VisitForStackValue(expr->key());
2486 EmitKeyedSuperPropertyLoad(expr);
2487 }
2488 }
2489 PrepareForBailoutForId(expr->LoadId(), TOS_REG);
2490 context()->Plug(v0);
2491}
2492
2493
2494void FullCodeGenerator::CallIC(Handle<Code> code,
2495 TypeFeedbackId id) {
2496 ic_total_count_++;
2497 __ Call(code, RelocInfo::CODE_TARGET, id);
2498}
2499
2500
2501// Code common for calls using the IC.
2502void FullCodeGenerator::EmitCallWithLoadIC(Call* expr) {
2503 Expression* callee = expr->expression();
2504
2505 // Get the target function.
2506 ConvertReceiverMode convert_mode;
2507 if (callee->IsVariableProxy()) {
2508 { StackValueContext context(this);
2509 EmitVariableLoad(callee->AsVariableProxy());
2510 PrepareForBailout(callee, NO_REGISTERS);
2511 }
2512 // Push undefined as receiver. This is patched in the method prologue if it
2513 // is a sloppy mode method.
2514 __ LoadRoot(at, Heap::kUndefinedValueRootIndex);
Ben Murdoch097c5b22016-05-18 11:27:45 +01002515 PushOperand(at);
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00002516 convert_mode = ConvertReceiverMode::kNullOrUndefined;
2517 } else {
2518 // Load the function from the receiver.
2519 DCHECK(callee->IsProperty());
2520 DCHECK(!callee->AsProperty()->IsSuperAccess());
2521 __ ld(LoadDescriptor::ReceiverRegister(), MemOperand(sp, 0));
2522 EmitNamedPropertyLoad(callee->AsProperty());
2523 PrepareForBailoutForId(callee->AsProperty()->LoadId(), TOS_REG);
2524 // Push the target function under the receiver.
2525 __ ld(at, MemOperand(sp, 0));
Ben Murdoch097c5b22016-05-18 11:27:45 +01002526 PushOperand(at);
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00002527 __ sd(v0, MemOperand(sp, kPointerSize));
2528 convert_mode = ConvertReceiverMode::kNotNullOrUndefined;
2529 }
2530
2531 EmitCall(expr, convert_mode);
2532}
2533
2534
2535void FullCodeGenerator::EmitSuperCallWithLoadIC(Call* expr) {
2536 SetExpressionPosition(expr);
2537 Expression* callee = expr->expression();
2538 DCHECK(callee->IsProperty());
2539 Property* prop = callee->AsProperty();
2540 DCHECK(prop->IsSuperAccess());
2541
2542 Literal* key = prop->key()->AsLiteral();
2543 DCHECK(!key->value()->IsSmi());
2544 // Load the function from the receiver.
2545 const Register scratch = a1;
2546 SuperPropertyReference* super_ref = prop->obj()->AsSuperPropertyReference();
2547 VisitForAccumulatorValue(super_ref->home_object());
2548 __ mov(scratch, v0);
2549 VisitForAccumulatorValue(super_ref->this_var());
Ben Murdoch097c5b22016-05-18 11:27:45 +01002550 PushOperands(scratch, v0, v0, scratch);
2551 PushOperand(key->value());
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00002552
2553 // Stack here:
2554 // - home_object
2555 // - this (receiver)
2556 // - this (receiver) <-- LoadFromSuper will pop here and below.
2557 // - home_object
2558 // - key
Ben Murdoch097c5b22016-05-18 11:27:45 +01002559 CallRuntimeWithOperands(Runtime::kLoadFromSuper);
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00002560
2561 // Replace home_object with target function.
2562 __ sd(v0, MemOperand(sp, kPointerSize));
2563
2564 // Stack here:
2565 // - target function
2566 // - this (receiver)
2567 EmitCall(expr);
2568}
2569
2570
2571// Code common for calls using the IC.
2572void FullCodeGenerator::EmitKeyedCallWithLoadIC(Call* expr,
2573 Expression* key) {
2574 // Load the key.
2575 VisitForAccumulatorValue(key);
2576
2577 Expression* callee = expr->expression();
2578
2579 // Load the function from the receiver.
2580 DCHECK(callee->IsProperty());
2581 __ ld(LoadDescriptor::ReceiverRegister(), MemOperand(sp, 0));
2582 __ Move(LoadDescriptor::NameRegister(), v0);
2583 EmitKeyedPropertyLoad(callee->AsProperty());
2584 PrepareForBailoutForId(callee->AsProperty()->LoadId(), TOS_REG);
2585
2586 // Push the target function under the receiver.
2587 __ ld(at, MemOperand(sp, 0));
Ben Murdoch097c5b22016-05-18 11:27:45 +01002588 PushOperand(at);
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00002589 __ sd(v0, MemOperand(sp, kPointerSize));
2590
2591 EmitCall(expr, ConvertReceiverMode::kNotNullOrUndefined);
2592}
2593
2594
2595void FullCodeGenerator::EmitKeyedSuperCallWithLoadIC(Call* expr) {
2596 Expression* callee = expr->expression();
2597 DCHECK(callee->IsProperty());
2598 Property* prop = callee->AsProperty();
2599 DCHECK(prop->IsSuperAccess());
2600
2601 SetExpressionPosition(prop);
2602 // Load the function from the receiver.
2603 const Register scratch = a1;
2604 SuperPropertyReference* super_ref = prop->obj()->AsSuperPropertyReference();
2605 VisitForAccumulatorValue(super_ref->home_object());
2606 __ Move(scratch, v0);
2607 VisitForAccumulatorValue(super_ref->this_var());
Ben Murdoch097c5b22016-05-18 11:27:45 +01002608 PushOperands(scratch, v0, v0, scratch);
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00002609 VisitForStackValue(prop->key());
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00002610
2611 // Stack here:
2612 // - home_object
2613 // - this (receiver)
2614 // - this (receiver) <-- LoadKeyedFromSuper will pop here and below.
2615 // - home_object
2616 // - key
Ben Murdoch097c5b22016-05-18 11:27:45 +01002617 CallRuntimeWithOperands(Runtime::kLoadKeyedFromSuper);
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00002618
2619 // Replace home_object with target function.
2620 __ sd(v0, MemOperand(sp, kPointerSize));
2621
2622 // Stack here:
2623 // - target function
2624 // - this (receiver)
2625 EmitCall(expr);
2626}
2627
2628
2629void FullCodeGenerator::EmitCall(Call* expr, ConvertReceiverMode mode) {
2630 // Load the arguments.
2631 ZoneList<Expression*>* args = expr->arguments();
2632 int arg_count = args->length();
2633 for (int i = 0; i < arg_count; i++) {
2634 VisitForStackValue(args->at(i));
2635 }
2636
2637 PrepareForBailoutForId(expr->CallId(), NO_REGISTERS);
2638 // Record source position of the IC call.
Ben Murdochda12d292016-06-02 14:46:10 +01002639 SetCallPosition(expr, expr->tail_call_mode());
Ben Murdoch097c5b22016-05-18 11:27:45 +01002640 if (expr->tail_call_mode() == TailCallMode::kAllow) {
2641 if (FLAG_trace) {
2642 __ CallRuntime(Runtime::kTraceTailCall);
2643 }
2644 // Update profiling counters before the tail call since we will
2645 // not return to this function.
2646 EmitProfilingCounterHandlingForReturnSequence(true);
2647 }
2648 Handle<Code> ic =
2649 CodeFactory::CallIC(isolate(), arg_count, mode, expr->tail_call_mode())
2650 .code();
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00002651 __ li(a3, Operand(SmiFromSlot(expr->CallFeedbackICSlot())));
2652 __ ld(a1, MemOperand(sp, (arg_count + 1) * kPointerSize));
2653 // Don't assign a type feedback id to the IC, since type feedback is provided
2654 // by the vector above.
2655 CallIC(ic);
Ben Murdoch097c5b22016-05-18 11:27:45 +01002656 OperandStackDepthDecrement(arg_count + 1);
2657
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00002658 RecordJSReturnSite(expr);
2659 // Restore context register.
2660 __ ld(cp, MemOperand(fp, StandardFrameConstants::kContextOffset));
2661 context()->DropAndPlug(1, v0);
2662}
2663
2664
2665void FullCodeGenerator::EmitResolvePossiblyDirectEval(int arg_count) {
2666 // a6: copy of the first argument or undefined if it doesn't exist.
2667 if (arg_count > 0) {
2668 __ ld(a6, MemOperand(sp, arg_count * kPointerSize));
2669 } else {
2670 __ LoadRoot(a6, Heap::kUndefinedValueRootIndex);
2671 }
2672
2673 // a5: the receiver of the enclosing function.
2674 __ ld(a5, MemOperand(fp, JavaScriptFrameConstants::kFunctionOffset));
2675
2676 // a4: the language mode.
2677 __ li(a4, Operand(Smi::FromInt(language_mode())));
2678
2679 // a1: the start position of the scope the call resides in.
2680 __ li(a1, Operand(Smi::FromInt(scope()->start_position())));
2681
2682 // Do the runtime call.
2683 __ Push(a6, a5, a4, a1);
2684 __ CallRuntime(Runtime::kResolvePossiblyDirectEval);
2685}
2686
2687
2688// See http://www.ecma-international.org/ecma-262/6.0/#sec-function-calls.
2689void FullCodeGenerator::PushCalleeAndWithBaseObject(Call* expr) {
2690 VariableProxy* callee = expr->expression()->AsVariableProxy();
2691 if (callee->var()->IsLookupSlot()) {
2692 Label slow, done;
2693
2694 SetExpressionPosition(callee);
2695 // Generate code for loading from variables potentially shadowed by
2696 // eval-introduced variables.
2697 EmitDynamicLookupFastCase(callee, NOT_INSIDE_TYPEOF, &slow, &done);
2698
2699 __ bind(&slow);
2700 // Call the runtime to find the function to call (returned in v0)
2701 // and the object holding it (returned in v1).
Ben Murdoch097c5b22016-05-18 11:27:45 +01002702 __ Push(callee->name());
2703 __ CallRuntime(Runtime::kLoadLookupSlotForCall);
2704 PushOperands(v0, v1); // Function, receiver.
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00002705 PrepareForBailoutForId(expr->LookupId(), NO_REGISTERS);
2706
2707 // If fast case code has been generated, emit code to push the
2708 // function and receiver and have the slow path jump around this
2709 // code.
2710 if (done.is_linked()) {
2711 Label call;
2712 __ Branch(&call);
2713 __ bind(&done);
2714 // Push function.
2715 __ push(v0);
2716 // The receiver is implicitly the global receiver. Indicate this
2717 // by passing the hole to the call function stub.
2718 __ LoadRoot(a1, Heap::kUndefinedValueRootIndex);
2719 __ push(a1);
2720 __ bind(&call);
2721 }
2722 } else {
2723 VisitForStackValue(callee);
2724 // refEnv.WithBaseObject()
2725 __ LoadRoot(a2, Heap::kUndefinedValueRootIndex);
Ben Murdoch097c5b22016-05-18 11:27:45 +01002726 PushOperand(a2); // Reserved receiver slot.
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00002727 }
2728}
2729
2730
2731void FullCodeGenerator::EmitPossiblyEvalCall(Call* expr) {
2732 // In a call to eval, we first call Runtime::kResolvePossiblyDirectEval
2733 // to resolve the function we need to call. Then we call the resolved
2734 // function using the given arguments.
2735 ZoneList<Expression*>* args = expr->arguments();
2736 int arg_count = args->length();
2737 PushCalleeAndWithBaseObject(expr);
2738
2739 // Push the arguments.
2740 for (int i = 0; i < arg_count; i++) {
2741 VisitForStackValue(args->at(i));
2742 }
2743
2744 // Push a copy of the function (found below the arguments) and
2745 // resolve eval.
2746 __ ld(a1, MemOperand(sp, (arg_count + 1) * kPointerSize));
2747 __ push(a1);
2748 EmitResolvePossiblyDirectEval(arg_count);
2749
2750 // Touch up the stack with the resolved function.
2751 __ sd(v0, MemOperand(sp, (arg_count + 1) * kPointerSize));
2752
2753 PrepareForBailoutForId(expr->EvalId(), NO_REGISTERS);
2754 // Record source position for debugger.
2755 SetCallPosition(expr);
2756 __ ld(a1, MemOperand(sp, (arg_count + 1) * kPointerSize));
2757 __ li(a0, Operand(arg_count));
Ben Murdoch097c5b22016-05-18 11:27:45 +01002758 __ Call(isolate()->builtins()->Call(ConvertReceiverMode::kAny,
2759 expr->tail_call_mode()),
2760 RelocInfo::CODE_TARGET);
2761 OperandStackDepthDecrement(arg_count + 1);
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00002762 RecordJSReturnSite(expr);
2763 // Restore context register.
2764 __ ld(cp, MemOperand(fp, StandardFrameConstants::kContextOffset));
2765 context()->DropAndPlug(1, v0);
2766}
2767
2768
2769void FullCodeGenerator::VisitCallNew(CallNew* expr) {
2770 Comment cmnt(masm_, "[ CallNew");
2771 // According to ECMA-262, section 11.2.2, page 44, the function
2772 // expression in new calls must be evaluated before the
2773 // arguments.
2774
2775 // Push constructor on the stack. If it's not a function it's used as
2776 // receiver for CALL_NON_FUNCTION, otherwise the value on the stack is
2777 // ignored.
2778 DCHECK(!expr->expression()->IsSuperPropertyReference());
2779 VisitForStackValue(expr->expression());
2780
2781 // Push the arguments ("left-to-right") on the stack.
2782 ZoneList<Expression*>* args = expr->arguments();
2783 int arg_count = args->length();
2784 for (int i = 0; i < arg_count; i++) {
2785 VisitForStackValue(args->at(i));
2786 }
2787
2788 // Call the construct call builtin that handles allocation and
2789 // constructor invocation.
2790 SetConstructCallPosition(expr);
2791
2792 // Load function and argument count into a1 and a0.
2793 __ li(a0, Operand(arg_count));
2794 __ ld(a1, MemOperand(sp, arg_count * kPointerSize));
2795
2796 // Record call targets in unoptimized code.
2797 __ EmitLoadTypeFeedbackVector(a2);
2798 __ li(a3, Operand(SmiFromSlot(expr->CallNewFeedbackSlot())));
2799
2800 CallConstructStub stub(isolate());
2801 __ Call(stub.GetCode(), RelocInfo::CODE_TARGET);
Ben Murdoch097c5b22016-05-18 11:27:45 +01002802 OperandStackDepthDecrement(arg_count + 1);
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00002803 PrepareForBailoutForId(expr->ReturnId(), TOS_REG);
2804 // Restore context register.
2805 __ ld(cp, MemOperand(fp, StandardFrameConstants::kContextOffset));
2806 context()->Plug(v0);
2807}
2808
2809
2810void FullCodeGenerator::EmitSuperConstructorCall(Call* expr) {
2811 SuperCallReference* super_call_ref =
2812 expr->expression()->AsSuperCallReference();
2813 DCHECK_NOT_NULL(super_call_ref);
2814
2815 // Push the super constructor target on the stack (may be null,
2816 // but the Construct builtin can deal with that properly).
2817 VisitForAccumulatorValue(super_call_ref->this_function_var());
2818 __ AssertFunction(result_register());
2819 __ ld(result_register(),
2820 FieldMemOperand(result_register(), HeapObject::kMapOffset));
2821 __ ld(result_register(),
2822 FieldMemOperand(result_register(), Map::kPrototypeOffset));
Ben Murdoch097c5b22016-05-18 11:27:45 +01002823 PushOperand(result_register());
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00002824
2825 // Push the arguments ("left-to-right") on the stack.
2826 ZoneList<Expression*>* args = expr->arguments();
2827 int arg_count = args->length();
2828 for (int i = 0; i < arg_count; i++) {
2829 VisitForStackValue(args->at(i));
2830 }
2831
2832 // Call the construct call builtin that handles allocation and
2833 // constructor invocation.
2834 SetConstructCallPosition(expr);
2835
2836 // Load new target into a3.
2837 VisitForAccumulatorValue(super_call_ref->new_target_var());
2838 __ mov(a3, result_register());
2839
2840 // Load function and argument count into a1 and a0.
2841 __ li(a0, Operand(arg_count));
2842 __ ld(a1, MemOperand(sp, arg_count * kPointerSize));
2843
2844 __ Call(isolate()->builtins()->Construct(), RelocInfo::CODE_TARGET);
Ben Murdoch097c5b22016-05-18 11:27:45 +01002845 OperandStackDepthDecrement(arg_count + 1);
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00002846
2847 RecordJSReturnSite(expr);
2848
2849 // Restore context register.
2850 __ ld(cp, MemOperand(fp, StandardFrameConstants::kContextOffset));
2851 context()->Plug(v0);
2852}
2853
2854
2855void FullCodeGenerator::EmitIsSmi(CallRuntime* expr) {
2856 ZoneList<Expression*>* args = expr->arguments();
2857 DCHECK(args->length() == 1);
2858
2859 VisitForAccumulatorValue(args->at(0));
2860
2861 Label materialize_true, materialize_false;
2862 Label* if_true = NULL;
2863 Label* if_false = NULL;
2864 Label* fall_through = NULL;
2865 context()->PrepareTest(&materialize_true, &materialize_false,
2866 &if_true, &if_false, &fall_through);
2867
2868 PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
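  // SmiTst extracts the smi tag bit into a4; a zero result means the value in
  // v0 is a smi, so control is split to the if_true branch in that case.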
2869 __ SmiTst(v0, a4);
2870 Split(eq, a4, Operand(zero_reg), if_true, if_false, fall_through);
2871
2872 context()->Plug(if_true, if_false);
2873}
2874
2875
2876void FullCodeGenerator::EmitIsJSReceiver(CallRuntime* expr) {
2877 ZoneList<Expression*>* args = expr->arguments();
2878 DCHECK(args->length() == 1);
2879
2880 VisitForAccumulatorValue(args->at(0));
2881
2882 Label materialize_true, materialize_false;
2883 Label* if_true = NULL;
2884 Label* if_false = NULL;
2885 Label* fall_through = NULL;
2886 context()->PrepareTest(&materialize_true, &materialize_false,
2887 &if_true, &if_false, &fall_through);
2888
2889 __ JumpIfSmi(v0, if_false);
2890 __ GetObjectType(v0, a1, a1);
2891 PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
2892 Split(ge, a1, Operand(FIRST_JS_RECEIVER_TYPE),
2893 if_true, if_false, fall_through);
2894
2895 context()->Plug(if_true, if_false);
2896}
2897
2898
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00002899void FullCodeGenerator::EmitIsArray(CallRuntime* expr) {
2900 ZoneList<Expression*>* args = expr->arguments();
2901 DCHECK(args->length() == 1);
2902
2903 VisitForAccumulatorValue(args->at(0));
2904
2905 Label materialize_true, materialize_false;
2906 Label* if_true = NULL;
2907 Label* if_false = NULL;
2908 Label* fall_through = NULL;
2909 context()->PrepareTest(&materialize_true, &materialize_false,
2910 &if_true, &if_false, &fall_through);
2911
2912 __ JumpIfSmi(v0, if_false);
2913 __ GetObjectType(v0, a1, a1);
2914 PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
2915 Split(eq, a1, Operand(JS_ARRAY_TYPE),
2916 if_true, if_false, fall_through);
2917
2918 context()->Plug(if_true, if_false);
2919}
2920
2921
2922void FullCodeGenerator::EmitIsTypedArray(CallRuntime* expr) {
2923 ZoneList<Expression*>* args = expr->arguments();
2924 DCHECK(args->length() == 1);
2925
2926 VisitForAccumulatorValue(args->at(0));
2927
2928 Label materialize_true, materialize_false;
2929 Label* if_true = NULL;
2930 Label* if_false = NULL;
2931 Label* fall_through = NULL;
2932 context()->PrepareTest(&materialize_true, &materialize_false, &if_true,
2933 &if_false, &fall_through);
2934
2935 __ JumpIfSmi(v0, if_false);
2936 __ GetObjectType(v0, a1, a1);
2937 PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
2938 Split(eq, a1, Operand(JS_TYPED_ARRAY_TYPE), if_true, if_false, fall_through);
2939
2940 context()->Plug(if_true, if_false);
2941}
2942
2943
2944void FullCodeGenerator::EmitIsRegExp(CallRuntime* expr) {
2945 ZoneList<Expression*>* args = expr->arguments();
2946 DCHECK(args->length() == 1);
2947
2948 VisitForAccumulatorValue(args->at(0));
2949
2950 Label materialize_true, materialize_false;
2951 Label* if_true = NULL;
2952 Label* if_false = NULL;
2953 Label* fall_through = NULL;
2954 context()->PrepareTest(&materialize_true, &materialize_false,
2955 &if_true, &if_false, &fall_through);
2956
2957 __ JumpIfSmi(v0, if_false);
2958 __ GetObjectType(v0, a1, a1);
2959 PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
2960 Split(eq, a1, Operand(JS_REGEXP_TYPE), if_true, if_false, fall_through);
2961
2962 context()->Plug(if_true, if_false);
2963}
2964
2965
2966void FullCodeGenerator::EmitIsJSProxy(CallRuntime* expr) {
2967 ZoneList<Expression*>* args = expr->arguments();
2968 DCHECK(args->length() == 1);
2969
2970 VisitForAccumulatorValue(args->at(0));
2971
2972 Label materialize_true, materialize_false;
2973 Label* if_true = NULL;
2974 Label* if_false = NULL;
2975 Label* fall_through = NULL;
2976 context()->PrepareTest(&materialize_true, &materialize_false, &if_true,
2977 &if_false, &fall_through);
2978
2979 __ JumpIfSmi(v0, if_false);
2980 __ GetObjectType(v0, a1, a1);
2981 PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
2982 Split(eq, a1, Operand(JS_PROXY_TYPE), if_true, if_false, fall_through);
2983
2984 context()->Plug(if_true, if_false);
2985}
2986
2987
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00002988void FullCodeGenerator::EmitClassOf(CallRuntime* expr) {
2989 ZoneList<Expression*>* args = expr->arguments();
2990 DCHECK(args->length() == 1);
2991 Label done, null, function, non_function_constructor;
2992
2993 VisitForAccumulatorValue(args->at(0));
2994
2995 // If the object is not a JSReceiver, we return null.
2996 __ JumpIfSmi(v0, &null);
2997 STATIC_ASSERT(LAST_JS_RECEIVER_TYPE == LAST_TYPE);
2998 __ GetObjectType(v0, v0, a1); // Map is now in v0.
2999 __ Branch(&null, lt, a1, Operand(FIRST_JS_RECEIVER_TYPE));
3000
Ben Murdochda12d292016-06-02 14:46:10 +01003001 // Return 'Function' for JSFunction and JSBoundFunction objects.
3002 STATIC_ASSERT(LAST_FUNCTION_TYPE == LAST_TYPE);
3003 __ Branch(&function, hs, a1, Operand(FIRST_FUNCTION_TYPE));
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00003004
3005 // Check if the constructor in the map is a JS function.
3006 Register instance_type = a2;
3007 __ GetMapConstructor(v0, v0, a1, instance_type);
3008 __ Branch(&non_function_constructor, ne, instance_type,
3009 Operand(JS_FUNCTION_TYPE));
3010
3011 // v0 now contains the constructor function. Grab the
3012 // instance class name from there.
3013 __ ld(v0, FieldMemOperand(v0, JSFunction::kSharedFunctionInfoOffset));
3014 __ ld(v0, FieldMemOperand(v0, SharedFunctionInfo::kInstanceClassNameOffset));
3015 __ Branch(&done);
3016
3017 // Functions have class 'Function'.
3018 __ bind(&function);
3019 __ LoadRoot(v0, Heap::kFunction_stringRootIndex);
3020 __ jmp(&done);
3021
3022 // Objects with a non-function constructor have class 'Object'.
3023 __ bind(&non_function_constructor);
3024 __ LoadRoot(v0, Heap::kObject_stringRootIndex);
3025 __ jmp(&done);
3026
3027 // Non-JS objects have class null.
3028 __ bind(&null);
3029 __ LoadRoot(v0, Heap::kNullValueRootIndex);
3030
3031 // All done.
3032 __ bind(&done);
3033
3034 context()->Plug(v0);
3035}
3036
3037
3038void FullCodeGenerator::EmitValueOf(CallRuntime* expr) {
3039 ZoneList<Expression*>* args = expr->arguments();
3040 DCHECK(args->length() == 1);
3041
3042 VisitForAccumulatorValue(args->at(0)); // Load the object.
3043
3044 Label done;
3045 // If the object is a smi, return the object.

3046 __ JumpIfSmi(v0, &done);
3047 // If the object is not a value type, return the object.
3048 __ GetObjectType(v0, a1, a1);
3049 __ Branch(&done, ne, a1, Operand(JS_VALUE_TYPE));
3050
3051 __ ld(v0, FieldMemOperand(v0, JSValue::kValueOffset));
3052
3053 __ bind(&done);
3054 context()->Plug(v0);
3055}
3056
3057
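// %_OneByteSeqStringSetChar(index, value, string): stores a one-byte
// character code into a sequential one-byte string. The caller guarantees
// Smi index/value operands and a string of the right type; those
// invariants are only re-checked here when FLAG_debug_code is set.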
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00003058void FullCodeGenerator::EmitOneByteSeqStringSetChar(CallRuntime* expr) {
3059 ZoneList<Expression*>* args = expr->arguments();
3060 DCHECK_EQ(3, args->length());
3061
3062 Register string = v0;
3063 Register index = a1;
3064 Register value = a2;
3065
3066 VisitForStackValue(args->at(0)); // index
3067 VisitForStackValue(args->at(1)); // value
3068 VisitForAccumulatorValue(args->at(2)); // string
Ben Murdoch097c5b22016-05-18 11:27:45 +01003069 PopOperands(index, value);
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00003070
3071 if (FLAG_debug_code) {
3072 __ SmiTst(value, at);
3073 __ Check(eq, kNonSmiValue, at, Operand(zero_reg));
3074 __ SmiTst(index, at);
3075 __ Check(eq, kNonSmiIndex, at, Operand(zero_reg));
3076 __ SmiUntag(index, index);
3077 static const uint32_t one_byte_seq_type = kSeqStringTag | kOneByteStringTag;
3078 Register scratch = t1;
3079 __ EmitSeqStringSetCharCheck(
3080 string, index, value, scratch, one_byte_seq_type);
3081 __ SmiTag(index, index);
3082 }
3083
3084 __ SmiUntag(value, value);
3085 __ Daddu(at,
3086 string,
3087 Operand(SeqOneByteString::kHeaderSize - kHeapObjectTag));
3088 __ SmiUntag(index);
3089 __ Daddu(at, at, index);
3090 __ sb(value, MemOperand(at));
3091 context()->Plug(string);
3092}
3093
3094
3095void FullCodeGenerator::EmitTwoByteSeqStringSetChar(CallRuntime* expr) {
3096 ZoneList<Expression*>* args = expr->arguments();
3097 DCHECK_EQ(3, args->length());
3098
3099 Register string = v0;
3100 Register index = a1;
3101 Register value = a2;
3102
3103 VisitForStackValue(args->at(0)); // index
3104 VisitForStackValue(args->at(1)); // value
3105 VisitForAccumulatorValue(args->at(2)); // string
Ben Murdoch097c5b22016-05-18 11:27:45 +01003106 PopOperands(index, value);
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00003107
3108 if (FLAG_debug_code) {
3109 __ SmiTst(value, at);
3110 __ Check(eq, kNonSmiValue, at, Operand(zero_reg));
3111 __ SmiTst(index, at);
3112 __ Check(eq, kNonSmiIndex, at, Operand(zero_reg));
3113 __ SmiUntag(index, index);
3114 static const uint32_t two_byte_seq_type = kSeqStringTag | kTwoByteStringTag;
3115 Register scratch = t1;
3116 __ EmitSeqStringSetCharCheck(
3117 string, index, value, scratch, two_byte_seq_type);
3118 __ SmiTag(index, index);
3119 }
3120
3121 __ SmiUntag(value, value);
3122 __ Daddu(at,
3123 string,
3124 Operand(SeqTwoByteString::kHeaderSize - kHeapObjectTag));
3125 __ dsra(index, index, 32 - 1);
3126 __ Daddu(at, at, index);
3127 STATIC_ASSERT(kSmiTagSize == 1 && kSmiTag == 0);
3128 __ sh(value, MemOperand(at));
3129 context()->Plug(string);
3130}
3131
3132
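// %_StringCharFromCode(code): converts a character code into a
// single-character string via StringCharFromCodeGenerator, with a slow
// path (driven by NopRuntimeCallHelper) for codes the fast path cannot
// handle.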
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00003133void FullCodeGenerator::EmitStringCharFromCode(CallRuntime* expr) {
3134 ZoneList<Expression*>* args = expr->arguments();
3135 DCHECK(args->length() == 1);
3136
3137 VisitForAccumulatorValue(args->at(0));
3138
3139 Label done;
3140 StringCharFromCodeGenerator generator(v0, a1);
3141 generator.GenerateFast(masm_);
3142 __ jmp(&done);
3143
3144 NopRuntimeCallHelper call_helper;
3145 generator.GenerateSlow(masm_, call_helper);
3146
3147 __ bind(&done);
3148 context()->Plug(a1);
3149}
3150
3151
3152void FullCodeGenerator::EmitStringCharCodeAt(CallRuntime* expr) {
3153 ZoneList<Expression*>* args = expr->arguments();
3154 DCHECK(args->length() == 2);
3155
3156 VisitForStackValue(args->at(0));
3157 VisitForAccumulatorValue(args->at(1));
3158 __ mov(a0, result_register());
3159
3160 Register object = a1;
3161 Register index = a0;
3162 Register result = v0;
3163
Ben Murdoch097c5b22016-05-18 11:27:45 +01003164 PopOperand(object);
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00003165
3166 Label need_conversion;
3167 Label index_out_of_range;
3168 Label done;
3169 StringCharCodeAtGenerator generator(object,
3170 index,
3171 result,
3172 &need_conversion,
3173 &need_conversion,
3174 &index_out_of_range,
3175 STRING_INDEX_IS_NUMBER);
3176 generator.GenerateFast(masm_);
3177 __ jmp(&done);
3178
3179 __ bind(&index_out_of_range);
3180 // When the index is out of range, the spec requires us to return
3181 // NaN.
3182 __ LoadRoot(result, Heap::kNanValueRootIndex);
3183 __ jmp(&done);
3184
3185 __ bind(&need_conversion);
3186 // Load the undefined value into the result register, which will
3187 // trigger conversion.
3188 __ LoadRoot(result, Heap::kUndefinedValueRootIndex);
3189 __ jmp(&done);
3190
3191 NopRuntimeCallHelper call_helper;
3192 generator.GenerateSlow(masm_, NOT_PART_OF_IC_HANDLER, call_helper);
3193
3194 __ bind(&done);
3195 context()->Plug(result);
3196}
3197
3198
3199void FullCodeGenerator::EmitStringCharAt(CallRuntime* expr) {
3200 ZoneList<Expression*>* args = expr->arguments();
3201 DCHECK(args->length() == 2);
3202
3203 VisitForStackValue(args->at(0));
3204 VisitForAccumulatorValue(args->at(1));
3205 __ mov(a0, result_register());
3206
3207 Register object = a1;
3208 Register index = a0;
3209 Register scratch = a3;
3210 Register result = v0;
3211
Ben Murdoch097c5b22016-05-18 11:27:45 +01003212 PopOperand(object);
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00003213
3214 Label need_conversion;
3215 Label index_out_of_range;
3216 Label done;
3217 StringCharAtGenerator generator(object,
3218 index,
3219 scratch,
3220 result,
3221 &need_conversion,
3222 &need_conversion,
3223 &index_out_of_range,
3224 STRING_INDEX_IS_NUMBER);
3225 generator.GenerateFast(masm_);
3226 __ jmp(&done);
3227
3228 __ bind(&index_out_of_range);
3229 // When the index is out of range, the spec requires us to return
3230 // the empty string.
3231 __ LoadRoot(result, Heap::kempty_stringRootIndex);
3232 __ jmp(&done);
3233
3234 __ bind(&need_conversion);
3235 // Move smi zero into the result register, which will trigger
3236 // conversion.
3237 __ li(result, Operand(Smi::FromInt(0)));
3238 __ jmp(&done);
3239
3240 NopRuntimeCallHelper call_helper;
3241 generator.GenerateSlow(masm_, NOT_PART_OF_IC_HANDLER, call_helper);
3242
3243 __ bind(&done);
3244 context()->Plug(result);
3245}
3246
3247
3248void FullCodeGenerator::EmitCall(CallRuntime* expr) {
3249 ZoneList<Expression*>* args = expr->arguments();
3250 DCHECK_LE(2, args->length());
3251 // Push target, receiver and arguments onto the stack.
3252 for (Expression* const arg : *args) {
3253 VisitForStackValue(arg);
3254 }
3255 PrepareForBailoutForId(expr->CallId(), NO_REGISTERS);
3256 // Move target to a1.
3257 int const argc = args->length() - 2;
3258 __ ld(a1, MemOperand(sp, (argc + 1) * kPointerSize));
3259 // Call the target.
3260 __ li(a0, Operand(argc));
3261 __ Call(isolate()->builtins()->Call(), RelocInfo::CODE_TARGET);
Ben Murdoch097c5b22016-05-18 11:27:45 +01003262 OperandStackDepthDecrement(argc + 1);
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00003263 // Restore context register.
3264 __ ld(cp, MemOperand(fp, StandardFrameConstants::kContextOffset));
3265 // Discard the function left on TOS.
3266 context()->DropAndPlug(1, v0);
3267}
3268
3269
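// %_HasCachedArrayIndex(string): tests the string's hash field; if the
// "contains cached array index" bits are all clear, the string caches a
// valid array index and the test succeeds.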
3270void FullCodeGenerator::EmitHasCachedArrayIndex(CallRuntime* expr) {
3271 ZoneList<Expression*>* args = expr->arguments();
3272 VisitForAccumulatorValue(args->at(0));
3273
3274 Label materialize_true, materialize_false;
3275 Label* if_true = NULL;
3276 Label* if_false = NULL;
3277 Label* fall_through = NULL;
3278 context()->PrepareTest(&materialize_true, &materialize_false,
3279 &if_true, &if_false, &fall_through);
3280
3281 __ lwu(a0, FieldMemOperand(v0, String::kHashFieldOffset));
3282 __ And(a0, a0, Operand(String::kContainsCachedArrayIndexMask));
3283
3284 PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
3285 Split(eq, a0, Operand(zero_reg), if_true, if_false, fall_through);
3286
3287 context()->Plug(if_true, if_false);
3288}
3289
3290
3291void FullCodeGenerator::EmitGetCachedArrayIndex(CallRuntime* expr) {
3292 ZoneList<Expression*>* args = expr->arguments();
3293 DCHECK(args->length() == 1);
3294 VisitForAccumulatorValue(args->at(0));
3295
3296 __ AssertString(v0);
3297
3298 __ lwu(v0, FieldMemOperand(v0, String::kHashFieldOffset));
3299 __ IndexFromHash(v0, v0);
3300
3301 context()->Plug(v0);
3302}
3303
3304
3305void FullCodeGenerator::EmitGetSuperConstructor(CallRuntime* expr) {
3306 ZoneList<Expression*>* args = expr->arguments();
3307 DCHECK_EQ(1, args->length());
3308 VisitForAccumulatorValue(args->at(0));
3309 __ AssertFunction(v0);
3310 __ ld(v0, FieldMemOperand(v0, HeapObject::kMapOffset));
3311 __ ld(v0, FieldMemOperand(v0, Map::kPrototypeOffset));
3312 context()->Plug(v0);
3313}
3314
Ben Murdochda12d292016-06-02 14:46:10 +01003315void FullCodeGenerator::EmitGetOrdinaryHasInstance(CallRuntime* expr) {
3316 DCHECK_EQ(0, expr->arguments()->length());
3317 __ LoadNativeContextSlot(Context::ORDINARY_HAS_INSTANCE_INDEX, v0);
3318 context()->Plug(v0);
3319}
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00003320
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00003321void FullCodeGenerator::EmitDebugIsActive(CallRuntime* expr) {
3322 DCHECK(expr->arguments()->length() == 0);
3323 ExternalReference debug_is_active =
3324 ExternalReference::debug_is_active_address(isolate());
3325 __ li(at, Operand(debug_is_active));
3326 __ lbu(v0, MemOperand(at));
3327 __ SmiTag(v0);
3328 context()->Plug(v0);
3329}
3330
3331
3332void FullCodeGenerator::EmitCreateIterResultObject(CallRuntime* expr) {
3333 ZoneList<Expression*>* args = expr->arguments();
3334 DCHECK_EQ(2, args->length());
3335 VisitForStackValue(args->at(0));
3336 VisitForStackValue(args->at(1));
3337
3338 Label runtime, done;
3339
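  // Fast path: allocate the JSIteratorResult in new space and initialize
  // its map, properties, elements, value and done fields inline; if
  // allocation fails, fall back to the runtime call below.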
3340 __ Allocate(JSIteratorResult::kSize, v0, a2, a3, &runtime, TAG_OBJECT);
3341 __ LoadNativeContextSlot(Context::ITERATOR_RESULT_MAP_INDEX, a1);
3342 __ Pop(a2, a3);
3343 __ LoadRoot(a4, Heap::kEmptyFixedArrayRootIndex);
3344 __ sd(a1, FieldMemOperand(v0, HeapObject::kMapOffset));
3345 __ sd(a4, FieldMemOperand(v0, JSObject::kPropertiesOffset));
3346 __ sd(a4, FieldMemOperand(v0, JSObject::kElementsOffset));
3347 __ sd(a2, FieldMemOperand(v0, JSIteratorResult::kValueOffset));
3348 __ sd(a3, FieldMemOperand(v0, JSIteratorResult::kDoneOffset));
3349 STATIC_ASSERT(JSIteratorResult::kSize == 5 * kPointerSize);
3350 __ jmp(&done);
3351
3352 __ bind(&runtime);
Ben Murdoch097c5b22016-05-18 11:27:45 +01003353 CallRuntimeWithOperands(Runtime::kCreateIterResultObject);
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00003354
3355 __ bind(&done);
3356 context()->Plug(v0);
3357}
3358
3359
3360void FullCodeGenerator::EmitLoadJSRuntimeFunction(CallRuntime* expr) {
Ben Murdochda12d292016-06-02 14:46:10 +01003361 // Push function.
3362 __ LoadNativeContextSlot(expr->context_index(), v0);
3363 PushOperand(v0);
3364
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00003365 // Push undefined as the receiver.
3366 __ LoadRoot(v0, Heap::kUndefinedValueRootIndex);
Ben Murdoch097c5b22016-05-18 11:27:45 +01003367 PushOperand(v0);
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00003368}
3369
3370
3371void FullCodeGenerator::EmitCallJSRuntimeFunction(CallRuntime* expr) {
3372 ZoneList<Expression*>* args = expr->arguments();
3373 int arg_count = args->length();
3374
3375 SetCallPosition(expr);
3376 __ ld(a1, MemOperand(sp, (arg_count + 1) * kPointerSize));
3377 __ li(a0, Operand(arg_count));
3378 __ Call(isolate()->builtins()->Call(ConvertReceiverMode::kNullOrUndefined),
3379 RelocInfo::CODE_TARGET);
Ben Murdoch097c5b22016-05-18 11:27:45 +01003380 OperandStackDepthDecrement(arg_count + 1);
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00003381
Ben Murdochda12d292016-06-02 14:46:10 +01003382 // Restore context register.
3383 __ ld(cp, MemOperand(fp, StandardFrameConstants::kContextOffset));
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00003384}
3385
3386
3387void FullCodeGenerator::VisitUnaryOperation(UnaryOperation* expr) {
3388 switch (expr->op()) {
3389 case Token::DELETE: {
3390 Comment cmnt(masm_, "[ UnaryOperation (DELETE)");
3391 Property* property = expr->expression()->AsProperty();
3392 VariableProxy* proxy = expr->expression()->AsVariableProxy();
3393
3394 if (property != NULL) {
3395 VisitForStackValue(property->obj());
3396 VisitForStackValue(property->key());
Ben Murdoch097c5b22016-05-18 11:27:45 +01003397 CallRuntimeWithOperands(is_strict(language_mode())
3398 ? Runtime::kDeleteProperty_Strict
3399 : Runtime::kDeleteProperty_Sloppy);
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00003400 context()->Plug(v0);
3401 } else if (proxy != NULL) {
3402 Variable* var = proxy->var();
3403 // Delete of an unqualified identifier is disallowed in strict mode but
3404 // "delete this" is allowed.
3405 bool is_this = var->HasThisName(isolate());
3406 DCHECK(is_sloppy(language_mode()) || is_this);
3407 if (var->IsUnallocatedOrGlobalSlot()) {
3408 __ LoadGlobalObject(a2);
3409 __ li(a1, Operand(var->name()));
3410 __ Push(a2, a1);
3411 __ CallRuntime(Runtime::kDeleteProperty_Sloppy);
3412 context()->Plug(v0);
3413 } else if (var->IsStackAllocated() || var->IsContextSlot()) {
3414 // Result of deleting non-global, non-dynamic variables is false.
3415 // The subexpression does not have side effects.
3416 context()->Plug(is_this);
3417 } else {
3418 // Non-global variable. Call the runtime to try to delete from the
3419 // context where the variable was introduced.
3420 DCHECK(!context_register().is(a2));
Ben Murdoch097c5b22016-05-18 11:27:45 +01003421 __ Push(var->name());
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00003422 __ CallRuntime(Runtime::kDeleteLookupSlot);
3423 context()->Plug(v0);
3424 }
3425 } else {
3426 // Result of deleting non-property, non-variable reference is true.
3427 // The subexpression may have side effects.
3428 VisitForEffect(expr->expression());
3429 context()->Plug(true);
3430 }
3431 break;
3432 }
3433
3434 case Token::VOID: {
3435 Comment cmnt(masm_, "[ UnaryOperation (VOID)");
3436 VisitForEffect(expr->expression());
3437 context()->Plug(Heap::kUndefinedValueRootIndex);
3438 break;
3439 }
3440
3441 case Token::NOT: {
3442 Comment cmnt(masm_, "[ UnaryOperation (NOT)");
3443 if (context()->IsEffect()) {
3444 // Unary NOT has no side effects so it's only necessary to visit the
3445 // subexpression. Match the optimizing compiler by not branching.
3446 VisitForEffect(expr->expression());
3447 } else if (context()->IsTest()) {
3448 const TestContext* test = TestContext::cast(context());
3449 // The labels are swapped for the recursive call.
3450 VisitForControl(expr->expression(),
3451 test->false_label(),
3452 test->true_label(),
3453 test->fall_through());
3454 context()->Plug(test->true_label(), test->false_label());
3455 } else {
3456 // We handle value contexts explicitly rather than simply visiting
3457 // for control and plugging the control flow into the context,
3458 // because we need to prepare a pair of extra administrative AST ids
3459 // for the optimizing compiler.
3460 DCHECK(context()->IsAccumulatorValue() || context()->IsStackValue());
3461 Label materialize_true, materialize_false, done;
3462 VisitForControl(expr->expression(),
3463 &materialize_false,
3464 &materialize_true,
3465 &materialize_true);
Ben Murdoch097c5b22016-05-18 11:27:45 +01003466 if (!context()->IsAccumulatorValue()) OperandStackDepthIncrement(1);
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00003467 __ bind(&materialize_true);
3468 PrepareForBailoutForId(expr->MaterializeTrueId(), NO_REGISTERS);
3469 __ LoadRoot(v0, Heap::kTrueValueRootIndex);
3470 if (context()->IsStackValue()) __ push(v0);
3471 __ jmp(&done);
3472 __ bind(&materialize_false);
3473 PrepareForBailoutForId(expr->MaterializeFalseId(), NO_REGISTERS);
3474 __ LoadRoot(v0, Heap::kFalseValueRootIndex);
3475 if (context()->IsStackValue()) __ push(v0);
3476 __ bind(&done);
3477 }
3478 break;
3479 }
3480
3481 case Token::TYPEOF: {
3482 Comment cmnt(masm_, "[ UnaryOperation (TYPEOF)");
3483 {
3484 AccumulatorValueContext context(this);
3485 VisitForTypeofValue(expr->expression());
3486 }
3487 __ mov(a3, v0);
3488 TypeofStub typeof_stub(isolate());
3489 __ CallStub(&typeof_stub);
3490 context()->Plug(v0);
3491 break;
3492 }
3493
3494 default:
3495 UNREACHABLE();
3496 }
3497}
3498
3499
3500void FullCodeGenerator::VisitCountOperation(CountOperation* expr) {
3501 DCHECK(expr->expression()->IsValidReferenceExpressionOrThis());
3502
3503 Comment cmnt(masm_, "[ CountOperation");
3504
3505 Property* prop = expr->expression()->AsProperty();
3506 LhsKind assign_type = Property::GetAssignType(prop);
3507
3508 // Evaluate expression and get value.
3509 if (assign_type == VARIABLE) {
3510 DCHECK(expr->expression()->AsVariableProxy()->var() != NULL);
3511 AccumulatorValueContext context(this);
3512 EmitVariableLoad(expr->expression()->AsVariableProxy());
3513 } else {
3514 // Reserve space for result of postfix operation.
3515 if (expr->is_postfix() && !context()->IsEffect()) {
3516 __ li(at, Operand(Smi::FromInt(0)));
Ben Murdoch097c5b22016-05-18 11:27:45 +01003517 PushOperand(at);
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00003518 }
3519 switch (assign_type) {
3520 case NAMED_PROPERTY: {
3521 // Put the object both on the stack and in the register.
3522 VisitForStackValue(prop->obj());
3523 __ ld(LoadDescriptor::ReceiverRegister(), MemOperand(sp, 0));
3524 EmitNamedPropertyLoad(prop);
3525 break;
3526 }
3527
3528 case NAMED_SUPER_PROPERTY: {
3529 VisitForStackValue(prop->obj()->AsSuperPropertyReference()->this_var());
3530 VisitForAccumulatorValue(
3531 prop->obj()->AsSuperPropertyReference()->home_object());
Ben Murdoch097c5b22016-05-18 11:27:45 +01003532 PushOperand(result_register());
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00003533 const Register scratch = a1;
3534 __ ld(scratch, MemOperand(sp, kPointerSize));
Ben Murdoch097c5b22016-05-18 11:27:45 +01003535 PushOperands(scratch, result_register());
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00003536 EmitNamedSuperPropertyLoad(prop);
3537 break;
3538 }
3539
3540 case KEYED_SUPER_PROPERTY: {
3541 VisitForStackValue(prop->obj()->AsSuperPropertyReference()->this_var());
3542 VisitForAccumulatorValue(
3543 prop->obj()->AsSuperPropertyReference()->home_object());
3544 const Register scratch = a1;
3545 const Register scratch1 = a4;
3546 __ Move(scratch, result_register());
3547 VisitForAccumulatorValue(prop->key());
Ben Murdoch097c5b22016-05-18 11:27:45 +01003548 PushOperands(scratch, result_register());
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00003549 __ ld(scratch1, MemOperand(sp, 2 * kPointerSize));
Ben Murdoch097c5b22016-05-18 11:27:45 +01003550 PushOperands(scratch1, scratch, result_register());
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00003551 EmitKeyedSuperPropertyLoad(prop);
3552 break;
3553 }
3554
3555 case KEYED_PROPERTY: {
3556 VisitForStackValue(prop->obj());
3557 VisitForStackValue(prop->key());
3558 __ ld(LoadDescriptor::ReceiverRegister(),
3559 MemOperand(sp, 1 * kPointerSize));
3560 __ ld(LoadDescriptor::NameRegister(), MemOperand(sp, 0));
3561 EmitKeyedPropertyLoad(prop);
3562 break;
3563 }
3564
3565 case VARIABLE:
3566 UNREACHABLE();
3567 }
3568 }
3569
3570 // We need a second deoptimization point after loading the value
3571 // in case evaluating the property load may have a side effect.
3572 if (assign_type == VARIABLE) {
3573 PrepareForBailout(expr->expression(), TOS_REG);
3574 } else {
3575 PrepareForBailoutForId(prop->LoadId(), TOS_REG);
3576 }
3577
3578 // Inline smi case if we are in a loop.
3579 Label stub_call, done;
3580 JumpPatchSite patch_site(masm_);
3581
3582 int count_value = expr->op() == Token::INC ? 1 : -1;
3583 __ mov(a0, v0);
3584 if (ShouldInlineSmiCase(expr->op())) {
3585 Label slow;
3586 patch_site.EmitJumpIfNotSmi(v0, &slow);
3587
3588 // Save result for postfix expressions.
3589 if (expr->is_postfix()) {
3590 if (!context()->IsEffect()) {
3591 // Save the result on the stack. If we have a named or keyed property
3592 // we store the result under the receiver that is currently on top
3593 // of the stack.
3594 switch (assign_type) {
3595 case VARIABLE:
3596 __ push(v0);
3597 break;
3598 case NAMED_PROPERTY:
3599 __ sd(v0, MemOperand(sp, kPointerSize));
3600 break;
3601 case NAMED_SUPER_PROPERTY:
3602 __ sd(v0, MemOperand(sp, 2 * kPointerSize));
3603 break;
3604 case KEYED_PROPERTY:
3605 __ sd(v0, MemOperand(sp, 2 * kPointerSize));
3606 break;
3607 case KEYED_SUPER_PROPERTY:
3608 __ sd(v0, MemOperand(sp, 3 * kPointerSize));
3609 break;
3610 }
3611 }
3612 }
3613
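    // Try the increment/decrement inline on the Smi value with an overflow
    // check; on overflow, undo the addition and jump to the generic
    // BinaryOpIC stub call below.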
3614 Register scratch1 = a1;
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00003615 __ li(scratch1, Operand(Smi::FromInt(count_value)));
Ben Murdochda12d292016-06-02 14:46:10 +01003616 __ DaddBranchNoOvf(v0, v0, Operand(scratch1), &done);
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00003617 // Call stub. Undo operation first.
3618 __ Move(v0, a0);
3619 __ jmp(&stub_call);
3620 __ bind(&slow);
3621 }
Ben Murdochda12d292016-06-02 14:46:10 +01003622
3623 // Convert old value into a number.
3624 ToNumberStub convert_stub(isolate());
3625 __ CallStub(&convert_stub);
3626 PrepareForBailoutForId(expr->ToNumberId(), TOS_REG);
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00003627
3628 // Save result for postfix expressions.
3629 if (expr->is_postfix()) {
3630 if (!context()->IsEffect()) {
3631 // Save the result on the stack. If we have a named or keyed property
3632 // we store the result under the receiver that is currently on top
3633 // of the stack.
3634 switch (assign_type) {
3635 case VARIABLE:
Ben Murdoch097c5b22016-05-18 11:27:45 +01003636 PushOperand(v0);
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00003637 break;
3638 case NAMED_PROPERTY:
3639 __ sd(v0, MemOperand(sp, kPointerSize));
3640 break;
3641 case NAMED_SUPER_PROPERTY:
3642 __ sd(v0, MemOperand(sp, 2 * kPointerSize));
3643 break;
3644 case KEYED_PROPERTY:
3645 __ sd(v0, MemOperand(sp, 2 * kPointerSize));
3646 break;
3647 case KEYED_SUPER_PROPERTY:
3648 __ sd(v0, MemOperand(sp, 3 * kPointerSize));
3649 break;
3650 }
3651 }
3652 }
3653
3654 __ bind(&stub_call);
3655 __ mov(a1, v0);
3656 __ li(a0, Operand(Smi::FromInt(count_value)));
3657
3658 SetExpressionPosition(expr);
3659
Ben Murdoch097c5b22016-05-18 11:27:45 +01003660 Handle<Code> code = CodeFactory::BinaryOpIC(isolate(), Token::ADD).code();
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00003661 CallIC(code, expr->CountBinOpFeedbackId());
3662 patch_site.EmitPatchInfo();
3663 __ bind(&done);
3664
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00003665 // Store the value returned in v0.
3666 switch (assign_type) {
3667 case VARIABLE:
3668 if (expr->is_postfix()) {
3669 { EffectContext context(this);
3670 EmitVariableAssignment(expr->expression()->AsVariableProxy()->var(),
3671 Token::ASSIGN, expr->CountSlot());
3672 PrepareForBailoutForId(expr->AssignmentId(), TOS_REG);
3673 context.Plug(v0);
3674 }
3675 // For all contexts except EffectContext we have the result on
3676 // top of the stack.
3677 if (!context()->IsEffect()) {
3678 context()->PlugTOS();
3679 }
3680 } else {
3681 EmitVariableAssignment(expr->expression()->AsVariableProxy()->var(),
3682 Token::ASSIGN, expr->CountSlot());
3683 PrepareForBailoutForId(expr->AssignmentId(), TOS_REG);
3684 context()->Plug(v0);
3685 }
3686 break;
3687 case NAMED_PROPERTY: {
3688 __ mov(StoreDescriptor::ValueRegister(), result_register());
3689 __ li(StoreDescriptor::NameRegister(),
3690 Operand(prop->key()->AsLiteral()->value()));
Ben Murdoch097c5b22016-05-18 11:27:45 +01003691 PopOperand(StoreDescriptor::ReceiverRegister());
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00003692 EmitLoadStoreICSlot(expr->CountSlot());
3693 CallStoreIC();
3694 PrepareForBailoutForId(expr->AssignmentId(), TOS_REG);
3695 if (expr->is_postfix()) {
3696 if (!context()->IsEffect()) {
3697 context()->PlugTOS();
3698 }
3699 } else {
3700 context()->Plug(v0);
3701 }
3702 break;
3703 }
3704 case NAMED_SUPER_PROPERTY: {
3705 EmitNamedSuperPropertyStore(prop);
3706 if (expr->is_postfix()) {
3707 if (!context()->IsEffect()) {
3708 context()->PlugTOS();
3709 }
3710 } else {
3711 context()->Plug(v0);
3712 }
3713 break;
3714 }
3715 case KEYED_SUPER_PROPERTY: {
3716 EmitKeyedSuperPropertyStore(prop);
3717 if (expr->is_postfix()) {
3718 if (!context()->IsEffect()) {
3719 context()->PlugTOS();
3720 }
3721 } else {
3722 context()->Plug(v0);
3723 }
3724 break;
3725 }
3726 case KEYED_PROPERTY: {
3727 __ mov(StoreDescriptor::ValueRegister(), result_register());
Ben Murdoch097c5b22016-05-18 11:27:45 +01003728 PopOperands(StoreDescriptor::ReceiverRegister(),
3729 StoreDescriptor::NameRegister());
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00003730 Handle<Code> ic =
3731 CodeFactory::KeyedStoreIC(isolate(), language_mode()).code();
3732 EmitLoadStoreICSlot(expr->CountSlot());
3733 CallIC(ic);
3734 PrepareForBailoutForId(expr->AssignmentId(), TOS_REG);
3735 if (expr->is_postfix()) {
3736 if (!context()->IsEffect()) {
3737 context()->PlugTOS();
3738 }
3739 } else {
3740 context()->Plug(v0);
3741 }
3742 break;
3743 }
3744 }
3745}
3746
3747
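// Handles the fast path for `typeof x == "literal"` comparisons: the
// operand is evaluated for its typeof value and each recognized literal
// ("number", "string", "symbol", ...) is matched with a direct map or
// instance-type check instead of materializing the typeof string.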
3748void FullCodeGenerator::EmitLiteralCompareTypeof(Expression* expr,
3749 Expression* sub_expr,
3750 Handle<String> check) {
3751 Label materialize_true, materialize_false;
3752 Label* if_true = NULL;
3753 Label* if_false = NULL;
3754 Label* fall_through = NULL;
3755 context()->PrepareTest(&materialize_true, &materialize_false,
3756 &if_true, &if_false, &fall_through);
3757
3758 { AccumulatorValueContext context(this);
3759 VisitForTypeofValue(sub_expr);
3760 }
3761 PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
3762
3763 Factory* factory = isolate()->factory();
3764 if (String::Equals(check, factory->number_string())) {
3765 __ JumpIfSmi(v0, if_true);
3766 __ ld(v0, FieldMemOperand(v0, HeapObject::kMapOffset));
3767 __ LoadRoot(at, Heap::kHeapNumberMapRootIndex);
3768 Split(eq, v0, Operand(at), if_true, if_false, fall_through);
3769 } else if (String::Equals(check, factory->string_string())) {
3770 __ JumpIfSmi(v0, if_false);
3771 __ GetObjectType(v0, v0, a1);
3772 Split(lt, a1, Operand(FIRST_NONSTRING_TYPE), if_true, if_false,
3773 fall_through);
3774 } else if (String::Equals(check, factory->symbol_string())) {
3775 __ JumpIfSmi(v0, if_false);
3776 __ GetObjectType(v0, v0, a1);
3777 Split(eq, a1, Operand(SYMBOL_TYPE), if_true, if_false, fall_through);
3778 } else if (String::Equals(check, factory->boolean_string())) {
3779 __ LoadRoot(at, Heap::kTrueValueRootIndex);
3780 __ Branch(if_true, eq, v0, Operand(at));
3781 __ LoadRoot(at, Heap::kFalseValueRootIndex);
3782 Split(eq, v0, Operand(at), if_true, if_false, fall_through);
3783 } else if (String::Equals(check, factory->undefined_string())) {
Ben Murdoch097c5b22016-05-18 11:27:45 +01003784 __ LoadRoot(at, Heap::kNullValueRootIndex);
3785 __ Branch(if_false, eq, v0, Operand(at));
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00003786 __ JumpIfSmi(v0, if_false);
3787 // Check for undetectable objects => true.
3788 __ ld(v0, FieldMemOperand(v0, HeapObject::kMapOffset));
3789 __ lbu(a1, FieldMemOperand(v0, Map::kBitFieldOffset));
3790 __ And(a1, a1, Operand(1 << Map::kIsUndetectable));
3791 Split(ne, a1, Operand(zero_reg), if_true, if_false, fall_through);
3792 } else if (String::Equals(check, factory->function_string())) {
3793 __ JumpIfSmi(v0, if_false);
3794 __ ld(v0, FieldMemOperand(v0, HeapObject::kMapOffset));
3795 __ lbu(a1, FieldMemOperand(v0, Map::kBitFieldOffset));
3796 __ And(a1, a1,
3797 Operand((1 << Map::kIsCallable) | (1 << Map::kIsUndetectable)));
3798 Split(eq, a1, Operand(1 << Map::kIsCallable), if_true, if_false,
3799 fall_through);
3800 } else if (String::Equals(check, factory->object_string())) {
3801 __ JumpIfSmi(v0, if_false);
3802 __ LoadRoot(at, Heap::kNullValueRootIndex);
3803 __ Branch(if_true, eq, v0, Operand(at));
3804 STATIC_ASSERT(LAST_JS_RECEIVER_TYPE == LAST_TYPE);
3805 __ GetObjectType(v0, v0, a1);
3806 __ Branch(if_false, lt, a1, Operand(FIRST_JS_RECEIVER_TYPE));
3807 // Check for callable or undetectable objects => false.
3808 __ lbu(a1, FieldMemOperand(v0, Map::kBitFieldOffset));
3809 __ And(a1, a1,
3810 Operand((1 << Map::kIsCallable) | (1 << Map::kIsUndetectable)));
3811 Split(eq, a1, Operand(zero_reg), if_true, if_false, fall_through);
3812// clang-format off
3813#define SIMD128_TYPE(TYPE, Type, type, lane_count, lane_type) \
3814 } else if (String::Equals(check, factory->type##_string())) { \
3815 __ JumpIfSmi(v0, if_false); \
3816 __ ld(v0, FieldMemOperand(v0, HeapObject::kMapOffset)); \
3817 __ LoadRoot(at, Heap::k##Type##MapRootIndex); \
3818 Split(eq, v0, Operand(at), if_true, if_false, fall_through);
3819 SIMD128_TYPES(SIMD128_TYPE)
3820#undef SIMD128_TYPE
3821 // clang-format on
3822 } else {
3823 if (if_false != fall_through) __ jmp(if_false);
3824 }
3825 context()->Plug(if_true, if_false);
3826}
3827
3828
3829void FullCodeGenerator::VisitCompareOperation(CompareOperation* expr) {
3830 Comment cmnt(masm_, "[ CompareOperation");
3831 SetExpressionPosition(expr);
3832
3833 // First we try a fast inlined version of the compare when one of
3834 // the operands is a literal.
3835 if (TryLiteralCompare(expr)) return;
3836
3837 // Always perform the comparison for its control flow. Pack the result
3838 // into the expression's context after the comparison is performed.
3839 Label materialize_true, materialize_false;
3840 Label* if_true = NULL;
3841 Label* if_false = NULL;
3842 Label* fall_through = NULL;
3843 context()->PrepareTest(&materialize_true, &materialize_false,
3844 &if_true, &if_false, &fall_through);
3845
3846 Token::Value op = expr->op();
3847 VisitForStackValue(expr->left());
3848 switch (op) {
3849 case Token::IN:
3850 VisitForStackValue(expr->right());
Ben Murdoch097c5b22016-05-18 11:27:45 +01003851 CallRuntimeWithOperands(Runtime::kHasProperty);
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00003852 PrepareForBailoutBeforeSplit(expr, false, NULL, NULL);
3853 __ LoadRoot(a4, Heap::kTrueValueRootIndex);
3854 Split(eq, v0, Operand(a4), if_true, if_false, fall_through);
3855 break;
3856
3857 case Token::INSTANCEOF: {
3858 VisitForAccumulatorValue(expr->right());
3859 __ mov(a0, result_register());
Ben Murdoch097c5b22016-05-18 11:27:45 +01003860 PopOperand(a1);
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00003861 InstanceOfStub stub(isolate());
3862 __ CallStub(&stub);
3863 PrepareForBailoutBeforeSplit(expr, false, NULL, NULL);
3864 __ LoadRoot(a4, Heap::kTrueValueRootIndex);
3865 Split(eq, v0, Operand(a4), if_true, if_false, fall_through);
3866 break;
3867 }
3868
3869 default: {
3870 VisitForAccumulatorValue(expr->right());
3871 Condition cc = CompareIC::ComputeCondition(op);
3872 __ mov(a0, result_register());
Ben Murdoch097c5b22016-05-18 11:27:45 +01003873 PopOperand(a1);
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00003874
3875 bool inline_smi_code = ShouldInlineSmiCase(op);
3876 JumpPatchSite patch_site(masm_);
3877 if (inline_smi_code) {
3878 Label slow_case;
3879 __ Or(a2, a0, Operand(a1));
3880 patch_site.EmitJumpIfNotSmi(a2, &slow_case);
3881 Split(cc, a1, Operand(a0), if_true, if_false, NULL);
3882 __ bind(&slow_case);
3883 }
3884
Ben Murdoch097c5b22016-05-18 11:27:45 +01003885 Handle<Code> ic = CodeFactory::CompareIC(isolate(), op).code();
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00003886 CallIC(ic, expr->CompareOperationFeedbackId());
3887 patch_site.EmitPatchInfo();
3888 PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
3889 Split(cc, v0, Operand(zero_reg), if_true, if_false, fall_through);
3890 }
3891 }
3892
3893 // Convert the result of the comparison into one expected for this
3894 // expression's context.
3895 context()->Plug(if_true, if_false);
3896}
3897
3898
3899void FullCodeGenerator::EmitLiteralCompareNil(CompareOperation* expr,
3900 Expression* sub_expr,
3901 NilValue nil) {
3902 Label materialize_true, materialize_false;
3903 Label* if_true = NULL;
3904 Label* if_false = NULL;
3905 Label* fall_through = NULL;
3906 context()->PrepareTest(&materialize_true, &materialize_false,
3907 &if_true, &if_false, &fall_through);
3908
3909 VisitForAccumulatorValue(sub_expr);
3910 PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
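  // For strict equality, compare directly against the null or undefined
  // root value. For sloppy equality, null and undefined (and undetectable
  // objects) all compare equal, so test the map's undetectable bit instead.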
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00003911 if (expr->op() == Token::EQ_STRICT) {
3912 Heap::RootListIndex nil_value = nil == kNullValue ?
3913 Heap::kNullValueRootIndex :
3914 Heap::kUndefinedValueRootIndex;
3915 __ LoadRoot(a1, nil_value);
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00003916 Split(eq, v0, Operand(a1), if_true, if_false, fall_through);
Ben Murdochda12d292016-06-02 14:46:10 +01003917 } else {
3918 __ JumpIfSmi(v0, if_false);
3919 __ ld(v0, FieldMemOperand(v0, HeapObject::kMapOffset));
3920 __ lbu(a1, FieldMemOperand(v0, Map::kBitFieldOffset));
3921 __ And(a1, a1, Operand(1 << Map::kIsUndetectable));
3922 Split(ne, a1, Operand(zero_reg), if_true, if_false, fall_through);
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00003923 }
3924 context()->Plug(if_true, if_false);
3925}
3926
3927
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00003928Register FullCodeGenerator::result_register() {
3929 return v0;
3930}
3931
3932
3933Register FullCodeGenerator::context_register() {
3934 return cp;
3935}
3936
Ben Murdochda12d292016-06-02 14:46:10 +01003937void FullCodeGenerator::LoadFromFrameField(int frame_offset, Register value) {
3938 // DCHECK_EQ(POINTER_SIZE_ALIGN(frame_offset), frame_offset);
3939 DCHECK(IsAligned(frame_offset, kPointerSize));
3940 // __ sw(value, MemOperand(fp, frame_offset));
3941 __ ld(value, MemOperand(fp, frame_offset));
3942}
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00003943
3944void FullCodeGenerator::StoreToFrameField(int frame_offset, Register value) {
3945 // DCHECK_EQ(POINTER_SIZE_ALIGN(frame_offset), frame_offset);
3946 DCHECK(IsAligned(frame_offset, kPointerSize));
3947 // __ sw(value, MemOperand(fp, frame_offset));
3948 __ sd(value, MemOperand(fp, frame_offset));
3949}
3950
3951
3952void FullCodeGenerator::LoadContextField(Register dst, int context_index) {
3953 __ ld(dst, ContextMemOperand(cp, context_index));
3954}
3955
3956
3957void FullCodeGenerator::PushFunctionArgumentForContextAllocation() {
3958 Scope* closure_scope = scope()->ClosureScope();
3959 if (closure_scope->is_script_scope() ||
3960 closure_scope->is_module_scope()) {
3961 // Contexts nested in the native context have a canonical empty function
3962 // as their closure, not the anonymous closure containing the global
3963 // code.
3964 __ LoadNativeContextSlot(Context::CLOSURE_INDEX, at);
3965 } else if (closure_scope->is_eval_scope()) {
3966 // Contexts created by a call to eval have the same closure as the
3967 // context calling eval, not the anonymous closure containing the eval
3968 // code. Fetch it from the context.
3969 __ ld(at, ContextMemOperand(cp, Context::CLOSURE_INDEX));
3970 } else {
3971 DCHECK(closure_scope->is_function_scope());
3972 __ ld(at, MemOperand(fp, JavaScriptFrameConstants::kFunctionOffset));
3973 }
Ben Murdoch097c5b22016-05-18 11:27:45 +01003974 PushOperand(at);
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00003975}
3976
3977
3978// ----------------------------------------------------------------------------
3979// Non-local control flow support.
3980
3981void FullCodeGenerator::EnterFinallyBlock() {
3982 DCHECK(!result_register().is(a1));
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00003983 // Store pending message while executing finally block.
3984 ExternalReference pending_message_obj =
3985 ExternalReference::address_of_pending_message_obj(isolate());
3986 __ li(at, Operand(pending_message_obj));
3987 __ ld(a1, MemOperand(at));
Ben Murdoch097c5b22016-05-18 11:27:45 +01003988 PushOperand(a1);
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00003989
3990 ClearPendingMessage();
3991}
3992
3993
3994void FullCodeGenerator::ExitFinallyBlock() {
3995 DCHECK(!result_register().is(a1));
3996 // Restore pending message from stack.
Ben Murdoch097c5b22016-05-18 11:27:45 +01003997 PopOperand(a1);
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00003998 ExternalReference pending_message_obj =
3999 ExternalReference::address_of_pending_message_obj(isolate());
4000 __ li(at, Operand(pending_message_obj));
4001 __ sd(a1, MemOperand(at));
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00004002}
4003
4004
4005void FullCodeGenerator::ClearPendingMessage() {
4006 DCHECK(!result_register().is(a1));
4007 ExternalReference pending_message_obj =
4008 ExternalReference::address_of_pending_message_obj(isolate());
4009 __ LoadRoot(a1, Heap::kTheHoleValueRootIndex);
4010 __ li(at, Operand(pending_message_obj));
4011 __ sd(a1, MemOperand(at));
4012}
4013
4014
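// Emits the dispatch that runs after a finally block: the saved token in a1
// selects which deferred command (return, rethrow, continue or break) to
// resume once the finally code has completed.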
Ben Murdoch097c5b22016-05-18 11:27:45 +01004015void FullCodeGenerator::DeferredCommands::EmitCommands() {
4016 __ Pop(result_register()); // Restore the accumulator.
4017 __ Pop(a1); // Get the token.
4018 for (DeferredCommand cmd : commands_) {
4019 Label skip;
4020 __ li(at, Operand(Smi::FromInt(cmd.token)));
4021 __ Branch(&skip, ne, a1, Operand(at));
4022 switch (cmd.command) {
4023 case kReturn:
4024 codegen_->EmitUnwindAndReturn();
4025 break;
4026 case kThrow:
4027 __ Push(result_register());
4028 __ CallRuntime(Runtime::kReThrow);
4029 break;
4030 case kContinue:
4031 codegen_->EmitContinue(cmd.target);
4032 break;
4033 case kBreak:
4034 codegen_->EmitBreak(cmd.target);
4035 break;
4036 }
4037 __ bind(&skip);
4038 }
4039}
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00004040
4041#undef __
4042
4043
4044void BackEdgeTable::PatchAt(Code* unoptimized_code,
4045 Address pc,
4046 BackEdgeState target_state,
4047 Code* replacement_code) {
4048 static const int kInstrSize = Assembler::kInstrSize;
Ben Murdochda12d292016-06-02 14:46:10 +01004049 Address pc_immediate_load_address =
4050 Assembler::target_address_from_return_address(pc);
4051 Address branch_address = pc_immediate_load_address - 2 * kInstrSize;
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00004052 Isolate* isolate = unoptimized_code->GetIsolate();
4053 CodePatcher patcher(isolate, branch_address, 1);
4054
4055 switch (target_state) {
4056 case INTERRUPT:
4057 // slt at, a3, zero_reg (in case of count based interrupts)
4058 // beq at, zero_reg, ok
4059 // lui t9, <interrupt stub address> upper
4060 // ori t9, <interrupt stub address> u-middle
4061 // dsll t9, t9, 16
4062 // ori t9, <interrupt stub address> lower
4063 // jalr t9
4064 // nop
4065 // ok-label ----- pc_after points here
4066 patcher.masm()->slt(at, a3, zero_reg);
4067 break;
4068 case ON_STACK_REPLACEMENT:
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00004069 // addiu at, zero_reg, 1
4070 // beq at, zero_reg, ok ;; Not changed
4071 // lui t9, <on-stack replacement address> upper
4072 // ori t9, <on-stack replacement address> middle
4073 // dsll t9, t9, 16
4074 // ori t9, <on-stack replacement address> lower
4075 // jalr t9 ;; Not changed
4076 // nop ;; Not changed
4077 // ok-label ----- pc_after points here
4078 patcher.masm()->daddiu(at, zero_reg, 1);
4079 break;
4080 }
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00004081 // Replace the stack check address in the load-immediate (6-instr sequence)
4082 // with the entry address of the replacement code.
4083 Assembler::set_target_address_at(isolate, pc_immediate_load_address,
4084 replacement_code->entry());
4085
4086 unoptimized_code->GetHeap()->incremental_marking()->RecordCodeTargetPatch(
4087 unoptimized_code, pc_immediate_load_address, replacement_code);
4088}
4089
4090
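// Decodes the current state of a patched back edge: an `slt` at the branch
// site means the interrupt check is still installed, while an add-immediate
// (`daddiu`) means the call target has been redirected to the on-stack
// replacement builtin.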
4091BackEdgeTable::BackEdgeState BackEdgeTable::GetBackEdgeState(
4092 Isolate* isolate,
4093 Code* unoptimized_code,
4094 Address pc) {
4095 static const int kInstrSize = Assembler::kInstrSize;
Ben Murdochda12d292016-06-02 14:46:10 +01004096 Address pc_immediate_load_address =
4097 Assembler::target_address_from_return_address(pc);
4098 Address branch_address = pc_immediate_load_address - 2 * kInstrSize;
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00004099
Ben Murdochda12d292016-06-02 14:46:10 +01004100 DCHECK(Assembler::IsBeq(Assembler::instr_at(branch_address + kInstrSize)));
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00004101 if (!Assembler::IsAddImmediate(Assembler::instr_at(branch_address))) {
4102 DCHECK(reinterpret_cast<uint64_t>(
4103 Assembler::target_address_at(pc_immediate_load_address)) ==
4104 reinterpret_cast<uint64_t>(
4105 isolate->builtins()->InterruptCheck()->entry()));
4106 return INTERRUPT;
4107 }
4108
4109 DCHECK(Assembler::IsAddImmediate(Assembler::instr_at(branch_address)));
4110
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00004111 DCHECK(reinterpret_cast<uint64_t>(
Ben Murdochda12d292016-06-02 14:46:10 +01004112 Assembler::target_address_at(pc_immediate_load_address)) ==
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00004113 reinterpret_cast<uint64_t>(
Ben Murdochda12d292016-06-02 14:46:10 +01004114 isolate->builtins()->OnStackReplacement()->entry()));
4115 return ON_STACK_REPLACEMENT;
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00004116}
4117
4118
4119} // namespace internal
4120} // namespace v8
4121
4122#endif // V8_TARGET_ARCH_MIPS64