// Copyright 2012 the V8 project authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.

#if V8_TARGET_ARCH_MIPS64

// Note on Mips implementation:
//
// The result_register() for mips is the 'v0' register, which is defined
// by the ABI to contain function return values. However, the first
// parameter to a function is defined to be 'a0'. So there are many
// places where we have to move a previous result in v0 to a0 for the
// next call: mov(a0, v0). This is not needed on the other architectures.

#include "src/ast/scopes.h"
#include "src/code-factory.h"
#include "src/code-stubs.h"
#include "src/codegen.h"
#include "src/debug/debug.h"
#include "src/full-codegen/full-codegen.h"
#include "src/ic/ic.h"
#include "src/parsing/parser.h"

#include "src/mips64/code-stubs-mips64.h"
#include "src/mips64/macro-assembler-mips64.h"

namespace v8 {
namespace internal {

#define __ ACCESS_MASM(masm())

// A patch site is a location in the code which it is possible to patch. This
// class has a number of methods to emit the code which is patchable and the
// method EmitPatchInfo to record a marker back to the patchable code. This
// marker is a andi zero_reg, rx, #yyyy instruction, and rx * 0x0000ffff + yyyy
// (raw 16 bit immediate value is used) is the delta from the pc to the first
// instruction of the patchable code.
// The marker instruction is effectively a NOP (dest is zero_reg) and will
// never be emitted by normal code.
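// For example, a marker instruction `andi zero_reg, a2, 0x30` would encode a
// delta of 6 * 0xffff + 0x30 instructions (a2 has register code 6) from the
// patchable code emitted by EmitJumpIfNotSmi/EmitJumpIfSmi to the marker.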
class JumpPatchSite BASE_EMBEDDED {
 public:
  explicit JumpPatchSite(MacroAssembler* masm) : masm_(masm) {
#ifdef DEBUG
    info_emitted_ = false;
#endif
  }

  ~JumpPatchSite() {
    DCHECK(patch_site_.is_bound() == info_emitted_);
  }

  // When initially emitting this, ensure that a jump is always generated to
  // skip the inlined smi code.
  void EmitJumpIfNotSmi(Register reg, Label* target) {
    DCHECK(!patch_site_.is_bound() && !info_emitted_);
    Assembler::BlockTrampolinePoolScope block_trampoline_pool(masm_);
    __ bind(&patch_site_);
    __ andi(at, reg, 0);
    // Always taken before patched.
    __ BranchShort(target, eq, at, Operand(zero_reg));
  }

  // When initially emitting this, ensure that a jump is never generated to
  // skip the inlined smi code.
  void EmitJumpIfSmi(Register reg, Label* target) {
    Assembler::BlockTrampolinePoolScope block_trampoline_pool(masm_);
    DCHECK(!patch_site_.is_bound() && !info_emitted_);
    __ bind(&patch_site_);
    __ andi(at, reg, 0);
    // Never taken before patched.
    __ BranchShort(target, ne, at, Operand(zero_reg));
  }

  void EmitPatchInfo() {
    if (patch_site_.is_bound()) {
      int delta_to_patch_site = masm_->InstructionsGeneratedSince(&patch_site_);
      Register reg = Register::from_code(delta_to_patch_site / kImm16Mask);
      __ andi(zero_reg, reg, delta_to_patch_site % kImm16Mask);
#ifdef DEBUG
      info_emitted_ = true;
#endif
    } else {
      __ nop();  // Signals no inlined code.
    }
  }

 private:
  MacroAssembler* masm() { return masm_; }
  MacroAssembler* masm_;
  Label patch_site_;
#ifdef DEBUG
  bool info_emitted_;
#endif
};


// Generate code for a JS function. On entry to the function the receiver
// and arguments have been pushed on the stack left to right. The actual
// argument count matches the formal parameter count expected by the
// function.
//
// The live registers are:
//   o a1: the JS function object being called (i.e. ourselves)
//   o a3: the new target value
//   o cp: our context
//   o fp: our caller's frame pointer
//   o sp: stack pointer
//   o ra: return address
//
// The function builds a JS frame. Please see JavaScriptFrameConstants in
// frames-mips.h for its layout.
void FullCodeGenerator::Generate() {
  CompilationInfo* info = info_;
  profiling_counter_ = isolate()->factory()->NewCell(
      Handle<Smi>(Smi::FromInt(FLAG_interrupt_budget), isolate()));
  SetFunctionPosition(literal());
  Comment cmnt(masm_, "[ function compiled by full code generator");

  ProfileEntryHookStub::MaybeCallEntryHook(masm_);

  if (FLAG_debug_code && info->ExpectsJSReceiverAsReceiver()) {
    int receiver_offset = info->scope()->num_parameters() * kPointerSize;
    __ ld(a2, MemOperand(sp, receiver_offset));
    __ AssertNotSmi(a2);
    __ GetObjectType(a2, a2, a2);
    __ Check(ge, kSloppyFunctionExpectsJSReceiverReceiver, a2,
             Operand(FIRST_JS_RECEIVER_TYPE));
  }

  // Open a frame scope to indicate that there is a frame on the stack. The
  // MANUAL indicates that the scope shouldn't actually generate code to set up
  // the frame (that is done below).
  FrameScope frame_scope(masm_, StackFrame::MANUAL);
  info->set_prologue_offset(masm_->pc_offset());
  __ Prologue(info->GeneratePreagedPrologue());

  { Comment cmnt(masm_, "[ Allocate locals");
    int locals_count = info->scope()->num_stack_slots();
    // Generators allocate locals, if any, in context slots.
    DCHECK(!IsGeneratorFunction(info->literal()->kind()) || locals_count == 0);
    OperandStackDepthIncrement(locals_count);
    if (locals_count > 0) {
      if (locals_count >= 128) {
        Label ok;
        __ Dsubu(t1, sp, Operand(locals_count * kPointerSize));
        __ LoadRoot(a2, Heap::kRealStackLimitRootIndex);
        __ Branch(&ok, hs, t1, Operand(a2));
        __ CallRuntime(Runtime::kThrowStackOverflow);
        __ bind(&ok);
      }
      __ LoadRoot(t1, Heap::kUndefinedValueRootIndex);
      int kMaxPushes = FLAG_optimize_for_size ? 4 : 32;
      if (locals_count >= kMaxPushes) {
        int loop_iterations = locals_count / kMaxPushes;
        __ li(a2, Operand(loop_iterations));
        Label loop_header;
        __ bind(&loop_header);
        // Do pushes.
        __ Dsubu(sp, sp, Operand(kMaxPushes * kPointerSize));
        for (int i = 0; i < kMaxPushes; i++) {
          __ sd(t1, MemOperand(sp, i * kPointerSize));
        }
        // Continue loop if not done.
        __ Dsubu(a2, a2, Operand(1));
        __ Branch(&loop_header, ne, a2, Operand(zero_reg));
      }
      int remaining = locals_count % kMaxPushes;
      // Emit the remaining pushes.
      __ Dsubu(sp, sp, Operand(remaining * kPointerSize));
      for (int i = 0; i < remaining; i++) {
        __ sd(t1, MemOperand(sp, i * kPointerSize));
      }
    }
  }

  bool function_in_register_a1 = true;

  // Possibly allocate a local context.
  if (info->scope()->num_heap_slots() > 0) {
    Comment cmnt(masm_, "[ Allocate context");
    // Argument to NewContext is the function, which is still in a1.
    bool need_write_barrier = true;
    int slots = info->scope()->num_heap_slots() - Context::MIN_CONTEXT_SLOTS;
    if (info->scope()->is_script_scope()) {
      __ push(a1);
      __ Push(info->scope()->GetScopeInfo(info->isolate()));
      __ CallRuntime(Runtime::kNewScriptContext);
      PrepareForBailoutForId(BailoutId::ScriptContext(), TOS_REG);
      // The new target value is not used, clobbering is safe.
      DCHECK_NULL(info->scope()->new_target_var());
    } else {
      if (info->scope()->new_target_var() != nullptr) {
        __ push(a3);  // Preserve new target.
      }
      if (slots <= FastNewContextStub::kMaximumSlots) {
        FastNewContextStub stub(isolate(), slots);
        __ CallStub(&stub);
        // Result of FastNewContextStub is always in new space.
        need_write_barrier = false;
      } else {
        __ push(a1);
        __ CallRuntime(Runtime::kNewFunctionContext);
      }
      if (info->scope()->new_target_var() != nullptr) {
        __ pop(a3);  // Restore new target.
      }
    }
    function_in_register_a1 = false;
    // Context is returned in v0. It replaces the context passed to us.
    // It's saved in the stack and kept live in cp.
    __ mov(cp, v0);
    __ sd(v0, MemOperand(fp, StandardFrameConstants::kContextOffset));
    // Copy any necessary parameters into the context.
    int num_parameters = info->scope()->num_parameters();
    int first_parameter = info->scope()->has_this_declaration() ? -1 : 0;
    for (int i = first_parameter; i < num_parameters; i++) {
      Variable* var = (i == -1) ? scope()->receiver() : scope()->parameter(i);
      if (var->IsContextSlot()) {
        int parameter_offset = StandardFrameConstants::kCallerSPOffset +
            (num_parameters - 1 - i) * kPointerSize;
        // Load parameter from stack.
        __ ld(a0, MemOperand(fp, parameter_offset));
        // Store it in the context.
        MemOperand target = ContextMemOperand(cp, var->index());
        __ sd(a0, target);

        // Update the write barrier.
        if (need_write_barrier) {
          __ RecordWriteContextSlot(cp, target.offset(), a0, a2,
                                    kRAHasBeenSaved, kDontSaveFPRegs);
        } else if (FLAG_debug_code) {
          Label done;
          __ JumpIfInNewSpace(cp, a0, &done);
          __ Abort(kExpectedNewSpaceObject);
          __ bind(&done);
        }
      }
    }
  }

  // The registers holding the function and the new target are both trashed in
  // case we bail out here. But since that can happen only when new target is
  // not used and we allocate a context, the value of |function_in_register_a1|
  // is correct.
  PrepareForBailoutForId(BailoutId::FunctionContext(), NO_REGISTERS);

  // Possibly set up a local binding to the this function, which is used in
  // derived constructors with super calls.
  Variable* this_function_var = scope()->this_function_var();
  if (this_function_var != nullptr) {
    Comment cmnt(masm_, "[ This function");
    if (!function_in_register_a1) {
      __ ld(a1, MemOperand(fp, JavaScriptFrameConstants::kFunctionOffset));
      // The write barrier clobbers the register again, keep it marked as such.
    }
    SetVar(this_function_var, a1, a0, a2);
  }

  Variable* new_target_var = scope()->new_target_var();
  if (new_target_var != nullptr) {
    Comment cmnt(masm_, "[ new.target");
    SetVar(new_target_var, a3, a0, a2);
  }

  // Possibly allocate a rest parameter array.
  int rest_index;
  Variable* rest_param = scope()->rest_parameter(&rest_index);
  if (rest_param) {
    Comment cmnt(masm_, "[ Allocate rest parameter array");
    if (!function_in_register_a1) {
      __ ld(a1, MemOperand(fp, JavaScriptFrameConstants::kFunctionOffset));
    }
    FastNewRestParameterStub stub(isolate());
    __ CallStub(&stub);
    function_in_register_a1 = false;
    SetVar(rest_param, v0, a1, a2);
  }

  Variable* arguments = scope()->arguments();
  if (arguments != NULL) {
    // Function uses arguments object.
    Comment cmnt(masm_, "[ Allocate arguments object");
    if (!function_in_register_a1) {
      // Load this again, if it's used by the local context below.
      __ ld(a1, MemOperand(fp, JavaScriptFrameConstants::kFunctionOffset));
    }
    if (is_strict(language_mode()) || !has_simple_parameters()) {
      FastNewStrictArgumentsStub stub(isolate());
      __ CallStub(&stub);
    } else if (literal()->has_duplicate_parameters()) {
      __ Push(a1);
      __ CallRuntime(Runtime::kNewSloppyArguments_Generic);
    } else {
      FastNewSloppyArgumentsStub stub(isolate());
      __ CallStub(&stub);
    }

    SetVar(arguments, v0, a1, a2);
  }

  if (FLAG_trace) {
    __ CallRuntime(Runtime::kTraceEnter);
  }

  // Visit the declarations and body unless there is an illegal
  // redeclaration.
  if (scope()->HasIllegalRedeclaration()) {
    Comment cmnt(masm_, "[ Declarations");
    VisitForEffect(scope()->GetIllegalRedeclaration());

  } else {
    PrepareForBailoutForId(BailoutId::FunctionEntry(), NO_REGISTERS);
    { Comment cmnt(masm_, "[ Declarations");
      VisitDeclarations(scope()->declarations());
    }

    // Assert that the declarations do not use ICs. Otherwise the debugger
    // won't be able to redirect a PC at an IC to the correct IC in newly
    // recompiled code.
    DCHECK_EQ(0, ic_total_count_);

    { Comment cmnt(masm_, "[ Stack check");
      PrepareForBailoutForId(BailoutId::Declarations(), NO_REGISTERS);
      Label ok;
      __ LoadRoot(at, Heap::kStackLimitRootIndex);
      __ Branch(&ok, hs, sp, Operand(at));
      Handle<Code> stack_check = isolate()->builtins()->StackCheck();
      PredictableCodeSizeScope predictable(
          masm_, masm_->CallSize(stack_check, RelocInfo::CODE_TARGET));
      __ Call(stack_check, RelocInfo::CODE_TARGET);
      __ bind(&ok);
    }

    { Comment cmnt(masm_, "[ Body");
      DCHECK(loop_depth() == 0);

      VisitStatements(literal()->body());

      DCHECK(loop_depth() == 0);
    }
  }

  // Always emit a 'return undefined' in case control fell off the end of
  // the body.
  { Comment cmnt(masm_, "[ return <undefined>;");
    __ LoadRoot(v0, Heap::kUndefinedValueRootIndex);
  }
  EmitReturnSequence();
}


void FullCodeGenerator::ClearAccumulator() {
  DCHECK(Smi::FromInt(0) == 0);
  __ mov(v0, zero_reg);
}


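// Decrement the Smi-tagged interrupt budget held in the profiling counter
// cell by |delta|.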
void FullCodeGenerator::EmitProfilingCounterDecrement(int delta) {
  __ li(a2, Operand(profiling_counter_));
  __ ld(a3, FieldMemOperand(a2, Cell::kValueOffset));
  __ Dsubu(a3, a3, Operand(Smi::FromInt(delta)));
  __ sd(a3, FieldMemOperand(a2, Cell::kValueOffset));
}


void FullCodeGenerator::EmitProfilingCounterReset() {
  int reset_value = FLAG_interrupt_budget;
  if (info_->is_debug()) {
    // Detect debug break requests as soon as possible.
    reset_value = FLAG_interrupt_budget >> 4;
  }
  __ li(a2, Operand(profiling_counter_));
  __ li(a3, Operand(Smi::FromInt(reset_value)));
  __ sd(a3, FieldMemOperand(a2, Cell::kValueOffset));
}


void FullCodeGenerator::EmitBackEdgeBookkeeping(IterationStatement* stmt,
                                                Label* back_edge_target) {
  // The generated code is used in Deoptimizer::PatchStackCheckCodeAt so we
  // need to make sure it is constant. Branch may emit a skip-or-jump sequence
  // instead of the normal Branch. It seems that the "skip" part of that
  // sequence is about as long as this Branch would be so it is safe to ignore
  // that.
  Assembler::BlockTrampolinePoolScope block_trampoline_pool(masm_);
  Comment cmnt(masm_, "[ Back edge bookkeeping");
  Label ok;
  DCHECK(back_edge_target->is_bound());
  int distance = masm_->SizeOfCodeGeneratedSince(back_edge_target);
  int weight = Min(kMaxBackEdgeWeight,
                   Max(1, distance / kCodeSizeMultiplier));
  EmitProfilingCounterDecrement(weight);
  __ slt(at, a3, zero_reg);
  __ beq(at, zero_reg, &ok);
  // Call will emit a li t9 first, so it is safe to use the delay slot.
  __ Call(isolate()->builtins()->InterruptCheck(), RelocInfo::CODE_TARGET);
  // Record a mapping of this PC offset to the OSR id. This is used to find
  // the AST id from the unoptimized code in order to use it as a key into
  // the deoptimization input data found in the optimized code.
  RecordBackEdge(stmt->OsrEntryId());
  EmitProfilingCounterReset();

  __ bind(&ok);
  PrepareForBailoutForId(stmt->EntryId(), NO_REGISTERS);
  // Record a mapping of the OSR id to this PC. This is used if the OSR
  // entry becomes the target of a bailout. We don't expect it to be, but
  // we want it to work if it is.
  PrepareForBailoutForId(stmt->OsrEntryId(), NO_REGISTERS);
}

void FullCodeGenerator::EmitProfilingCounterHandlingForReturnSequence(
    bool is_tail_call) {
  // Pretend that the exit is a backwards jump to the entry.
  int weight = 1;
  if (info_->ShouldSelfOptimize()) {
    weight = FLAG_interrupt_budget / FLAG_self_opt_count;
  } else {
    int distance = masm_->pc_offset();
    weight = Min(kMaxBackEdgeWeight, Max(1, distance / kCodeSizeMultiplier));
  }
  EmitProfilingCounterDecrement(weight);
  Label ok;
  __ Branch(&ok, ge, a3, Operand(zero_reg));
  // Don't need to save result register if we are going to do a tail call.
  if (!is_tail_call) {
    __ push(v0);
  }
  __ Call(isolate()->builtins()->InterruptCheck(), RelocInfo::CODE_TARGET);
  if (!is_tail_call) {
    __ pop(v0);
  }
  EmitProfilingCounterReset();
  __ bind(&ok);
}

void FullCodeGenerator::EmitReturnSequence() {
  Comment cmnt(masm_, "[ Return sequence");
  if (return_label_.is_bound()) {
    __ Branch(&return_label_);
  } else {
    __ bind(&return_label_);
    if (FLAG_trace) {
      // Push the return value on the stack as the parameter.
      // Runtime::TraceExit returns its parameter in v0.
      __ push(v0);
      __ CallRuntime(Runtime::kTraceExit);
    }
    EmitProfilingCounterHandlingForReturnSequence(false);

    // Make sure that the constant pool is not emitted inside of the return
    // sequence.
    { Assembler::BlockTrampolinePoolScope block_trampoline_pool(masm_);
      // Here we use masm_-> instead of the __ macro to prevent the code
      // coverage tool from instrumenting, as we rely on the code size here.
      int32_t arg_count = info_->scope()->num_parameters() + 1;
      int32_t sp_delta = arg_count * kPointerSize;
      SetReturnPosition(literal());
      masm_->mov(sp, fp);
      masm_->MultiPop(static_cast<RegList>(fp.bit() | ra.bit()));
      masm_->Daddu(sp, sp, Operand(sp_delta));
      masm_->Jump(ra);
    }
  }
}


void FullCodeGenerator::StackValueContext::Plug(Variable* var) const {
  DCHECK(var->IsStackAllocated() || var->IsContextSlot());
  codegen()->GetVar(result_register(), var);
  codegen()->PushOperand(result_register());
}


void FullCodeGenerator::EffectContext::Plug(Heap::RootListIndex index) const {
}


void FullCodeGenerator::AccumulatorValueContext::Plug(
    Heap::RootListIndex index) const {
  __ LoadRoot(result_register(), index);
}


void FullCodeGenerator::StackValueContext::Plug(
    Heap::RootListIndex index) const {
  __ LoadRoot(result_register(), index);
  codegen()->PushOperand(result_register());
}


void FullCodeGenerator::TestContext::Plug(Heap::RootListIndex index) const {
  codegen()->PrepareForBailoutBeforeSplit(condition(),
                                          true,
                                          true_label_,
                                          false_label_);
  if (index == Heap::kUndefinedValueRootIndex ||
      index == Heap::kNullValueRootIndex ||
      index == Heap::kFalseValueRootIndex) {
    if (false_label_ != fall_through_) __ Branch(false_label_);
  } else if (index == Heap::kTrueValueRootIndex) {
    if (true_label_ != fall_through_) __ Branch(true_label_);
  } else {
    __ LoadRoot(result_register(), index);
    codegen()->DoTest(this);
  }
}


void FullCodeGenerator::EffectContext::Plug(Handle<Object> lit) const {
}


void FullCodeGenerator::AccumulatorValueContext::Plug(
    Handle<Object> lit) const {
  __ li(result_register(), Operand(lit));
}


void FullCodeGenerator::StackValueContext::Plug(Handle<Object> lit) const {
  // Immediates cannot be pushed directly.
  __ li(result_register(), Operand(lit));
  codegen()->PushOperand(result_register());
}


void FullCodeGenerator::TestContext::Plug(Handle<Object> lit) const {
  codegen()->PrepareForBailoutBeforeSplit(condition(),
                                          true,
                                          true_label_,
                                          false_label_);
  DCHECK(lit->IsNull() || lit->IsUndefined() || !lit->IsUndetectableObject());
  if (lit->IsUndefined() || lit->IsNull() || lit->IsFalse()) {
    if (false_label_ != fall_through_) __ Branch(false_label_);
  } else if (lit->IsTrue() || lit->IsJSObject()) {
    if (true_label_ != fall_through_) __ Branch(true_label_);
  } else if (lit->IsString()) {
    if (String::cast(*lit)->length() == 0) {
      if (false_label_ != fall_through_) __ Branch(false_label_);
    } else {
      if (true_label_ != fall_through_) __ Branch(true_label_);
    }
  } else if (lit->IsSmi()) {
    if (Smi::cast(*lit)->value() == 0) {
      if (false_label_ != fall_through_) __ Branch(false_label_);
    } else {
      if (true_label_ != fall_through_) __ Branch(true_label_);
    }
  } else {
    // For simplicity we always test the accumulator register.
    __ li(result_register(), Operand(lit));
    codegen()->DoTest(this);
  }
}


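// Replace the top |count| operands on the stack with the value in |reg|.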
void FullCodeGenerator::StackValueContext::DropAndPlug(int count,
                                                       Register reg) const {
  DCHECK(count > 0);
  if (count > 1) codegen()->DropOperands(count - 1);
  __ sd(reg, MemOperand(sp, 0));
}


void FullCodeGenerator::EffectContext::Plug(Label* materialize_true,
                                            Label* materialize_false) const {
  DCHECK(materialize_true == materialize_false);
  __ bind(materialize_true);
}


void FullCodeGenerator::AccumulatorValueContext::Plug(
    Label* materialize_true,
    Label* materialize_false) const {
  Label done;
  __ bind(materialize_true);
  __ LoadRoot(result_register(), Heap::kTrueValueRootIndex);
  __ Branch(&done);
  __ bind(materialize_false);
  __ LoadRoot(result_register(), Heap::kFalseValueRootIndex);
  __ bind(&done);
}


void FullCodeGenerator::StackValueContext::Plug(
    Label* materialize_true,
    Label* materialize_false) const {
  codegen()->OperandStackDepthIncrement(1);
  Label done;
  __ bind(materialize_true);
  __ LoadRoot(at, Heap::kTrueValueRootIndex);
  // Push the value as the following branch can clobber at in long branch mode.
  __ push(at);
  __ Branch(&done);
  __ bind(materialize_false);
  __ LoadRoot(at, Heap::kFalseValueRootIndex);
  __ push(at);
  __ bind(&done);
}


void FullCodeGenerator::TestContext::Plug(Label* materialize_true,
                                          Label* materialize_false) const {
  DCHECK(materialize_true == true_label_);
  DCHECK(materialize_false == false_label_);
}


void FullCodeGenerator::AccumulatorValueContext::Plug(bool flag) const {
  Heap::RootListIndex value_root_index =
      flag ? Heap::kTrueValueRootIndex : Heap::kFalseValueRootIndex;
  __ LoadRoot(result_register(), value_root_index);
}


void FullCodeGenerator::StackValueContext::Plug(bool flag) const {
  Heap::RootListIndex value_root_index =
      flag ? Heap::kTrueValueRootIndex : Heap::kFalseValueRootIndex;
  __ LoadRoot(at, value_root_index);
  codegen()->PushOperand(at);
}


void FullCodeGenerator::TestContext::Plug(bool flag) const {
  codegen()->PrepareForBailoutBeforeSplit(condition(),
                                          true,
                                          true_label_,
                                          false_label_);
  if (flag) {
    if (true_label_ != fall_through_) __ Branch(true_label_);
  } else {
    if (false_label_ != fall_through_) __ Branch(false_label_);
  }
}


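// Convert the value in the accumulator to a boolean via the ToBoolean stub
// and branch to |if_true| or |if_false| accordingly.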
void FullCodeGenerator::DoTest(Expression* condition,
                               Label* if_true,
                               Label* if_false,
                               Label* fall_through) {
  __ mov(a0, result_register());
  Handle<Code> ic = ToBooleanStub::GetUninitialized(isolate());
  CallIC(ic, condition->test_id());
  __ LoadRoot(at, Heap::kTrueValueRootIndex);
  Split(eq, result_register(), Operand(at), if_true, if_false, fall_through);
}


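// Emit a branch on condition |cc| applied to |lhs| and |rhs|, jumping to
// |if_true| or |if_false| and omitting the branch that would just fall
// through.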
void FullCodeGenerator::Split(Condition cc,
                              Register lhs,
                              const Operand& rhs,
                              Label* if_true,
                              Label* if_false,
                              Label* fall_through) {
  if (if_false == fall_through) {
    __ Branch(if_true, cc, lhs, rhs);
  } else if (if_true == fall_through) {
    __ Branch(if_false, NegateCondition(cc), lhs, rhs);
  } else {
    __ Branch(if_true, cc, lhs, rhs);
    __ Branch(if_false);
  }
}


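// Return the frame slot operand for a stack-allocated parameter or local.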
MemOperand FullCodeGenerator::StackOperand(Variable* var) {
  DCHECK(var->IsStackAllocated());
  // Offset is negative because higher indexes are at lower addresses.
  int offset = -var->index() * kPointerSize;
  // Adjust by a (parameter or local) base offset.
  if (var->IsParameter()) {
    offset += (info_->scope()->num_parameters() + 1) * kPointerSize;
  } else {
    offset += JavaScriptFrameConstants::kLocal0Offset;
  }
  return MemOperand(fp, offset);
}


MemOperand FullCodeGenerator::VarOperand(Variable* var, Register scratch) {
  DCHECK(var->IsContextSlot() || var->IsStackAllocated());
  if (var->IsContextSlot()) {
    int context_chain_length = scope()->ContextChainLength(var->scope());
    __ LoadContext(scratch, context_chain_length);
    return ContextMemOperand(scratch, var->index());
  } else {
    return StackOperand(var);
  }
}


void FullCodeGenerator::GetVar(Register dest, Variable* var) {
  // Use destination as scratch.
  MemOperand location = VarOperand(var, dest);
  __ ld(dest, location);
}


void FullCodeGenerator::SetVar(Variable* var,
                               Register src,
                               Register scratch0,
                               Register scratch1) {
  DCHECK(var->IsContextSlot() || var->IsStackAllocated());
  DCHECK(!scratch0.is(src));
  DCHECK(!scratch0.is(scratch1));
  DCHECK(!scratch1.is(src));
  MemOperand location = VarOperand(var, scratch0);
  __ sd(src, location);
  // Emit the write barrier code if the location is in the heap.
  if (var->IsContextSlot()) {
    __ RecordWriteContextSlot(scratch0,
                              location.offset(),
                              src,
                              scratch1,
                              kRAHasBeenSaved,
                              kDontSaveFPRegs);
  }
}


void FullCodeGenerator::PrepareForBailoutBeforeSplit(Expression* expr,
                                                     bool should_normalize,
                                                     Label* if_true,
                                                     Label* if_false) {
  // Only prepare for bailouts before splits if we're in a test
  // context. Otherwise, we let the Visit function deal with the
  // preparation to avoid preparing with the same AST id twice.
  if (!context()->IsTest()) return;

  Label skip;
  if (should_normalize) __ Branch(&skip);
  PrepareForBailout(expr, TOS_REG);
  if (should_normalize) {
    __ LoadRoot(a4, Heap::kTrueValueRootIndex);
    Split(eq, a0, Operand(a4), if_true, if_false, NULL);
    __ bind(&skip);
  }
}


void FullCodeGenerator::EmitDebugCheckDeclarationContext(Variable* variable) {
  // The variable in the declaration always resides in the current function
  // context.
  DCHECK_EQ(0, scope()->ContextChainLength(variable->scope()));
  if (FLAG_debug_code) {
    // Check that we're not inside a with or catch context.
    __ ld(a1, FieldMemOperand(cp, HeapObject::kMapOffset));
    __ LoadRoot(a4, Heap::kWithContextMapRootIndex);
    __ Check(ne, kDeclarationInWithContext,
        a1, Operand(a4));
    __ LoadRoot(a4, Heap::kCatchContextMapRootIndex);
    __ Check(ne, kDeclarationInCatchContext,
        a1, Operand(a4));
  }
}


void FullCodeGenerator::VisitVariableDeclaration(
    VariableDeclaration* declaration) {
  // If it was not possible to allocate the variable at compile time, we
  // need to "declare" it at runtime to make sure it actually exists in the
  // local context.
  VariableProxy* proxy = declaration->proxy();
  VariableMode mode = declaration->mode();
  Variable* variable = proxy->var();
  bool hole_init = mode == LET || mode == CONST || mode == CONST_LEGACY;
  switch (variable->location()) {
    case VariableLocation::GLOBAL:
    case VariableLocation::UNALLOCATED:
      globals_->Add(variable->name(), zone());
      globals_->Add(variable->binding_needs_init()
                        ? isolate()->factory()->the_hole_value()
                        : isolate()->factory()->undefined_value(),
                    zone());
      break;

    case VariableLocation::PARAMETER:
    case VariableLocation::LOCAL:
      if (hole_init) {
        Comment cmnt(masm_, "[ VariableDeclaration");
        __ LoadRoot(a4, Heap::kTheHoleValueRootIndex);
        __ sd(a4, StackOperand(variable));
      }
      break;

    case VariableLocation::CONTEXT:
      if (hole_init) {
        Comment cmnt(masm_, "[ VariableDeclaration");
        EmitDebugCheckDeclarationContext(variable);
        __ LoadRoot(at, Heap::kTheHoleValueRootIndex);
        __ sd(at, ContextMemOperand(cp, variable->index()));
        // No write barrier since the_hole_value is in old space.
        PrepareForBailoutForId(proxy->id(), NO_REGISTERS);
      }
      break;

    case VariableLocation::LOOKUP: {
      Comment cmnt(masm_, "[ VariableDeclaration");
      __ li(a2, Operand(variable->name()));
      // Declaration nodes are always introduced in one of four modes.
      DCHECK(IsDeclaredVariableMode(mode));
      // Push initial value, if any.
      // Note: For variables we must not push an initial value (such as
      // 'undefined') because we may have a (legal) redeclaration and we
      // must not destroy the current value.
      if (hole_init) {
        __ LoadRoot(a0, Heap::kTheHoleValueRootIndex);
      } else {
        DCHECK(Smi::FromInt(0) == 0);
        __ mov(a0, zero_reg);  // Smi::FromInt(0) indicates no initial value.
      }
      __ Push(a2, a0);
      __ Push(Smi::FromInt(variable->DeclarationPropertyAttributes()));
      __ CallRuntime(Runtime::kDeclareLookupSlot);
      break;
    }
  }
}


void FullCodeGenerator::VisitFunctionDeclaration(
    FunctionDeclaration* declaration) {
  VariableProxy* proxy = declaration->proxy();
  Variable* variable = proxy->var();
  switch (variable->location()) {
    case VariableLocation::GLOBAL:
    case VariableLocation::UNALLOCATED: {
      globals_->Add(variable->name(), zone());
      Handle<SharedFunctionInfo> function =
          Compiler::GetSharedFunctionInfo(declaration->fun(), script(), info_);
      // Check for stack-overflow exception.
      if (function.is_null()) return SetStackOverflow();
      globals_->Add(function, zone());
      break;
    }

    case VariableLocation::PARAMETER:
    case VariableLocation::LOCAL: {
      Comment cmnt(masm_, "[ FunctionDeclaration");
      VisitForAccumulatorValue(declaration->fun());
      __ sd(result_register(), StackOperand(variable));
      break;
    }

    case VariableLocation::CONTEXT: {
      Comment cmnt(masm_, "[ FunctionDeclaration");
      EmitDebugCheckDeclarationContext(variable);
      VisitForAccumulatorValue(declaration->fun());
      __ sd(result_register(), ContextMemOperand(cp, variable->index()));
      int offset = Context::SlotOffset(variable->index());
      // We know that we have written a function, which is not a smi.
      __ RecordWriteContextSlot(cp,
                                offset,
                                result_register(),
                                a2,
                                kRAHasBeenSaved,
                                kDontSaveFPRegs,
                                EMIT_REMEMBERED_SET,
                                OMIT_SMI_CHECK);
      PrepareForBailoutForId(proxy->id(), NO_REGISTERS);
      break;
    }

    case VariableLocation::LOOKUP: {
      Comment cmnt(masm_, "[ FunctionDeclaration");
      __ li(a2, Operand(variable->name()));
      PushOperand(a2);
      // Push initial value for function declaration.
      VisitForStackValue(declaration->fun());
      PushOperand(Smi::FromInt(variable->DeclarationPropertyAttributes()));
      CallRuntimeWithOperands(Runtime::kDeclareLookupSlot);
      break;
    }
  }
}


void FullCodeGenerator::DeclareGlobals(Handle<FixedArray> pairs) {
  // Call the runtime to declare the globals.
  __ li(a1, Operand(pairs));
  __ li(a0, Operand(Smi::FromInt(DeclareGlobalsFlags())));
  __ Push(a1, a0);
  __ CallRuntime(Runtime::kDeclareGlobals);
  // Return value is ignored.
}


void FullCodeGenerator::DeclareModules(Handle<FixedArray> descriptions) {
  // Call the runtime to declare the modules.
  __ Push(descriptions);
  __ CallRuntime(Runtime::kDeclareModules);
  // Return value is ignored.
}


void FullCodeGenerator::VisitSwitchStatement(SwitchStatement* stmt) {
  Comment cmnt(masm_, "[ SwitchStatement");
  Breakable nested_statement(this, stmt);
  SetStatementPosition(stmt);

  // Keep the switch value on the stack until a case matches.
  VisitForStackValue(stmt->tag());
  PrepareForBailoutForId(stmt->EntryId(), NO_REGISTERS);

  ZoneList<CaseClause*>* clauses = stmt->cases();
  CaseClause* default_clause = NULL;  // Can occur anywhere in the list.

  Label next_test;  // Recycled for each test.
  // Compile all the tests with branches to their bodies.
  for (int i = 0; i < clauses->length(); i++) {
    CaseClause* clause = clauses->at(i);
    clause->body_target()->Unuse();

    // The default is not a test, but remember it as final fall through.
    if (clause->is_default()) {
      default_clause = clause;
      continue;
    }

    Comment cmnt(masm_, "[ Case comparison");
    __ bind(&next_test);
    next_test.Unuse();

    // Compile the label expression.
    VisitForAccumulatorValue(clause->label());
    __ mov(a0, result_register());  // CompareStub requires args in a0, a1.

    // Perform the comparison as if via '==='.
    __ ld(a1, MemOperand(sp, 0));  // Switch value.
    bool inline_smi_code = ShouldInlineSmiCase(Token::EQ_STRICT);
    JumpPatchSite patch_site(masm_);
    if (inline_smi_code) {
      Label slow_case;
      __ or_(a2, a1, a0);
      patch_site.EmitJumpIfNotSmi(a2, &slow_case);

      __ Branch(&next_test, ne, a1, Operand(a0));
      __ Drop(1);  // Switch value is no longer needed.
      __ Branch(clause->body_target());

      __ bind(&slow_case);
    }

    // Record position before stub call for type feedback.
    SetExpressionPosition(clause);
    Handle<Code> ic =
        CodeFactory::CompareIC(isolate(), Token::EQ_STRICT).code();
    CallIC(ic, clause->CompareId());
    patch_site.EmitPatchInfo();

    Label skip;
    __ Branch(&skip);
    PrepareForBailout(clause, TOS_REG);
    __ LoadRoot(at, Heap::kTrueValueRootIndex);
    __ Branch(&next_test, ne, v0, Operand(at));
    __ Drop(1);
    __ Branch(clause->body_target());
    __ bind(&skip);

    __ Branch(&next_test, ne, v0, Operand(zero_reg));
    __ Drop(1);  // Switch value is no longer needed.
    __ Branch(clause->body_target());
  }

  // Discard the test value and jump to the default if present, otherwise to
  // the end of the statement.
  __ bind(&next_test);
  DropOperands(1);  // Switch value is no longer needed.
  if (default_clause == NULL) {
    __ Branch(nested_statement.break_label());
  } else {
    __ Branch(default_clause->body_target());
  }

  // Compile all the case bodies.
  for (int i = 0; i < clauses->length(); i++) {
    Comment cmnt(masm_, "[ Case body");
    CaseClause* clause = clauses->at(i);
    __ bind(clause->body_target());
    PrepareForBailoutForId(clause->EntryId(), NO_REGISTERS);
    VisitStatements(clause->statements());
  }

  __ bind(nested_statement.break_label());
  PrepareForBailoutForId(stmt->ExitId(), NO_REGISTERS);
}


void FullCodeGenerator::VisitForInStatement(ForInStatement* stmt) {
  Comment cmnt(masm_, "[ ForInStatement");
  SetStatementPosition(stmt, SKIP_BREAK);

  FeedbackVectorSlot slot = stmt->ForInFeedbackSlot();

  Label loop, exit;
  ForIn loop_statement(this, stmt);
  increment_loop_depth();

  // Get the object to enumerate over. If the object is null or undefined, skip
  // over the loop. See ECMA-262 version 5, section 12.6.4.
  SetExpressionAsStatementPosition(stmt->enumerable());
  VisitForAccumulatorValue(stmt->enumerable());
  __ mov(a0, result_register());
  OperandStackDepthIncrement(ForIn::kElementCount);

  // If the object is null or undefined, skip over the loop, otherwise convert
  // it to a JS receiver. See ECMA-262 version 5, section 12.6.4.
  Label convert, done_convert;
  __ JumpIfSmi(a0, &convert);
  __ GetObjectType(a0, a1, a1);
  __ Branch(USE_DELAY_SLOT, &done_convert, ge, a1,
            Operand(FIRST_JS_RECEIVER_TYPE));
  __ LoadRoot(at, Heap::kNullValueRootIndex);  // In delay slot.
  __ Branch(USE_DELAY_SLOT, &exit, eq, a0, Operand(at));
  __ LoadRoot(at, Heap::kUndefinedValueRootIndex);  // In delay slot.
  __ Branch(&exit, eq, a0, Operand(at));
  __ bind(&convert);
  ToObjectStub stub(isolate());
  __ CallStub(&stub);
  __ mov(a0, v0);
  __ bind(&done_convert);
  PrepareForBailoutForId(stmt->ToObjectId(), TOS_REG);
  __ push(a0);

  // Check cache validity in generated code. This is a fast case for
  // the JSObject::IsSimpleEnum cache validity checks. If we cannot
  // guarantee cache validity, call the runtime system to check cache
  // validity or get the property names in a fixed array.
  // Note: Proxies never have an enum cache, so will always take the
  // slow path.
  Label call_runtime;
  __ CheckEnumCache(&call_runtime);

  // The enum cache is valid. Load the map of the object being
  // iterated over and use the cache for the iteration.
  Label use_cache;
  __ ld(v0, FieldMemOperand(a0, HeapObject::kMapOffset));
  __ Branch(&use_cache);

  // Get the set of properties to enumerate.
  __ bind(&call_runtime);
  __ push(a0);  // Duplicate the enumerable object on the stack.
  __ CallRuntime(Runtime::kForInEnumerate);
  PrepareForBailoutForId(stmt->EnumId(), TOS_REG);

  // If we got a map from the runtime call, we can do a fast
  // modification check. Otherwise, we got a fixed array, and we have
  // to do a slow check.
  Label fixed_array;
  __ ld(a2, FieldMemOperand(v0, HeapObject::kMapOffset));
  __ LoadRoot(at, Heap::kMetaMapRootIndex);
  __ Branch(&fixed_array, ne, a2, Operand(at));

  // We got a map in register v0. Get the enumeration cache from it.
  Label no_descriptors;
  __ bind(&use_cache);

  __ EnumLength(a1, v0);
  __ Branch(&no_descriptors, eq, a1, Operand(Smi::FromInt(0)));

  __ LoadInstanceDescriptors(v0, a2);
  __ ld(a2, FieldMemOperand(a2, DescriptorArray::kEnumCacheOffset));
  __ ld(a2, FieldMemOperand(a2, DescriptorArray::kEnumCacheBridgeCacheOffset));

  // Set up the four remaining stack slots.
  __ li(a0, Operand(Smi::FromInt(0)));
  // Push map, enumeration cache, enumeration cache length (as smi) and zero.
  __ Push(v0, a2, a1, a0);
  __ jmp(&loop);

  __ bind(&no_descriptors);
  __ Drop(1);
  __ jmp(&exit);

  // We got a fixed array in register v0. Iterate through that.
  __ bind(&fixed_array);

  int const vector_index = SmiFromSlot(slot)->value();
  __ EmitLoadTypeFeedbackVector(a1);
  __ li(a2, Operand(TypeFeedbackVector::MegamorphicSentinel(isolate())));
  __ sd(a2, FieldMemOperand(a1, FixedArray::OffsetOfElementAt(vector_index)));

  __ li(a1, Operand(Smi::FromInt(1)));  // Smi(1) indicates slow check.
  __ Push(a1, v0);  // Smi and array.
  __ ld(a1, FieldMemOperand(v0, FixedArray::kLengthOffset));
  __ Push(a1);  // Fixed array length (as smi).
  PrepareForBailoutForId(stmt->PrepareId(), NO_REGISTERS);
  __ li(a0, Operand(Smi::FromInt(0)));
  __ Push(a0);  // Initial index.
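  // The operand stack now holds, from top to bottom: the current index, the
  // length, the fixed array of keys (or the enum cache), the map or Smi(1),
  // and the enumerable object itself.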

  // Generate code for doing the condition check.
  __ bind(&loop);
  SetExpressionAsStatementPosition(stmt->each());

  // Load the current count to a0, load the length to a1.
  __ ld(a0, MemOperand(sp, 0 * kPointerSize));
  __ ld(a1, MemOperand(sp, 1 * kPointerSize));
  __ Branch(loop_statement.break_label(), hs, a0, Operand(a1));

  // Get the current entry of the array into register a3.
  __ ld(a2, MemOperand(sp, 2 * kPointerSize));
  __ Daddu(a2, a2, Operand(FixedArray::kHeaderSize - kHeapObjectTag));
  __ SmiScale(a4, a0, kPointerSizeLog2);
  __ daddu(a4, a2, a4);  // Array base + scaled (smi) index.
  __ ld(a3, MemOperand(a4));  // Current entry.

  // Get the expected map from the stack or a smi in the
  // permanent slow case into register a2.
  __ ld(a2, MemOperand(sp, 3 * kPointerSize));

  // Check if the expected map still matches that of the enumerable.
  // If not, we may have to filter the key.
  Label update_each;
  __ ld(a1, MemOperand(sp, 4 * kPointerSize));
  __ ld(a4, FieldMemOperand(a1, HeapObject::kMapOffset));
  __ Branch(&update_each, eq, a4, Operand(a2));

  // We might get here from TurboFan or Crankshaft when something in the
  // for-in loop body deopts and we only now notice in fullcodegen that we
  // can no longer use the enum cache, i.e. we have left fast mode. So better
  // record this information here, in case we later OSR back into this loop
  // or reoptimize the whole function w/o rerunning the loop with the slow
  // mode object in fullcodegen (which would result in a deopt loop).
  __ EmitLoadTypeFeedbackVector(a0);
  __ li(a2, Operand(TypeFeedbackVector::MegamorphicSentinel(isolate())));
  __ sd(a2, FieldMemOperand(a0, FixedArray::OffsetOfElementAt(vector_index)));

  // Convert the entry to a string or (smi) 0 if it isn't a property
  // any more. If the property has been removed while iterating, we
  // just skip it.
  __ Push(a1, a3);  // Enumerable and current entry.
  __ CallRuntime(Runtime::kForInFilter);
  PrepareForBailoutForId(stmt->FilterId(), TOS_REG);
  __ mov(a3, result_register());
  __ LoadRoot(at, Heap::kUndefinedValueRootIndex);
  __ Branch(loop_statement.continue_label(), eq, a3, Operand(at));

  // Update the 'each' property or variable from the possibly filtered
  // entry in register a3.
  __ bind(&update_each);
  __ mov(result_register(), a3);
  // Perform the assignment as if via '='.
  { EffectContext context(this);
    EmitAssignment(stmt->each(), stmt->EachFeedbackSlot());
    PrepareForBailoutForId(stmt->AssignmentId(), NO_REGISTERS);
  }

  // Both Crankshaft and Turbofan expect BodyId to be right before stmt->body().
  PrepareForBailoutForId(stmt->BodyId(), NO_REGISTERS);
  // Generate code for the body of the loop.
  Visit(stmt->body());

  // Generate code for going to the next element by incrementing
  // the index (smi) stored on top of the stack.
  __ bind(loop_statement.continue_label());
  __ pop(a0);
  __ Daddu(a0, a0, Operand(Smi::FromInt(1)));
  __ push(a0);

  EmitBackEdgeBookkeeping(stmt, &loop);
  __ Branch(&loop);

  // Remove the pointers stored on the stack.
  __ bind(loop_statement.break_label());
  DropOperands(5);

  // Exit and decrement the loop depth.
  PrepareForBailoutForId(stmt->ExitId(), NO_REGISTERS);
  __ bind(&exit);
  decrement_loop_depth();
}


void FullCodeGenerator::EmitNewClosure(Handle<SharedFunctionInfo> info,
                                       bool pretenure) {
  // Use the fast case closure allocation code that allocates in new
  // space for nested functions that don't need literals cloning. If
  // we're running with the --always-opt or the --prepare-always-opt
  // flag, we need to use the runtime function so that the new function
  // we are creating here gets a chance to have its code optimized and
  // doesn't just get a copy of the existing unoptimized code.
  if (!FLAG_always_opt &&
      !FLAG_prepare_always_opt &&
      !pretenure &&
      scope()->is_function_scope() &&
      info->num_literals() == 0) {
    FastNewClosureStub stub(isolate(), info->language_mode(), info->kind());
    __ li(a2, Operand(info));
    __ CallStub(&stub);
  } else {
    __ Push(info);
    __ CallRuntime(pretenure ? Runtime::kNewClosure_Tenured
                             : Runtime::kNewClosure);
  }
  context()->Plug(v0);
}


void FullCodeGenerator::EmitSetHomeObject(Expression* initializer, int offset,
                                          FeedbackVectorSlot slot) {
  DCHECK(NeedsHomeObject(initializer));
  __ ld(StoreDescriptor::ReceiverRegister(), MemOperand(sp));
  __ li(StoreDescriptor::NameRegister(),
        Operand(isolate()->factory()->home_object_symbol()));
  __ ld(StoreDescriptor::ValueRegister(),
        MemOperand(sp, offset * kPointerSize));
  EmitLoadStoreICSlot(slot);
  CallStoreIC();
}


void FullCodeGenerator::EmitSetHomeObjectAccumulator(Expression* initializer,
                                                     int offset,
                                                     FeedbackVectorSlot slot) {
  DCHECK(NeedsHomeObject(initializer));
  __ Move(StoreDescriptor::ReceiverRegister(), v0);
  __ li(StoreDescriptor::NameRegister(),
        Operand(isolate()->factory()->home_object_symbol()));
  __ ld(StoreDescriptor::ValueRegister(),
        MemOperand(sp, offset * kPointerSize));
  EmitLoadStoreICSlot(slot);
  CallStoreIC();
}


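// Walk the context chain outwards from the current scope and, for every
// context that might have an extension object introduced by a sloppy eval,
// verify that the extension slot still holds the hole; otherwise jump to
// |slow|. If all checks pass, load the global variable with the normal
// global load machinery.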
void FullCodeGenerator::EmitLoadGlobalCheckExtensions(VariableProxy* proxy,
                                                      TypeofMode typeof_mode,
                                                      Label* slow) {
  Register current = cp;
  Register next = a1;
  Register temp = a2;

  Scope* s = scope();
  while (s != NULL) {
    if (s->num_heap_slots() > 0) {
      if (s->calls_sloppy_eval()) {
        // Check that extension is "the hole".
        __ ld(temp, ContextMemOperand(current, Context::EXTENSION_INDEX));
        __ JumpIfNotRoot(temp, Heap::kTheHoleValueRootIndex, slow);
      }
      // Load next context in chain.
      __ ld(next, ContextMemOperand(current, Context::PREVIOUS_INDEX));
      // Walk the rest of the chain without clobbering cp.
      current = next;
    }
    // If no outer scope calls eval, we do not need to check more
    // context extensions.
    if (!s->outer_scope_calls_sloppy_eval() || s->is_eval_scope()) break;
    s = s->outer_scope();
  }

  if (s->is_eval_scope()) {
    Label loop, fast;
    if (!current.is(next)) {
      __ Move(next, current);
    }
    __ bind(&loop);
    // Terminate at native context.
    __ ld(temp, FieldMemOperand(next, HeapObject::kMapOffset));
    __ LoadRoot(a4, Heap::kNativeContextMapRootIndex);
    __ Branch(&fast, eq, temp, Operand(a4));
    // Check that extension is "the hole".
    __ ld(temp, ContextMemOperand(next, Context::EXTENSION_INDEX));
    __ JumpIfNotRoot(temp, Heap::kTheHoleValueRootIndex, slow);
    // Load next context in chain.
    __ ld(next, ContextMemOperand(next, Context::PREVIOUS_INDEX));
    __ Branch(&loop);
    __ bind(&fast);
  }

  // All extension objects were empty and it is safe to use a normal global
  // load machinery.
  EmitGlobalVariableLoad(proxy, typeof_mode);
}


MemOperand FullCodeGenerator::ContextSlotOperandCheckExtensions(Variable* var,
                                                                Label* slow) {
  DCHECK(var->IsContextSlot());
  Register context = cp;
  Register next = a3;
  Register temp = a4;

  for (Scope* s = scope(); s != var->scope(); s = s->outer_scope()) {
    if (s->num_heap_slots() > 0) {
      if (s->calls_sloppy_eval()) {
        // Check that extension is "the hole".
        __ ld(temp, ContextMemOperand(context, Context::EXTENSION_INDEX));
        __ JumpIfNotRoot(temp, Heap::kTheHoleValueRootIndex, slow);
      }
      __ ld(next, ContextMemOperand(context, Context::PREVIOUS_INDEX));
      // Walk the rest of the chain without clobbering cp.
      context = next;
    }
  }
  // Check that last extension is "the hole".
  __ ld(temp, ContextMemOperand(context, Context::EXTENSION_INDEX));
  __ JumpIfNotRoot(temp, Heap::kTheHoleValueRootIndex, slow);

  // This function is used only for loads, not stores, so it's safe to
  // return a cp-based operand (the write barrier cannot be allowed to
  // destroy the cp register).
  return ContextMemOperand(context, var->index());
}


void FullCodeGenerator::EmitDynamicLookupFastCase(VariableProxy* proxy,
                                                  TypeofMode typeof_mode,
                                                  Label* slow, Label* done) {
  // Generate fast-case code for variables that might be shadowed by
  // eval-introduced variables. Eval is used a lot without
  // introducing variables. In those cases, we do not want to
  // perform a runtime call for all variables in the scope
  // containing the eval.
  Variable* var = proxy->var();
  if (var->mode() == DYNAMIC_GLOBAL) {
    EmitLoadGlobalCheckExtensions(proxy, typeof_mode, slow);
    __ Branch(done);
  } else if (var->mode() == DYNAMIC_LOCAL) {
    Variable* local = var->local_if_not_shadowed();
    __ ld(v0, ContextSlotOperandCheckExtensions(local, slow));
    if (local->mode() == LET || local->mode() == CONST ||
        local->mode() == CONST_LEGACY) {
      __ LoadRoot(at, Heap::kTheHoleValueRootIndex);
      __ dsubu(at, v0, at);  // Sub as compare: at == 0 on eq.
      if (local->mode() == CONST_LEGACY) {
        __ LoadRoot(a0, Heap::kUndefinedValueRootIndex);
        __ Movz(v0, a0, at);  // Conditional move: return Undefined if TheHole.
      } else {  // LET || CONST
        __ Branch(done, ne, at, Operand(zero_reg));
        __ li(a0, Operand(var->name()));
        __ push(a0);
        __ CallRuntime(Runtime::kThrowReferenceError);
      }
    }
    __ Branch(done);
  }
}


void FullCodeGenerator::EmitGlobalVariableLoad(VariableProxy* proxy,
                                               TypeofMode typeof_mode) {
  Variable* var = proxy->var();
  DCHECK(var->IsUnallocatedOrGlobalSlot() ||
         (var->IsLookupSlot() && var->mode() == DYNAMIC_GLOBAL));
  __ LoadGlobalObject(LoadDescriptor::ReceiverRegister());
  __ li(LoadDescriptor::NameRegister(), Operand(var->name()));
  __ li(LoadDescriptor::SlotRegister(),
        Operand(SmiFromSlot(proxy->VariableFeedbackSlot())));
  CallLoadIC(typeof_mode);
}


void FullCodeGenerator::EmitVariableLoad(VariableProxy* proxy,
                                         TypeofMode typeof_mode) {
  // Record position before possible IC call.
  SetExpressionPosition(proxy);
  PrepareForBailoutForId(proxy->BeforeId(), NO_REGISTERS);
  Variable* var = proxy->var();

  // Three cases: global variables, lookup variables, and all other types of
  // variables.
  switch (var->location()) {
    case VariableLocation::GLOBAL:
    case VariableLocation::UNALLOCATED: {
      Comment cmnt(masm_, "[ Global variable");
      EmitGlobalVariableLoad(proxy, typeof_mode);
      context()->Plug(v0);
      break;
    }

    case VariableLocation::PARAMETER:
    case VariableLocation::LOCAL:
    case VariableLocation::CONTEXT: {
      DCHECK_EQ(NOT_INSIDE_TYPEOF, typeof_mode);
      Comment cmnt(masm_, var->IsContextSlot() ? "[ Context variable"
                                               : "[ Stack variable");
      if (NeedsHoleCheckForLoad(proxy)) {
        // Let and const need a read barrier.
        GetVar(v0, var);
        __ LoadRoot(at, Heap::kTheHoleValueRootIndex);
        __ dsubu(at, v0, at);  // Sub as compare: at == 0 on eq.
        if (var->mode() == LET || var->mode() == CONST) {
          // Throw a reference error when using an uninitialized let/const
          // binding in harmony mode.
          Label done;
          __ Branch(&done, ne, at, Operand(zero_reg));
          __ li(a0, Operand(var->name()));
          __ push(a0);
          __ CallRuntime(Runtime::kThrowReferenceError);
          __ bind(&done);
        } else {
          // Uninitialized legacy const bindings are unholed.
          DCHECK(var->mode() == CONST_LEGACY);
          __ LoadRoot(a0, Heap::kUndefinedValueRootIndex);
          __ Movz(v0, a0, at);  // Conditional move: Undefined if TheHole.
        }
        context()->Plug(v0);
        break;
      }
      context()->Plug(var);
      break;
    }

    case VariableLocation::LOOKUP: {
      Comment cmnt(masm_, "[ Lookup variable");
      Label done, slow;
      // Generate code for loading from variables potentially shadowed
      // by eval-introduced variables.
      EmitDynamicLookupFastCase(proxy, typeof_mode, &slow, &done);
      __ bind(&slow);
      __ Push(var->name());
      Runtime::FunctionId function_id =
          typeof_mode == NOT_INSIDE_TYPEOF
              ? Runtime::kLoadLookupSlot
              : Runtime::kLoadLookupSlotInsideTypeof;
      __ CallRuntime(function_id);
      __ bind(&done);
      context()->Plug(v0);
    }
  }
}


1423void FullCodeGenerator::VisitRegExpLiteral(RegExpLiteral* expr) {
1424 Comment cmnt(masm_, "[ RegExpLiteral");
1425 __ ld(a3, MemOperand(fp, JavaScriptFrameConstants::kFunctionOffset));
1426 __ li(a2, Operand(Smi::FromInt(expr->literal_index())));
1427 __ li(a1, Operand(expr->pattern()));
1428 __ li(a0, Operand(Smi::FromInt(expr->flags())));
1429 FastCloneRegExpStub stub(isolate());
1430 __ CallStub(&stub);
1431 context()->Plug(v0);
1432}
1433
1434
1435void FullCodeGenerator::EmitAccessor(ObjectLiteralProperty* property) {
1436 Expression* expression = (property == NULL) ? NULL : property->value();
1437 if (expression == NULL) {
1438 __ LoadRoot(a1, Heap::kNullValueRootIndex);
Ben Murdoch097c5b22016-05-18 11:27:45 +01001439 PushOperand(a1);
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00001440 } else {
1441 VisitForStackValue(expression);
1442 if (NeedsHomeObject(expression)) {
1443 DCHECK(property->kind() == ObjectLiteral::Property::GETTER ||
1444 property->kind() == ObjectLiteral::Property::SETTER);
1445 int offset = property->kind() == ObjectLiteral::Property::GETTER ? 2 : 3;
1446 EmitSetHomeObject(expression, offset, property->GetSlot());
1447 }
1448 }
1449}
1450
1451
1452void FullCodeGenerator::VisitObjectLiteral(ObjectLiteral* expr) {
1453 Comment cmnt(masm_, "[ ObjectLiteral");
1454
1455 Handle<FixedArray> constant_properties = expr->constant_properties();
1456 __ ld(a3, MemOperand(fp, JavaScriptFrameConstants::kFunctionOffset));
1457 __ li(a2, Operand(Smi::FromInt(expr->literal_index())));
1458 __ li(a1, Operand(constant_properties));
1459 __ li(a0, Operand(Smi::FromInt(expr->ComputeFlags())));
1460 if (MustCreateObjectLiteralWithRuntime(expr)) {
1461 __ Push(a3, a2, a1, a0);
1462 __ CallRuntime(Runtime::kCreateObjectLiteral);
1463 } else {
1464 FastCloneShallowObjectStub stub(isolate(), expr->properties_count());
1465 __ CallStub(&stub);
1466 }
1467 PrepareForBailoutForId(expr->CreateLiteralId(), TOS_REG);
1468
1469 // If result_saved is true the result is on top of the stack. If
1470 // result_saved is false the result is in v0.
1471 bool result_saved = false;
1472
1473 AccessorTable accessor_table(zone());
1474 int property_index = 0;
1475 for (; property_index < expr->properties()->length(); property_index++) {
1476 ObjectLiteral::Property* property = expr->properties()->at(property_index);
1477 if (property->is_computed_name()) break;
1478 if (property->IsCompileTimeValue()) continue;
1479
1480 Literal* key = property->key()->AsLiteral();
1481 Expression* value = property->value();
1482 if (!result_saved) {
Ben Murdoch097c5b22016-05-18 11:27:45 +01001483 PushOperand(v0); // Save result on stack.
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00001484 result_saved = true;
1485 }
1486 switch (property->kind()) {
1487 case ObjectLiteral::Property::CONSTANT:
1488 UNREACHABLE();
1489 case ObjectLiteral::Property::MATERIALIZED_LITERAL:
1490 DCHECK(!CompileTimeValue::IsCompileTimeValue(property->value()));
1491 // Fall through.
1492 case ObjectLiteral::Property::COMPUTED:
1493 // It is safe to use [[Put]] here because the boilerplate already
1494 // contains computed properties with an uninitialized value.
1495 if (key->value()->IsInternalizedString()) {
1496 if (property->emit_store()) {
1497 VisitForAccumulatorValue(value);
1498 __ mov(StoreDescriptor::ValueRegister(), result_register());
1499 DCHECK(StoreDescriptor::ValueRegister().is(a0));
1500 __ li(StoreDescriptor::NameRegister(), Operand(key->value()));
1501 __ ld(StoreDescriptor::ReceiverRegister(), MemOperand(sp));
1502 EmitLoadStoreICSlot(property->GetSlot(0));
1503 CallStoreIC();
1504 PrepareForBailoutForId(key->id(), NO_REGISTERS);
1505
1506 if (NeedsHomeObject(value)) {
1507 EmitSetHomeObjectAccumulator(value, 0, property->GetSlot(1));
1508 }
1509 } else {
1510 VisitForEffect(value);
1511 }
1512 break;
1513 }
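// Note: keys that are not internalized strings take the generic path below,
// which pushes receiver, key, value and SLOPPY attributes and defines the
// property via Runtime::kSetProperty.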
1514 // Duplicate receiver on stack.
1515 __ ld(a0, MemOperand(sp));
1516 PushOperand(a0);
1517 VisitForStackValue(key);
1518 VisitForStackValue(value);
1519 if (property->emit_store()) {
1520 if (NeedsHomeObject(value)) {
1521 EmitSetHomeObject(value, 2, property->GetSlot());
1522 }
1523 __ li(a0, Operand(Smi::FromInt(SLOPPY))); // PropertyAttributes.
1524 PushOperand(a0);
1525 CallRuntimeWithOperands(Runtime::kSetProperty);
1526 } else {
1527 DropOperands(3);
1528 }
1529 break;
1530 case ObjectLiteral::Property::PROTOTYPE:
1531 // Duplicate receiver on stack.
1532 __ ld(a0, MemOperand(sp));
1533 PushOperand(a0);
1534 VisitForStackValue(value);
1535 DCHECK(property->emit_store());
1536 CallRuntimeWithOperands(Runtime::kInternalSetPrototype);
1537 PrepareForBailoutForId(expr->GetIdForPropertySet(property_index),
1538 NO_REGISTERS);
1539 break;
1540 case ObjectLiteral::Property::GETTER:
1541 if (property->emit_store()) {
1542 accessor_table.lookup(key)->second->getter = property;
1543 }
1544 break;
1545 case ObjectLiteral::Property::SETTER:
1546 if (property->emit_store()) {
1547 accessor_table.lookup(key)->second->setter = property;
1548 }
1549 break;
1550 }
1551 }
1552
1553 // Emit code to define accessors, using only a single call to the runtime for
1554 // each pair of corresponding getters and setters.
1555 for (AccessorTable::Iterator it = accessor_table.begin();
1556 it != accessor_table.end();
1557 ++it) {
1558 __ ld(a0, MemOperand(sp)); // Duplicate receiver.
1559 PushOperand(a0);
1560 VisitForStackValue(it->first);
1561 EmitAccessor(it->second->getter);
1562 EmitAccessor(it->second->setter);
1563 __ li(a0, Operand(Smi::FromInt(NONE)));
1564 PushOperand(a0);
1565 CallRuntimeWithOperands(Runtime::kDefineAccessorPropertyUnchecked);
1566 }
1567
1568 // Object literals have two parts. The "static" part on the left contains no
1569 // computed property names, and so we can compute its map ahead of time; see
1570 // runtime.cc::CreateObjectLiteralBoilerplate. The second "dynamic" part
1571 // starts with the first computed property name, and continues with all
1572 // properties to its right. All the code from above initializes the static
1573 // component of the object literal, and arranges for the map of the result to
1574 // reflect the static order in which the keys appear. For the dynamic
1575 // properties, we compile them into a series of "SetOwnProperty" runtime
1576 // calls. This will preserve insertion order.
1577 for (; property_index < expr->properties()->length(); property_index++) {
1578 ObjectLiteral::Property* property = expr->properties()->at(property_index);
1579
1580 Expression* value = property->value();
1581 if (!result_saved) {
1582 PushOperand(v0); // Save result on the stack
1583 result_saved = true;
1584 }
1585
1586 __ ld(a0, MemOperand(sp)); // Duplicate receiver.
1587 PushOperand(a0);
1588
1589 if (property->kind() == ObjectLiteral::Property::PROTOTYPE) {
1590 DCHECK(!property->is_computed_name());
1591 VisitForStackValue(value);
1592 DCHECK(property->emit_store());
1593 CallRuntimeWithOperands(Runtime::kInternalSetPrototype);
1594 PrepareForBailoutForId(expr->GetIdForPropertySet(property_index),
1595 NO_REGISTERS);
1596 } else {
1597 EmitPropertyKey(property, expr->GetIdForPropertyName(property_index));
1598 VisitForStackValue(value);
1599 if (NeedsHomeObject(value)) {
1600 EmitSetHomeObject(value, 2, property->GetSlot());
1601 }
1602
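// Note: at this point the operand stack holds the literal, the duplicated
// receiver, the computed key and the value; the switch below either consumes
// them with a runtime call or drops them.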
1603 switch (property->kind()) {
1604 case ObjectLiteral::Property::CONSTANT:
1605 case ObjectLiteral::Property::MATERIALIZED_LITERAL:
1606 case ObjectLiteral::Property::COMPUTED:
1607 if (property->emit_store()) {
1608 PushOperand(Smi::FromInt(NONE));
1609 PushOperand(Smi::FromInt(property->NeedsSetFunctionName()));
1610 CallRuntimeWithOperands(Runtime::kDefineDataPropertyInLiteral);
1611 } else {
1612 DropOperands(3);
1613 }
1614 break;
1615
1616 case ObjectLiteral::Property::PROTOTYPE:
1617 UNREACHABLE();
1618 break;
1619
1620 case ObjectLiteral::Property::GETTER:
1621 PushOperand(Smi::FromInt(NONE));
1622 CallRuntimeWithOperands(Runtime::kDefineGetterPropertyUnchecked);
1623 break;
1624
1625 case ObjectLiteral::Property::SETTER:
1626 PushOperand(Smi::FromInt(NONE));
1627 CallRuntimeWithOperands(Runtime::kDefineSetterPropertyUnchecked);
1628 break;
1629 }
1630 }
1631 }
1632
1633 if (expr->has_function()) {
1634 DCHECK(result_saved);
1635 __ ld(a0, MemOperand(sp));
1636 __ push(a0);
1637 __ CallRuntime(Runtime::kToFastProperties);
1638 }
1639
1640 if (result_saved) {
1641 context()->PlugTOS();
1642 } else {
1643 context()->Plug(v0);
1644 }
1645}
1646
1647
1648void FullCodeGenerator::VisitArrayLiteral(ArrayLiteral* expr) {
1649 Comment cmnt(masm_, "[ ArrayLiteral");
1650
1651 Handle<FixedArray> constant_elements = expr->constant_elements();
1652 bool has_fast_elements =
1653 IsFastObjectElementsKind(expr->constant_elements_kind());
1654
1655 AllocationSiteMode allocation_site_mode = TRACK_ALLOCATION_SITE;
1656 if (has_fast_elements && !FLAG_allocation_site_pretenuring) {
1657 // If the only customer of allocation sites is transitioning, then
1658 // we can turn it off if we don't have anywhere else to transition to.
1659 allocation_site_mode = DONT_TRACK_ALLOCATION_SITE;
1660 }
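// Note: as in the object literal case, a3/a2/a1 carry the closure, the
// literal index and the constant elements; the flags in a0 are only needed
// on the runtime path.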
1661
1662 __ mov(a0, result_register());
1663 __ ld(a3, MemOperand(fp, JavaScriptFrameConstants::kFunctionOffset));
1664 __ li(a2, Operand(Smi::FromInt(expr->literal_index())));
1665 __ li(a1, Operand(constant_elements));
1666 if (MustCreateArrayLiteralWithRuntime(expr)) {
1667 __ li(a0, Operand(Smi::FromInt(expr->ComputeFlags())));
1668 __ Push(a3, a2, a1, a0);
1669 __ CallRuntime(Runtime::kCreateArrayLiteral);
1670 } else {
1671 FastCloneShallowArrayStub stub(isolate(), allocation_site_mode);
1672 __ CallStub(&stub);
1673 }
1674 PrepareForBailoutForId(expr->CreateLiteralId(), TOS_REG);
1675
1676 bool result_saved = false; // Is the result saved to the stack?
1677 ZoneList<Expression*>* subexprs = expr->values();
1678 int length = subexprs->length();
1679
1680 // Emit code to evaluate all the non-constant subexpressions and to store
1681 // them into the newly cloned array.
1682 int array_index = 0;
1683 for (; array_index < length; array_index++) {
1684 Expression* subexpr = subexprs->at(array_index);
1685 DCHECK(!subexpr->IsSpread());
1686
1687 // If the subexpression is a literal or a simple materialized literal it
1688 // is already set in the cloned array.
1689 if (CompileTimeValue::IsCompileTimeValue(subexpr)) continue;
1690
1691 if (!result_saved) {
1692 PushOperand(v0); // array literal
1693 result_saved = true;
1694 }
1695
1696 VisitForAccumulatorValue(subexpr);
1697
1698 __ li(StoreDescriptor::NameRegister(), Operand(Smi::FromInt(array_index)));
1699 __ ld(StoreDescriptor::ReceiverRegister(), MemOperand(sp, 0));
1700 __ mov(StoreDescriptor::ValueRegister(), result_register());
1701 EmitLoadStoreICSlot(expr->LiteralFeedbackSlot());
1702 Handle<Code> ic =
1703 CodeFactory::KeyedStoreIC(isolate(), language_mode()).code();
1704 CallIC(ic);
1705
1706 PrepareForBailoutForId(expr->GetIdForElement(array_index), NO_REGISTERS);
1707 }
1708
1709 // If the array literal contains spread expressions it has two parts. The
1710 // first part is the "static" array with a literal index; it is handled
1711 // above. The second part starts at the first spread expression (inclusive),
1712 // and those elements are appended to the array one at a time. Note that the
1713 // number of elements an iterable produces is not known ahead of time.
1714 if (array_index < length && result_saved) {
1715 PopOperand(v0);
1716 result_saved = false;
1717 }
1718 for (; array_index < length; array_index++) {
1719 Expression* subexpr = subexprs->at(array_index);
1720
1721 PushOperand(v0);
1722 DCHECK(!subexpr->IsSpread());
1723 VisitForStackValue(subexpr);
1724 CallRuntimeWithOperands(Runtime::kAppendElement);
1725
1726 PrepareForBailoutForId(expr->GetIdForElement(array_index), NO_REGISTERS);
1727 }
1728
1729 if (result_saved) {
1730 context()->PlugTOS();
1731 } else {
1732 context()->Plug(v0);
1733 }
1734}
1735
1736
1737void FullCodeGenerator::VisitAssignment(Assignment* expr) {
1738 DCHECK(expr->target()->IsValidReferenceExpressionOrThis());
1739
1740 Comment cmnt(masm_, "[ Assignment");
1741 SetExpressionPosition(expr, INSERT_BREAK);
1742
1743 Property* property = expr->target()->AsProperty();
1744 LhsKind assign_type = Property::GetAssignType(property);
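// Note: the switch below only evaluates the target's subexpressions; the
// actual store is emitted after the right-hand side has been computed, in
// the second switch at the end of this function.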
1745
1746 // Evaluate LHS expression.
1747 switch (assign_type) {
1748 case VARIABLE:
1749 // Nothing to do here.
1750 break;
1751 case NAMED_PROPERTY:
1752 if (expr->is_compound()) {
1753 // We need the receiver both on the stack and in the register.
1754 VisitForStackValue(property->obj());
1755 __ ld(LoadDescriptor::ReceiverRegister(), MemOperand(sp, 0));
1756 } else {
1757 VisitForStackValue(property->obj());
1758 }
1759 break;
1760 case NAMED_SUPER_PROPERTY:
1761 VisitForStackValue(
1762 property->obj()->AsSuperPropertyReference()->this_var());
1763 VisitForAccumulatorValue(
1764 property->obj()->AsSuperPropertyReference()->home_object());
1765 PushOperand(result_register());
1766 if (expr->is_compound()) {
1767 const Register scratch = a1;
1768 __ ld(scratch, MemOperand(sp, kPointerSize));
1769 PushOperands(scratch, result_register());
1770 }
1771 break;
1772 case KEYED_SUPER_PROPERTY: {
1773 const Register scratch = a1;
1774 VisitForStackValue(
1775 property->obj()->AsSuperPropertyReference()->this_var());
1776 VisitForAccumulatorValue(
1777 property->obj()->AsSuperPropertyReference()->home_object());
1778 __ Move(scratch, result_register());
1779 VisitForAccumulatorValue(property->key());
1780 PushOperands(scratch, result_register());
1781 if (expr->is_compound()) {
1782 const Register scratch1 = a4;
1783 __ ld(scratch1, MemOperand(sp, 2 * kPointerSize));
1784 PushOperands(scratch1, scratch, result_register());
1785 }
1786 break;
1787 }
1788 case KEYED_PROPERTY:
1789 // We need the key and receiver on both the stack and in v0 and a1.
1790 if (expr->is_compound()) {
1791 VisitForStackValue(property->obj());
1792 VisitForStackValue(property->key());
1793 __ ld(LoadDescriptor::ReceiverRegister(),
1794 MemOperand(sp, 1 * kPointerSize));
1795 __ ld(LoadDescriptor::NameRegister(), MemOperand(sp, 0));
1796 } else {
1797 VisitForStackValue(property->obj());
1798 VisitForStackValue(property->key());
1799 }
1800 break;
1801 }
1802
1803 // For compound assignments we need another deoptimization point after the
1804 // variable/property load.
1805 if (expr->is_compound()) {
1806 { AccumulatorValueContext context(this);
1807 switch (assign_type) {
1808 case VARIABLE:
1809 EmitVariableLoad(expr->target()->AsVariableProxy());
1810 PrepareForBailout(expr->target(), TOS_REG);
1811 break;
1812 case NAMED_PROPERTY:
1813 EmitNamedPropertyLoad(property);
1814 PrepareForBailoutForId(property->LoadId(), TOS_REG);
1815 break;
1816 case NAMED_SUPER_PROPERTY:
1817 EmitNamedSuperPropertyLoad(property);
1818 PrepareForBailoutForId(property->LoadId(), TOS_REG);
1819 break;
1820 case KEYED_SUPER_PROPERTY:
1821 EmitKeyedSuperPropertyLoad(property);
1822 PrepareForBailoutForId(property->LoadId(), TOS_REG);
1823 break;
1824 case KEYED_PROPERTY:
1825 EmitKeyedPropertyLoad(property);
1826 PrepareForBailoutForId(property->LoadId(), TOS_REG);
1827 break;
1828 }
1829 }
1830
1831 Token::Value op = expr->binary_op();
1832 PushOperand(v0); // Left operand goes on the stack.
1833 VisitForAccumulatorValue(expr->value());
1834
1835 AccumulatorValueContext context(this);
1836 if (ShouldInlineSmiCase(op)) {
1837 EmitInlineSmiBinaryOp(expr->binary_operation(),
1838 op,
1839 expr->target(),
1840 expr->value());
1841 } else {
1842 EmitBinaryOp(expr->binary_operation(), op);
1843 }
1844
1845 // Deoptimization point in case the binary operation may have side effects.
1846 PrepareForBailout(expr->binary_operation(), TOS_REG);
1847 } else {
1848 VisitForAccumulatorValue(expr->value());
1849 }
1850
1851 SetExpressionPosition(expr);
1852
1853 // Store the value.
1854 switch (assign_type) {
1855 case VARIABLE:
1856 EmitVariableAssignment(expr->target()->AsVariableProxy()->var(),
1857 expr->op(), expr->AssignmentSlot());
1858 PrepareForBailoutForId(expr->AssignmentId(), TOS_REG);
1859 context()->Plug(v0);
1860 break;
1861 case NAMED_PROPERTY:
1862 EmitNamedPropertyAssignment(expr);
1863 break;
1864 case NAMED_SUPER_PROPERTY:
1865 EmitNamedSuperPropertyStore(property);
1866 context()->Plug(v0);
1867 break;
1868 case KEYED_SUPER_PROPERTY:
1869 EmitKeyedSuperPropertyStore(property);
1870 context()->Plug(v0);
1871 break;
1872 case KEYED_PROPERTY:
1873 EmitKeyedPropertyAssignment(expr);
1874 break;
1875 }
1876}
1877
1878
1879void FullCodeGenerator::VisitYield(Yield* expr) {
1880 Comment cmnt(masm_, "[ Yield");
1881 SetExpressionPosition(expr);
1882
1883 // Evaluate yielded value first; the initial iterator definition depends on
1884 // this. It stays on the stack while we update the iterator.
1885 VisitForStackValue(expr->expression());
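// Note: a yield suspends the generator by recording the continuation
// position and the context in the generator object before returning to the
// caller; EmitGeneratorResume later jumps back to the 'continuation' label
// recorded below.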
1886
1887 switch (expr->yield_kind()) {
1888 case Yield::kSuspend:
1889 // Pop value from top-of-stack slot; box result into result register.
1890 EmitCreateIteratorResult(false);
1891 __ push(result_register());
1892 // Fall through.
1893 case Yield::kInitial: {
1894 Label suspend, continuation, post_runtime, resume;
1895
1896 __ jmp(&suspend);
1897 __ bind(&continuation);
1898 // When we arrive here, the stack top is the resume mode and
1899 // result_register() holds the input value (the argument given to the
1900 // respective resume operation).
1901 __ RecordGeneratorContinuation();
1902 __ pop(a1);
1903 __ Branch(&resume, ne, a1,
1904 Operand(Smi::FromInt(JSGeneratorObject::RETURN)));
1905 __ push(result_register());
1906 EmitCreateIteratorResult(true);
1907 EmitUnwindAndReturn();
1908
1909 __ bind(&suspend);
1910 VisitForAccumulatorValue(expr->generator_object());
1911 DCHECK(continuation.pos() > 0 && Smi::IsValid(continuation.pos()));
1912 __ li(a1, Operand(Smi::FromInt(continuation.pos())));
1913 __ sd(a1, FieldMemOperand(v0, JSGeneratorObject::kContinuationOffset));
1914 __ sd(cp, FieldMemOperand(v0, JSGeneratorObject::kContextOffset));
1915 __ mov(a1, cp);
1916 __ RecordWriteField(v0, JSGeneratorObject::kContextOffset, a1, a2,
1917 kRAHasBeenSaved, kDontSaveFPRegs);
1918 __ Daddu(a1, fp, Operand(StandardFrameConstants::kExpressionsOffset));
1919 __ Branch(&post_runtime, eq, sp, Operand(a1));
1920 __ push(v0); // generator object
1921 __ CallRuntime(Runtime::kSuspendJSGeneratorObject, 1);
1922 __ ld(cp, MemOperand(fp, StandardFrameConstants::kContextOffset));
1923 __ bind(&post_runtime);
1924 PopOperand(result_register());
1925 EmitReturnSequence();
1926
1927 __ bind(&resume);
1928 context()->Plug(result_register());
1929 break;
1930 }
1931
1932 case Yield::kFinal: {
1933 // Pop value from top-of-stack slot, box result into result register.
1934 OperandStackDepthDecrement(1);
1935 EmitCreateIteratorResult(true);
1936 EmitUnwindAndReturn();
1937 break;
1938 }
1939
1940 case Yield::kDelegating:
1941 UNREACHABLE();
1942 }
1943}
1944
1945
1946void FullCodeGenerator::EmitGeneratorResume(Expression *generator,
1947 Expression *value,
1948 JSGeneratorObject::ResumeMode resume_mode) {
1949 // The value stays in a0, and is ultimately read by the resumed generator, as
1950 // if CallRuntime(Runtime::kSuspendJSGeneratorObject) returned it. Or it
1951 // is read to throw the value when the resumed generator is already closed.
1952 // a1 will hold the generator object until the activation has been resumed.
1953 VisitForStackValue(generator);
1954 VisitForAccumulatorValue(value);
1955 PopOperand(a1);
1956
1957 // Store input value into generator object.
1958 __ sd(result_register(),
1959 FieldMemOperand(a1, JSGeneratorObject::kInputOffset));
1960 __ mov(a2, result_register());
1961 __ RecordWriteField(a1, JSGeneratorObject::kInputOffset, a2, a3,
1962 kRAHasBeenSaved, kDontSaveFPRegs);
1963
1964 // Load suspended function and context.
1965 __ ld(cp, FieldMemOperand(a1, JSGeneratorObject::kContextOffset));
1966 __ ld(a4, FieldMemOperand(a1, JSGeneratorObject::kFunctionOffset));
1967
1968 // Load receiver and store as the first argument.
1969 __ ld(a2, FieldMemOperand(a1, JSGeneratorObject::kReceiverOffset));
1970 __ push(a2);
1971
1972 // Push holes for the rest of the arguments to the generator function.
1973 __ ld(a3, FieldMemOperand(a4, JSFunction::kSharedFunctionInfoOffset));
1974 // The argument count is stored as int32_t on 64-bit platforms.
1975 // TODO(plind): Smi on 32-bit platforms.
1976 __ lw(a3,
1977 FieldMemOperand(a3, SharedFunctionInfo::kFormalParameterCountOffset));
1978 __ LoadRoot(a2, Heap::kTheHoleValueRootIndex);
1979 Label push_argument_holes, push_frame;
1980 __ bind(&push_argument_holes);
1981 __ Dsubu(a3, a3, Operand(1));
1982 __ Branch(&push_frame, lt, a3, Operand(zero_reg));
1983 __ push(a2);
1984 __ jmp(&push_argument_holes);
1985
1986 // Enter a new JavaScript frame, and initialize its slots as they were when
1987 // the generator was suspended.
1988 Label resume_frame, done;
1989 __ bind(&push_frame);
1990 __ Call(&resume_frame);
1991 __ jmp(&done);
1992 __ bind(&resume_frame);
1993 // ra = return address.
1994 // fp = caller's frame pointer.
1995 // cp = callee's context,
1996 // a4 = callee's JS function.
1997 __ Push(ra, fp, cp, a4);
1998 // Adjust FP to point to saved FP.
1999 __ Daddu(fp, sp, 2 * kPointerSize);
2000
2001 // Load the operand stack size.
2002 __ ld(a3, FieldMemOperand(a1, JSGeneratorObject::kOperandStackOffset));
2003 __ ld(a3, FieldMemOperand(a3, FixedArray::kLengthOffset));
2004 __ SmiUntag(a3);
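// Note: a3 now holds the number of saved operand stack slots; when it is
// zero and we are resuming with next(), execution can jump straight back
// into the generator body.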
2005
2006 // If we are sending a value and there is no operand stack, we can jump back
2007 // in directly.
2008 if (resume_mode == JSGeneratorObject::NEXT) {
2009 Label slow_resume;
2010 __ Branch(&slow_resume, ne, a3, Operand(zero_reg));
2011 __ ld(a3, FieldMemOperand(a4, JSFunction::kCodeEntryOffset));
2012 __ ld(a2, FieldMemOperand(a1, JSGeneratorObject::kContinuationOffset));
2013 __ SmiUntag(a2);
2014 __ Daddu(a3, a3, Operand(a2));
2015 __ li(a2, Operand(Smi::FromInt(JSGeneratorObject::kGeneratorExecuting)));
2016 __ sd(a2, FieldMemOperand(a1, JSGeneratorObject::kContinuationOffset));
2017 __ Push(Smi::FromInt(resume_mode)); // Consumed in continuation.
2018 __ Jump(a3);
2019 __ bind(&slow_resume);
2020 }
2021
2022 // Otherwise, we push holes for the operand stack and call the runtime to fix
2023 // up the stack and the handlers.
2024 Label push_operand_holes, call_resume;
2025 __ bind(&push_operand_holes);
2026 __ Dsubu(a3, a3, Operand(1));
2027 __ Branch(&call_resume, lt, a3, Operand(zero_reg));
2028 __ push(a2);
2029 __ Branch(&push_operand_holes);
2030 __ bind(&call_resume);
2031 __ Push(Smi::FromInt(resume_mode)); // Consumed in continuation.
2032 DCHECK(!result_register().is(a1));
2033 __ Push(a1, result_register());
2034 __ Push(Smi::FromInt(resume_mode));
2035 __ CallRuntime(Runtime::kResumeJSGeneratorObject);
2036 // Not reached: the runtime call returns elsewhere.
2037 __ stop("not-reached");
2038
2039 __ bind(&done);
2040 context()->Plug(result_register());
2041}
2042
2043void FullCodeGenerator::PushOperands(Register reg1, Register reg2) {
2044 OperandStackDepthIncrement(2);
2045 __ Push(reg1, reg2);
2046}
2047
2048void FullCodeGenerator::PushOperands(Register reg1, Register reg2,
2049 Register reg3) {
2050 OperandStackDepthIncrement(3);
2051 __ Push(reg1, reg2, reg3);
2052}
2053
2054void FullCodeGenerator::PushOperands(Register reg1, Register reg2,
2055 Register reg3, Register reg4) {
2056 OperandStackDepthIncrement(4);
2057 __ Push(reg1, reg2, reg3, reg4);
2058}
2059
2060void FullCodeGenerator::PopOperands(Register reg1, Register reg2) {
2061 OperandStackDepthDecrement(2);
2062 __ Pop(reg1, reg2);
2063}
2064
2065void FullCodeGenerator::EmitOperandStackDepthCheck() {
2066 if (FLAG_debug_code) {
2067 int expected_diff = StandardFrameConstants::kFixedFrameSizeFromFp +
2068 operand_stack_depth_ * kPointerSize;
2069 __ Dsubu(v0, fp, sp);
2070 __ Assert(eq, kUnexpectedStackDepth, v0, Operand(expected_diff));
2071 }
2072}
2073
2074void FullCodeGenerator::EmitCreateIteratorResult(bool done) {
2075 Label allocate, done_allocate;
2076
2077 __ Allocate(JSIteratorResult::kSize, v0, a2, a3, &allocate, TAG_OBJECT);
2078 __ jmp(&done_allocate);
2079
2080 __ bind(&allocate);
2081 __ Push(Smi::FromInt(JSIteratorResult::kSize));
2082 __ CallRuntime(Runtime::kAllocateInNewSpace);
2083
2084 __ bind(&done_allocate);
2085 __ LoadNativeContextSlot(Context::ITERATOR_RESULT_MAP_INDEX, a1);
2086 __ pop(a2);
2087 __ LoadRoot(a3,
2088 done ? Heap::kTrueValueRootIndex : Heap::kFalseValueRootIndex);
2089 __ LoadRoot(a4, Heap::kEmptyFixedArrayRootIndex);
2090 __ sd(a1, FieldMemOperand(v0, HeapObject::kMapOffset));
2091 __ sd(a4, FieldMemOperand(v0, JSObject::kPropertiesOffset));
2092 __ sd(a4, FieldMemOperand(v0, JSObject::kElementsOffset));
2093 __ sd(a2, FieldMemOperand(v0, JSIteratorResult::kValueOffset));
2094 __ sd(a3, FieldMemOperand(v0, JSIteratorResult::kDoneOffset));
2095 STATIC_ASSERT(JSIteratorResult::kSize == 5 * kPointerSize);
2096}
2097
2098
2099void FullCodeGenerator::EmitNamedPropertyLoad(Property* prop) {
2100 SetExpressionPosition(prop);
2101 Literal* key = prop->key()->AsLiteral();
2102 DCHECK(!prop->IsSuperAccess());
2103
2104 __ li(LoadDescriptor::NameRegister(), Operand(key->value()));
2105 __ li(LoadDescriptor::SlotRegister(),
2106 Operand(SmiFromSlot(prop->PropertyFeedbackSlot())));
2107 CallLoadIC(NOT_INSIDE_TYPEOF);
2108}
2109
2110
2111void FullCodeGenerator::EmitInlineSmiBinaryOp(BinaryOperation* expr,
2112 Token::Value op,
2113 Expression* left_expr,
2114 Expression* right_expr) {
2115 Label done, smi_case, stub_call;
2116
2117 Register scratch1 = a2;
2118 Register scratch2 = a3;
2119
2120 // Get the arguments.
2121 Register left = a1;
2122 Register right = a0;
2123 PopOperand(left);
2124 __ mov(a0, result_register());
2125
2126 // Perform combined smi check on both operands.
2127 __ Or(scratch1, left, Operand(right));
2128 STATIC_ASSERT(kSmiTag == 0);
2129 JumpPatchSite patch_site(masm_);
2130 patch_site.EmitJumpIfSmi(scratch1, &smi_case);
2131
2132 __ bind(&stub_call);
2133 Handle<Code> code = CodeFactory::BinaryOpIC(isolate(), op).code();
2134 CallIC(code, expr->BinaryOperationFeedbackId());
2135 patch_site.EmitPatchInfo();
2136 __ jmp(&done);
2137
2138 __ bind(&smi_case);
2139 // Smi case. This code works the same way as the smi-smi case in the
2140 // type-recording binary operation stub.
2141 switch (op) {
2142 case Token::SAR:
2143 __ GetLeastBitsFromSmi(scratch1, right, 5);
2144 __ dsrav(right, left, scratch1);
2145 __ And(v0, right, Operand(0xffffffff00000000L));
2146 break;
2147 case Token::SHL: {
2148 __ SmiUntag(scratch1, left);
2149 __ GetLeastBitsFromSmi(scratch2, right, 5);
2150 __ dsllv(scratch1, scratch1, scratch2);
2151 __ SmiTag(v0, scratch1);
2152 break;
2153 }
2154 case Token::SHR: {
2155 __ SmiUntag(scratch1, left);
2156 __ GetLeastBitsFromSmi(scratch2, right, 5);
2157 __ dsrlv(scratch1, scratch1, scratch2);
2158 __ And(scratch2, scratch1, 0x80000000);
2159 __ Branch(&stub_call, ne, scratch2, Operand(zero_reg));
2160 __ SmiTag(v0, scratch1);
2161 break;
2162 }
2163 case Token::ADD:
2164 __ DadduAndCheckForOverflow(v0, left, right, scratch1);
2165 __ BranchOnOverflow(&stub_call, scratch1);
2166 break;
2167 case Token::SUB:
2168 __ DsubuAndCheckForOverflow(v0, left, right, scratch1);
2169 __ BranchOnOverflow(&stub_call, scratch1);
2170 break;
2171 case Token::MUL: {
2172 __ Dmulh(v0, left, right);
2173 __ dsra32(scratch2, v0, 0);
2174 __ sra(scratch1, v0, 31);
2175 __ Branch(USE_DELAY_SLOT, &stub_call, ne, scratch2, Operand(scratch1));
2176 __ SmiTag(v0);
2177 __ Branch(USE_DELAY_SLOT, &done, ne, v0, Operand(zero_reg));
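// Note: the product is zero here; if either operand was negative the
// mathematical result would be -0, which a smi cannot represent, so fall
// back to the stub when the sum of the operands is negative.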
2178 __ Daddu(scratch2, right, left);
2179 __ Branch(&stub_call, lt, scratch2, Operand(zero_reg));
2180 DCHECK(Smi::FromInt(0) == 0);
2181 __ mov(v0, zero_reg);
2182 break;
2183 }
2184 case Token::BIT_OR:
2185 __ Or(v0, left, Operand(right));
2186 break;
2187 case Token::BIT_AND:
2188 __ And(v0, left, Operand(right));
2189 break;
2190 case Token::BIT_XOR:
2191 __ Xor(v0, left, Operand(right));
2192 break;
2193 default:
2194 UNREACHABLE();
2195 }
2196
2197 __ bind(&done);
2198 context()->Plug(v0);
2199}
2200
2201
2202void FullCodeGenerator::EmitClassDefineProperties(ClassLiteral* lit) {
2203 for (int i = 0; i < lit->properties()->length(); i++) {
2204 ObjectLiteral::Property* property = lit->properties()->at(i);
2205 Expression* value = property->value();
2206
2207 Register scratch = a1;
2208 if (property->is_static()) {
2209 __ ld(scratch, MemOperand(sp, kPointerSize)); // constructor
2210 } else {
2211 __ ld(scratch, MemOperand(sp, 0)); // prototype
2212 }
2213 PushOperand(scratch);
2214 EmitPropertyKey(property, lit->GetIdForProperty(i));
2215
2216 // The static "prototype" property is read-only. The non-computed property
2217 // name case is handled in the parser. Since this is the only case where an
2218 // own read-only property has to be checked, we special-case it here instead
2219 // of performing the check for every property.
2220 if (property->is_static() && property->is_computed_name()) {
2221 __ CallRuntime(Runtime::kThrowIfStaticPrototype);
2222 __ push(v0);
2223 }
2224
2225 VisitForStackValue(value);
2226 if (NeedsHomeObject(value)) {
2227 EmitSetHomeObject(value, 2, property->GetSlot());
2228 }
2229
2230 switch (property->kind()) {
2231 case ObjectLiteral::Property::CONSTANT:
2232 case ObjectLiteral::Property::MATERIALIZED_LITERAL:
2233 case ObjectLiteral::Property::PROTOTYPE:
2234 UNREACHABLE();
2235 case ObjectLiteral::Property::COMPUTED:
2236 PushOperand(Smi::FromInt(DONT_ENUM));
2237 PushOperand(Smi::FromInt(property->NeedsSetFunctionName()));
2238 CallRuntimeWithOperands(Runtime::kDefineDataPropertyInLiteral);
2239 break;
2240
2241 case ObjectLiteral::Property::GETTER:
2242 PushOperand(Smi::FromInt(DONT_ENUM));
2243 CallRuntimeWithOperands(Runtime::kDefineGetterPropertyUnchecked);
2244 break;
2245
2246 case ObjectLiteral::Property::SETTER:
2247 PushOperand(Smi::FromInt(DONT_ENUM));
2248 CallRuntimeWithOperands(Runtime::kDefineSetterPropertyUnchecked);
2249 break;
2250
2251 default:
2252 UNREACHABLE();
2253 }
2254 }
2255}
2256
2257
2258void FullCodeGenerator::EmitBinaryOp(BinaryOperation* expr, Token::Value op) {
2259 __ mov(a0, result_register());
2260 PopOperand(a1);
2261 Handle<Code> code = CodeFactory::BinaryOpIC(isolate(), op).code();
2262 JumpPatchSite patch_site(masm_); // unbound, signals no inlined smi code.
2263 CallIC(code, expr->BinaryOperationFeedbackId());
2264 patch_site.EmitPatchInfo();
2265 context()->Plug(v0);
2266}
2267
2268
2269void FullCodeGenerator::EmitAssignment(Expression* expr,
2270 FeedbackVectorSlot slot) {
2271 DCHECK(expr->IsValidReferenceExpressionOrThis());
2272
2273 Property* prop = expr->AsProperty();
2274 LhsKind assign_type = Property::GetAssignType(prop);
2275
2276 switch (assign_type) {
2277 case VARIABLE: {
2278 Variable* var = expr->AsVariableProxy()->var();
2279 EffectContext context(this);
2280 EmitVariableAssignment(var, Token::ASSIGN, slot);
2281 break;
2282 }
2283 case NAMED_PROPERTY: {
2284 PushOperand(result_register()); // Preserve value.
2285 VisitForAccumulatorValue(prop->obj());
2286 __ mov(StoreDescriptor::ReceiverRegister(), result_register());
2287 PopOperand(StoreDescriptor::ValueRegister()); // Restore value.
2288 __ li(StoreDescriptor::NameRegister(),
2289 Operand(prop->key()->AsLiteral()->value()));
2290 EmitLoadStoreICSlot(slot);
2291 CallStoreIC();
2292 break;
2293 }
2294 case NAMED_SUPER_PROPERTY: {
2295 PushOperand(v0);
2296 VisitForStackValue(prop->obj()->AsSuperPropertyReference()->this_var());
2297 VisitForAccumulatorValue(
2298 prop->obj()->AsSuperPropertyReference()->home_object());
2299 // stack: value, this; v0: home_object
2300 Register scratch = a2;
2301 Register scratch2 = a3;
2302 __ mov(scratch, result_register()); // home_object
2303 __ ld(v0, MemOperand(sp, kPointerSize)); // value
2304 __ ld(scratch2, MemOperand(sp, 0)); // this
2305 __ sd(scratch2, MemOperand(sp, kPointerSize)); // this
2306 __ sd(scratch, MemOperand(sp, 0)); // home_object
2307 // stack: this, home_object; v0: value
2308 EmitNamedSuperPropertyStore(prop);
2309 break;
2310 }
2311 case KEYED_SUPER_PROPERTY: {
2312 PushOperand(v0);
2313 VisitForStackValue(prop->obj()->AsSuperPropertyReference()->this_var());
2314 VisitForStackValue(
2315 prop->obj()->AsSuperPropertyReference()->home_object());
2316 VisitForAccumulatorValue(prop->key());
2317 Register scratch = a2;
2318 Register scratch2 = a3;
2319 __ ld(scratch2, MemOperand(sp, 2 * kPointerSize)); // value
2320 // stack: value, this, home_object; v0: key, a3: value
2321 __ ld(scratch, MemOperand(sp, kPointerSize)); // this
2322 __ sd(scratch, MemOperand(sp, 2 * kPointerSize));
2323 __ ld(scratch, MemOperand(sp, 0)); // home_object
2324 __ sd(scratch, MemOperand(sp, kPointerSize));
2325 __ sd(v0, MemOperand(sp, 0));
2326 __ Move(v0, scratch2);
2327 // stack: this, home_object, key; v0: value.
2328 EmitKeyedSuperPropertyStore(prop);
2329 break;
2330 }
2331 case KEYED_PROPERTY: {
2332 PushOperand(result_register()); // Preserve value.
2333 VisitForStackValue(prop->obj());
2334 VisitForAccumulatorValue(prop->key());
2335 __ Move(StoreDescriptor::NameRegister(), result_register());
2336 PopOperands(StoreDescriptor::ValueRegister(),
2337 StoreDescriptor::ReceiverRegister());
2338 EmitLoadStoreICSlot(slot);
2339 Handle<Code> ic =
2340 CodeFactory::KeyedStoreIC(isolate(), language_mode()).code();
2341 CallIC(ic);
2342 break;
2343 }
2344 }
2345 context()->Plug(v0);
2346}
2347
2348
2349void FullCodeGenerator::EmitStoreToStackLocalOrContextSlot(
2350 Variable* var, MemOperand location) {
2351 __ sd(result_register(), location);
2352 if (var->IsContextSlot()) {
2353 // RecordWrite may destroy all its register arguments.
2354 __ Move(a3, result_register());
2355 int offset = Context::SlotOffset(var->index());
2356 __ RecordWriteContextSlot(
2357 a1, offset, a3, a2, kRAHasBeenSaved, kDontSaveFPRegs);
2358 }
2359}
2360
2361
2362void FullCodeGenerator::EmitVariableAssignment(Variable* var, Token::Value op,
2363 FeedbackVectorSlot slot) {
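// Note: assignments are dispatched on where the variable lives (global,
// stack or context slot, or lookup slot) and on its mode; const and
// legacy-const stores either throw or are silently ignored depending on
// the language mode.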
2364 if (var->IsUnallocated()) {
2365 // Global var, const, or let.
2366 __ mov(StoreDescriptor::ValueRegister(), result_register());
2367 __ li(StoreDescriptor::NameRegister(), Operand(var->name()));
2368 __ LoadGlobalObject(StoreDescriptor::ReceiverRegister());
2369 EmitLoadStoreICSlot(slot);
2370 CallStoreIC();
2371
2372 } else if (var->mode() == LET && op != Token::INIT) {
2373 // Non-initializing assignment to let variable needs a write barrier.
2374 DCHECK(!var->IsLookupSlot());
2375 DCHECK(var->IsStackAllocated() || var->IsContextSlot());
2376 Label assign;
2377 MemOperand location = VarOperand(var, a1);
2378 __ ld(a3, location);
2379 __ LoadRoot(a4, Heap::kTheHoleValueRootIndex);
2380 __ Branch(&assign, ne, a3, Operand(a4));
2381 __ li(a3, Operand(var->name()));
2382 __ push(a3);
2383 __ CallRuntime(Runtime::kThrowReferenceError);
2384 // Perform the assignment.
2385 __ bind(&assign);
2386 EmitStoreToStackLocalOrContextSlot(var, location);
2387
2388 } else if (var->mode() == CONST && op != Token::INIT) {
2389 // Assignment to const variable needs a write barrier.
2390 DCHECK(!var->IsLookupSlot());
2391 DCHECK(var->IsStackAllocated() || var->IsContextSlot());
2392 Label const_error;
2393 MemOperand location = VarOperand(var, a1);
2394 __ ld(a3, location);
2395 __ LoadRoot(at, Heap::kTheHoleValueRootIndex);
2396 __ Branch(&const_error, ne, a3, Operand(at));
2397 __ li(a3, Operand(var->name()));
2398 __ push(a3);
2399 __ CallRuntime(Runtime::kThrowReferenceError);
2400 __ bind(&const_error);
2401 __ CallRuntime(Runtime::kThrowConstAssignError);
2402
2403 } else if (var->is_this() && var->mode() == CONST && op == Token::INIT) {
2404 // Initializing assignment to const {this} needs a write barrier.
2405 DCHECK(var->IsStackAllocated() || var->IsContextSlot());
2406 Label uninitialized_this;
2407 MemOperand location = VarOperand(var, a1);
2408 __ ld(a3, location);
2409 __ LoadRoot(at, Heap::kTheHoleValueRootIndex);
2410 __ Branch(&uninitialized_this, eq, a3, Operand(at));
2411 __ li(a0, Operand(var->name()));
2412 __ Push(a0);
2413 __ CallRuntime(Runtime::kThrowReferenceError);
2414 __ bind(&uninitialized_this);
2415 EmitStoreToStackLocalOrContextSlot(var, location);
2416
2417 } else if (!var->is_const_mode() ||
2418 (var->mode() == CONST && op == Token::INIT)) {
2419 if (var->IsLookupSlot()) {
2420 __ Push(var->name());
2421 __ Push(v0);
2422 __ CallRuntime(is_strict(language_mode())
2423 ? Runtime::kStoreLookupSlot_Strict
2424 : Runtime::kStoreLookupSlot_Sloppy);
2425 } else {
2426 // Assignment to var or initializing assignment to let/const in harmony
2427 // mode.
2428 DCHECK((var->IsStackAllocated() || var->IsContextSlot()));
2429 MemOperand location = VarOperand(var, a1);
2430 if (FLAG_debug_code && var->mode() == LET && op == Token::INIT) {
2431 // Check for an uninitialized let binding.
2432 __ ld(a2, location);
2433 __ LoadRoot(a4, Heap::kTheHoleValueRootIndex);
2434 __ Check(eq, kLetBindingReInitialization, a2, Operand(a4));
2435 }
2436 EmitStoreToStackLocalOrContextSlot(var, location);
2437 }
2438
2439 } else if (var->mode() == CONST_LEGACY && op == Token::INIT) {
2440 // Const initializers need a write barrier.
2441 DCHECK(!var->IsParameter()); // No const parameters.
2442 if (var->IsLookupSlot()) {
2443 __ li(a0, Operand(var->name()));
2444 __ Push(v0, cp, a0); // Context and name.
2445 __ CallRuntime(Runtime::kInitializeLegacyConstLookupSlot);
2446 } else {
2447 DCHECK(var->IsStackAllocated() || var->IsContextSlot());
2448 Label skip;
2449 MemOperand location = VarOperand(var, a1);
2450 __ ld(a2, location);
2451 __ LoadRoot(at, Heap::kTheHoleValueRootIndex);
2452 __ Branch(&skip, ne, a2, Operand(at));
2453 EmitStoreToStackLocalOrContextSlot(var, location);
2454 __ bind(&skip);
2455 }
2456
2457 } else {
2458 DCHECK(var->mode() == CONST_LEGACY && op != Token::INIT);
2459 if (is_strict(language_mode())) {
2460 __ CallRuntime(Runtime::kThrowConstAssignError);
2461 }
2462 // Silently ignore store in sloppy mode.
2463 }
2464}
2465
2466
2467void FullCodeGenerator::EmitNamedPropertyAssignment(Assignment* expr) {
2468 // Assignment to a property, using a named store IC.
2469 Property* prop = expr->target()->AsProperty();
2470 DCHECK(prop != NULL);
2471 DCHECK(prop->key()->IsLiteral());
2472
2473 __ mov(StoreDescriptor::ValueRegister(), result_register());
2474 __ li(StoreDescriptor::NameRegister(),
2475 Operand(prop->key()->AsLiteral()->value()));
2476 PopOperand(StoreDescriptor::ReceiverRegister());
2477 EmitLoadStoreICSlot(expr->AssignmentSlot());
2478 CallStoreIC();
2479
2480 PrepareForBailoutForId(expr->AssignmentId(), TOS_REG);
2481 context()->Plug(v0);
2482}
2483
2484
2485void FullCodeGenerator::EmitNamedSuperPropertyStore(Property* prop) {
2486 // Assignment to named property of super.
2487 // v0 : value
2488 // stack : receiver ('this'), home_object
2489 DCHECK(prop != NULL);
2490 Literal* key = prop->key()->AsLiteral();
2491 DCHECK(key != NULL);
2492
2493 PushOperand(key->value());
2494 PushOperand(v0);
2495 CallRuntimeWithOperands(is_strict(language_mode())
2496 ? Runtime::kStoreToSuper_Strict
2497 : Runtime::kStoreToSuper_Sloppy);
2498}
2499
2500
2501void FullCodeGenerator::EmitKeyedSuperPropertyStore(Property* prop) {
2502 // Assignment to keyed property of super.
2503 // v0 : value
2504 // stack : receiver ('this'), home_object, key
2505 DCHECK(prop != NULL);
2506
2507 PushOperand(v0);
2508 CallRuntimeWithOperands(is_strict(language_mode())
2509 ? Runtime::kStoreKeyedToSuper_Strict
2510 : Runtime::kStoreKeyedToSuper_Sloppy);
2511}
2512
2513
2514void FullCodeGenerator::EmitKeyedPropertyAssignment(Assignment* expr) {
2515 // Assignment to a property, using a keyed store IC.
2516 // Call keyed store IC.
2517 // The arguments are:
2518 // - a0 is the value,
2519 // - a1 is the key,
2520 // - a2 is the receiver.
2521 __ mov(StoreDescriptor::ValueRegister(), result_register());
2522 PopOperands(StoreDescriptor::ReceiverRegister(),
2523 StoreDescriptor::NameRegister());
2524 DCHECK(StoreDescriptor::ValueRegister().is(a0));
2525
2526 Handle<Code> ic =
2527 CodeFactory::KeyedStoreIC(isolate(), language_mode()).code();
2528 EmitLoadStoreICSlot(expr->AssignmentSlot());
2529 CallIC(ic);
2530
2531 PrepareForBailoutForId(expr->AssignmentId(), TOS_REG);
2532 context()->Plug(v0);
2533}
2534
2535
2536void FullCodeGenerator::VisitProperty(Property* expr) {
2537 Comment cmnt(masm_, "[ Property");
2538 SetExpressionPosition(expr);
2539
2540 Expression* key = expr->key();
2541
2542 if (key->IsPropertyName()) {
2543 if (!expr->IsSuperAccess()) {
2544 VisitForAccumulatorValue(expr->obj());
2545 __ Move(LoadDescriptor::ReceiverRegister(), v0);
2546 EmitNamedPropertyLoad(expr);
2547 } else {
2548 VisitForStackValue(expr->obj()->AsSuperPropertyReference()->this_var());
2549 VisitForStackValue(
2550 expr->obj()->AsSuperPropertyReference()->home_object());
2551 EmitNamedSuperPropertyLoad(expr);
2552 }
2553 } else {
2554 if (!expr->IsSuperAccess()) {
2555 VisitForStackValue(expr->obj());
2556 VisitForAccumulatorValue(expr->key());
2557 __ Move(LoadDescriptor::NameRegister(), v0);
2558 PopOperand(LoadDescriptor::ReceiverRegister());
2559 EmitKeyedPropertyLoad(expr);
2560 } else {
2561 VisitForStackValue(expr->obj()->AsSuperPropertyReference()->this_var());
2562 VisitForStackValue(
2563 expr->obj()->AsSuperPropertyReference()->home_object());
2564 VisitForStackValue(expr->key());
2565 EmitKeyedSuperPropertyLoad(expr);
2566 }
2567 }
2568 PrepareForBailoutForId(expr->LoadId(), TOS_REG);
2569 context()->Plug(v0);
2570}
2571
2572
2573void FullCodeGenerator::CallIC(Handle<Code> code,
2574 TypeFeedbackId id) {
2575 ic_total_count_++;
2576 __ Call(code, RelocInfo::CODE_TARGET, id);
2577}
2578
2579
2580// Code common for calls using the IC.
2581void FullCodeGenerator::EmitCallWithLoadIC(Call* expr) {
2582 Expression* callee = expr->expression();
2583
2584 // Get the target function.
2585 ConvertReceiverMode convert_mode;
2586 if (callee->IsVariableProxy()) {
2587 { StackValueContext context(this);
2588 EmitVariableLoad(callee->AsVariableProxy());
2589 PrepareForBailout(callee, NO_REGISTERS);
2590 }
2591 // Push undefined as receiver. This is patched in the method prologue if it
2592 // is a sloppy mode method.
2593 __ LoadRoot(at, Heap::kUndefinedValueRootIndex);
2594 PushOperand(at);
2595 convert_mode = ConvertReceiverMode::kNullOrUndefined;
2596 } else {
2597 // Load the function from the receiver.
2598 DCHECK(callee->IsProperty());
2599 DCHECK(!callee->AsProperty()->IsSuperAccess());
2600 __ ld(LoadDescriptor::ReceiverRegister(), MemOperand(sp, 0));
2601 EmitNamedPropertyLoad(callee->AsProperty());
2602 PrepareForBailoutForId(callee->AsProperty()->LoadId(), TOS_REG);
2603 // Push the target function under the receiver.
2604 __ ld(at, MemOperand(sp, 0));
2605 PushOperand(at);
2606 __ sd(v0, MemOperand(sp, kPointerSize));
2607 convert_mode = ConvertReceiverMode::kNotNullOrUndefined;
2608 }
2609
2610 EmitCall(expr, convert_mode);
2611}
2612
2613
2614void FullCodeGenerator::EmitSuperCallWithLoadIC(Call* expr) {
2615 SetExpressionPosition(expr);
2616 Expression* callee = expr->expression();
2617 DCHECK(callee->IsProperty());
2618 Property* prop = callee->AsProperty();
2619 DCHECK(prop->IsSuperAccess());
2620
2621 Literal* key = prop->key()->AsLiteral();
2622 DCHECK(!key->value()->IsSmi());
2623 // Load the function from the receiver.
2624 const Register scratch = a1;
2625 SuperPropertyReference* super_ref = prop->obj()->AsSuperPropertyReference();
2626 VisitForAccumulatorValue(super_ref->home_object());
2627 __ mov(scratch, v0);
2628 VisitForAccumulatorValue(super_ref->this_var());
2629 PushOperands(scratch, v0, v0, scratch);
2630 PushOperand(key->value());
2631
2632 // Stack here:
2633 // - home_object
2634 // - this (receiver)
2635 // - this (receiver) <-- LoadFromSuper will pop here and below.
2636 // - home_object
2637 // - key
2638 CallRuntimeWithOperands(Runtime::kLoadFromSuper);
2639
2640 // Replace home_object with target function.
2641 __ sd(v0, MemOperand(sp, kPointerSize));
2642
2643 // Stack here:
2644 // - target function
2645 // - this (receiver)
2646 EmitCall(expr);
2647}
2648
2649
2650// Code common for calls using the IC.
2651void FullCodeGenerator::EmitKeyedCallWithLoadIC(Call* expr,
2652 Expression* key) {
2653 // Load the key.
2654 VisitForAccumulatorValue(key);
2655
2656 Expression* callee = expr->expression();
2657
2658 // Load the function from the receiver.
2659 DCHECK(callee->IsProperty());
2660 __ ld(LoadDescriptor::ReceiverRegister(), MemOperand(sp, 0));
2661 __ Move(LoadDescriptor::NameRegister(), v0);
2662 EmitKeyedPropertyLoad(callee->AsProperty());
2663 PrepareForBailoutForId(callee->AsProperty()->LoadId(), TOS_REG);
2664
2665 // Push the target function under the receiver.
2666 __ ld(at, MemOperand(sp, 0));
2667 PushOperand(at);
2668 __ sd(v0, MemOperand(sp, kPointerSize));
2669
2670 EmitCall(expr, ConvertReceiverMode::kNotNullOrUndefined);
2671}
2672
2673
2674void FullCodeGenerator::EmitKeyedSuperCallWithLoadIC(Call* expr) {
2675 Expression* callee = expr->expression();
2676 DCHECK(callee->IsProperty());
2677 Property* prop = callee->AsProperty();
2678 DCHECK(prop->IsSuperAccess());
2679
2680 SetExpressionPosition(prop);
2681 // Load the function from the receiver.
2682 const Register scratch = a1;
2683 SuperPropertyReference* super_ref = prop->obj()->AsSuperPropertyReference();
2684 VisitForAccumulatorValue(super_ref->home_object());
2685 __ Move(scratch, v0);
2686 VisitForAccumulatorValue(super_ref->this_var());
2687 PushOperands(scratch, v0, v0, scratch);
2688 VisitForStackValue(prop->key());
2689
2690 // Stack here:
2691 // - home_object
2692 // - this (receiver)
2693 // - this (receiver) <-- LoadKeyedFromSuper will pop here and below.
2694 // - home_object
2695 // - key
2696 CallRuntimeWithOperands(Runtime::kLoadKeyedFromSuper);
2697
2698 // Replace home_object with target function.
2699 __ sd(v0, MemOperand(sp, kPointerSize));
2700
2701 // Stack here:
2702 // - target function
2703 // - this (receiver)
2704 EmitCall(expr);
2705}
2706
2707
2708void FullCodeGenerator::EmitCall(Call* expr, ConvertReceiverMode mode) {
2709 // Load the arguments.
2710 ZoneList<Expression*>* args = expr->arguments();
2711 int arg_count = args->length();
2712 for (int i = 0; i < arg_count; i++) {
2713 VisitForStackValue(args->at(i));
2714 }
2715
2716 PrepareForBailoutForId(expr->CallId(), NO_REGISTERS);
2717 // Record source position of the IC call.
2718 SetCallPosition(expr);
2719 if (expr->tail_call_mode() == TailCallMode::kAllow) {
2720 if (FLAG_trace) {
2721 __ CallRuntime(Runtime::kTraceTailCall);
2722 }
2723 // Update profiling counters before the tail call since we will
2724 // not return to this function.
2725 EmitProfilingCounterHandlingForReturnSequence(true);
2726 }
2727 Handle<Code> ic =
2728 CodeFactory::CallIC(isolate(), arg_count, mode, expr->tail_call_mode())
2729 .code();
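// Note: the CallIC code built above takes the feedback slot in a3 and the
// callee in a1, loaded from just below the arguments on the stack.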
2730 __ li(a3, Operand(SmiFromSlot(expr->CallFeedbackICSlot())));
2731 __ ld(a1, MemOperand(sp, (arg_count + 1) * kPointerSize));
2732 // Don't assign a type feedback id to the IC, since type feedback is provided
2733 // by the vector above.
2734 CallIC(ic);
2735 OperandStackDepthDecrement(arg_count + 1);
2736
2737 RecordJSReturnSite(expr);
2738 // Restore context register.
2739 __ ld(cp, MemOperand(fp, StandardFrameConstants::kContextOffset));
2740 context()->DropAndPlug(1, v0);
2741}
2742
2743
2744void FullCodeGenerator::EmitResolvePossiblyDirectEval(int arg_count) {
2745 // a6: copy of the first argument or undefined if it doesn't exist.
2746 if (arg_count > 0) {
2747 __ ld(a6, MemOperand(sp, arg_count * kPointerSize));
2748 } else {
2749 __ LoadRoot(a6, Heap::kUndefinedValueRootIndex);
2750 }
2751
2752 // a5: the receiver of the enclosing function.
2753 __ ld(a5, MemOperand(fp, JavaScriptFrameConstants::kFunctionOffset));
2754
2755 // a4: the language mode.
2756 __ li(a4, Operand(Smi::FromInt(language_mode())));
2757
2758 // a1: the start position of the scope the call resides in.
2759 __ li(a1, Operand(Smi::FromInt(scope()->start_position())));
2760
2761 // Do the runtime call.
2762 __ Push(a6, a5, a4, a1);
2763 __ CallRuntime(Runtime::kResolvePossiblyDirectEval);
2764}
2765
2766
2767// See http://www.ecma-international.org/ecma-262/6.0/#sec-function-calls.
2768void FullCodeGenerator::PushCalleeAndWithBaseObject(Call* expr) {
2769 VariableProxy* callee = expr->expression()->AsVariableProxy();
2770 if (callee->var()->IsLookupSlot()) {
2771 Label slow, done;
2772
2773 SetExpressionPosition(callee);
2774 // Generate code for loading from variables potentially shadowed by
2775 // eval-introduced variables.
2776 EmitDynamicLookupFastCase(callee, NOT_INSIDE_TYPEOF, &slow, &done);
2777
2778 __ bind(&slow);
2779 // Call the runtime to find the function to call (returned in v0)
2780 // and the object holding it (returned in v1).
2781 __ Push(callee->name());
2782 __ CallRuntime(Runtime::kLoadLookupSlotForCall);
2783 PushOperands(v0, v1); // Function, receiver.
2784 PrepareForBailoutForId(expr->LookupId(), NO_REGISTERS);
2785
2786 // If fast case code has been generated, emit code to push the
2787 // function and receiver and have the slow path jump around this
2788 // code.
2789 if (done.is_linked()) {
2790 Label call;
2791 __ Branch(&call);
2792 __ bind(&done);
2793 // Push function.
2794 __ push(v0);
2795 // The receiver is implicitly the global receiver. Indicate this
2796 // by passing the hole to the call function stub.
2797 __ LoadRoot(a1, Heap::kUndefinedValueRootIndex);
2798 __ push(a1);
2799 __ bind(&call);
2800 }
2801 } else {
2802 VisitForStackValue(callee);
2803 // refEnv.WithBaseObject()
2804 __ LoadRoot(a2, Heap::kUndefinedValueRootIndex);
2805 PushOperand(a2); // Reserved receiver slot.
2806 }
2807}
2808
2809
2810void FullCodeGenerator::EmitPossiblyEvalCall(Call* expr) {
2811 // In a call to eval, we first call Runtime::kResolvePossiblyDirectEval
2812 // to resolve the function we need to call. Then we call the resolved
2813 // function using the given arguments.
2814 ZoneList<Expression*>* args = expr->arguments();
2815 int arg_count = args->length();
2816 PushCalleeAndWithBaseObject(expr);
2817
2818 // Push the arguments.
2819 for (int i = 0; i < arg_count; i++) {
2820 VisitForStackValue(args->at(i));
2821 }
2822
2823 // Push a copy of the function (found below the arguments) and
2824 // resolve eval.
2825 __ ld(a1, MemOperand(sp, (arg_count + 1) * kPointerSize));
2826 __ push(a1);
2827 EmitResolvePossiblyDirectEval(arg_count);
2828
2829 // Touch up the stack with the resolved function.
2830 __ sd(v0, MemOperand(sp, (arg_count + 1) * kPointerSize));
2831
2832 PrepareForBailoutForId(expr->EvalId(), NO_REGISTERS);
2833 // Record source position for debugger.
2834 SetCallPosition(expr);
2835 __ ld(a1, MemOperand(sp, (arg_count + 1) * kPointerSize));
2836 __ li(a0, Operand(arg_count));
2837 __ Call(isolate()->builtins()->Call(ConvertReceiverMode::kAny,
2838 expr->tail_call_mode()),
2839 RelocInfo::CODE_TARGET);
2840 OperandStackDepthDecrement(arg_count + 1);
2841 RecordJSReturnSite(expr);
2842 // Restore context register.
2843 __ ld(cp, MemOperand(fp, StandardFrameConstants::kContextOffset));
2844 context()->DropAndPlug(1, v0);
2845}
2846
2847
2848void FullCodeGenerator::VisitCallNew(CallNew* expr) {
2849 Comment cmnt(masm_, "[ CallNew");
2850 // According to ECMA-262, section 11.2.2, page 44, the function
2851 // expression in new calls must be evaluated before the
2852 // arguments.
2853
2854 // Push constructor on the stack. If it's not a function it's used as
2855 // receiver for CALL_NON_FUNCTION, otherwise the value on the stack is
2856 // ignored.
2857 DCHECK(!expr->expression()->IsSuperPropertyReference());
2858 VisitForStackValue(expr->expression());
2859
2860 // Push the arguments ("left-to-right") on the stack.
2861 ZoneList<Expression*>* args = expr->arguments();
2862 int arg_count = args->length();
2863 for (int i = 0; i < arg_count; i++) {
2864 VisitForStackValue(args->at(i));
2865 }
2866
2867 // Call the construct call builtin that handles allocation and
2868 // constructor invocation.
2869 SetConstructCallPosition(expr);
2870
2871 // Load function and argument count into a1 and a0.
2872 __ li(a0, Operand(arg_count));
2873 __ ld(a1, MemOperand(sp, arg_count * kPointerSize));
2874
2875 // Record call targets in unoptimized code.
2876 __ EmitLoadTypeFeedbackVector(a2);
2877 __ li(a3, Operand(SmiFromSlot(expr->CallNewFeedbackSlot())));
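// Note: a2/a3 carry the type feedback vector and slot so that
// CallConstructStub can record the call target for later use by the
// optimizing compiler.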
2878
2879 CallConstructStub stub(isolate());
2880 __ Call(stub.GetCode(), RelocInfo::CODE_TARGET);
2881 OperandStackDepthDecrement(arg_count + 1);
2882 PrepareForBailoutForId(expr->ReturnId(), TOS_REG);
2883 // Restore context register.
2884 __ ld(cp, MemOperand(fp, StandardFrameConstants::kContextOffset));
2885 context()->Plug(v0);
2886}
2887
2888
2889void FullCodeGenerator::EmitSuperConstructorCall(Call* expr) {
2890 SuperCallReference* super_call_ref =
2891 expr->expression()->AsSuperCallReference();
2892 DCHECK_NOT_NULL(super_call_ref);
2893
2894 // Push the super constructor target on the stack (may be null,
2895 // but the Construct builtin can deal with that properly).
2896 VisitForAccumulatorValue(super_call_ref->this_function_var());
2897 __ AssertFunction(result_register());
2898 __ ld(result_register(),
2899 FieldMemOperand(result_register(), HeapObject::kMapOffset));
2900 __ ld(result_register(),
2901 FieldMemOperand(result_register(), Map::kPrototypeOffset));
2902 PushOperand(result_register());
2903
2904 // Push the arguments ("left-to-right") on the stack.
2905 ZoneList<Expression*>* args = expr->arguments();
2906 int arg_count = args->length();
2907 for (int i = 0; i < arg_count; i++) {
2908 VisitForStackValue(args->at(i));
2909 }
2910
2911 // Call the construct call builtin that handles allocation and
2912 // constructor invocation.
2913 SetConstructCallPosition(expr);
2914
2915 // Load new target into a3.
2916 VisitForAccumulatorValue(super_call_ref->new_target_var());
2917 __ mov(a3, result_register());
2918
2919 // Load function and argument count into a1 and a0.
2920 __ li(a0, Operand(arg_count));
2921 __ ld(a1, MemOperand(sp, arg_count * kPointerSize));
2922
2923 __ Call(isolate()->builtins()->Construct(), RelocInfo::CODE_TARGET);
2924 OperandStackDepthDecrement(arg_count + 1);
2925
2926 RecordJSReturnSite(expr);
2927
2928 // Restore context register.
2929 __ ld(cp, MemOperand(fp, StandardFrameConstants::kContextOffset));
2930 context()->Plug(v0);
2931}
2932
2933
2934void FullCodeGenerator::EmitIsSmi(CallRuntime* expr) {
2935 ZoneList<Expression*>* args = expr->arguments();
2936 DCHECK(args->length() == 1);
2937
2938 VisitForAccumulatorValue(args->at(0));
2939
2940 Label materialize_true, materialize_false;
2941 Label* if_true = NULL;
2942 Label* if_false = NULL;
2943 Label* fall_through = NULL;
2944 context()->PrepareTest(&materialize_true, &materialize_false,
2945 &if_true, &if_false, &fall_through);
2946
2947 PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
2948 __ SmiTst(v0, a4);
2949 Split(eq, a4, Operand(zero_reg), if_true, if_false, fall_through);
2950
2951 context()->Plug(if_true, if_false);
2952}
2953
2954
2955void FullCodeGenerator::EmitIsJSReceiver(CallRuntime* expr) {
2956 ZoneList<Expression*>* args = expr->arguments();
2957 DCHECK(args->length() == 1);
2958
2959 VisitForAccumulatorValue(args->at(0));
2960
2961 Label materialize_true, materialize_false;
2962 Label* if_true = NULL;
2963 Label* if_false = NULL;
2964 Label* fall_through = NULL;
2965 context()->PrepareTest(&materialize_true, &materialize_false,
2966 &if_true, &if_false, &fall_through);
2967
2968 __ JumpIfSmi(v0, if_false);
2969 __ GetObjectType(v0, a1, a1);
2970 PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
2971 Split(ge, a1, Operand(FIRST_JS_RECEIVER_TYPE),
2972 if_true, if_false, fall_through);
2973
2974 context()->Plug(if_true, if_false);
2975}
2976
2977
2978void FullCodeGenerator::EmitIsArray(CallRuntime* expr) {
2979 ZoneList<Expression*>* args = expr->arguments();
2980 DCHECK(args->length() == 1);
2981
2982 VisitForAccumulatorValue(args->at(0));
2983
2984 Label materialize_true, materialize_false;
2985 Label* if_true = NULL;
2986 Label* if_false = NULL;
2987 Label* fall_through = NULL;
2988 context()->PrepareTest(&materialize_true, &materialize_false,
2989 &if_true, &if_false, &fall_through);
2990
2991 __ JumpIfSmi(v0, if_false);
2992 __ GetObjectType(v0, a1, a1);
2993 PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
2994 Split(eq, a1, Operand(JS_ARRAY_TYPE),
2995 if_true, if_false, fall_through);
2996
2997 context()->Plug(if_true, if_false);
2998}
2999
3000
3001void FullCodeGenerator::EmitIsTypedArray(CallRuntime* expr) {
3002 ZoneList<Expression*>* args = expr->arguments();
3003 DCHECK(args->length() == 1);
3004
3005 VisitForAccumulatorValue(args->at(0));
3006
3007 Label materialize_true, materialize_false;
3008 Label* if_true = NULL;
3009 Label* if_false = NULL;
3010 Label* fall_through = NULL;
3011 context()->PrepareTest(&materialize_true, &materialize_false, &if_true,
3012 &if_false, &fall_through);
3013
3014 __ JumpIfSmi(v0, if_false);
3015 __ GetObjectType(v0, a1, a1);
3016 PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
3017 Split(eq, a1, Operand(JS_TYPED_ARRAY_TYPE), if_true, if_false, fall_through);
3018
3019 context()->Plug(if_true, if_false);
3020}
3021
3022
3023void FullCodeGenerator::EmitIsRegExp(CallRuntime* expr) {
3024 ZoneList<Expression*>* args = expr->arguments();
3025 DCHECK(args->length() == 1);
3026
3027 VisitForAccumulatorValue(args->at(0));
3028
3029 Label materialize_true, materialize_false;
3030 Label* if_true = NULL;
3031 Label* if_false = NULL;
3032 Label* fall_through = NULL;
3033 context()->PrepareTest(&materialize_true, &materialize_false,
3034 &if_true, &if_false, &fall_through);
3035
3036 __ JumpIfSmi(v0, if_false);
3037 __ GetObjectType(v0, a1, a1);
3038 PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
3039 Split(eq, a1, Operand(JS_REGEXP_TYPE), if_true, if_false, fall_through);
3040
3041 context()->Plug(if_true, if_false);
3042}
3043
3044
3045void FullCodeGenerator::EmitIsJSProxy(CallRuntime* expr) {
3046 ZoneList<Expression*>* args = expr->arguments();
3047 DCHECK(args->length() == 1);
3048
3049 VisitForAccumulatorValue(args->at(0));
3050
3051 Label materialize_true, materialize_false;
3052 Label* if_true = NULL;
3053 Label* if_false = NULL;
3054 Label* fall_through = NULL;
3055 context()->PrepareTest(&materialize_true, &materialize_false, &if_true,
3056 &if_false, &fall_through);
3057
3058 __ JumpIfSmi(v0, if_false);
3059 __ GetObjectType(v0, a1, a1);
3060 PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
3061 Split(eq, a1, Operand(JS_PROXY_TYPE), if_true, if_false, fall_through);
3062
3063 context()->Plug(if_true, if_false);
3064}
3065
3066
3067void FullCodeGenerator::EmitClassOf(CallRuntime* expr) {
3068 ZoneList<Expression*>* args = expr->arguments();
3069 DCHECK(args->length() == 1);
3070 Label done, null, function, non_function_constructor;
3071
3072 VisitForAccumulatorValue(args->at(0));
3073
3074 // If the object is not a JSReceiver, we return null.
3075 __ JumpIfSmi(v0, &null);
3076 STATIC_ASSERT(LAST_JS_RECEIVER_TYPE == LAST_TYPE);
3077 __ GetObjectType(v0, v0, a1); // Map is now in v0.
3078 __ Branch(&null, lt, a1, Operand(FIRST_JS_RECEIVER_TYPE));
3079
3080 // Return 'Function' for JSFunction objects.
3081 __ Branch(&function, eq, a1, Operand(JS_FUNCTION_TYPE));
3082
3083 // Check if the constructor in the map is a JS function.
3084 Register instance_type = a2;
3085 __ GetMapConstructor(v0, v0, a1, instance_type);
3086 __ Branch(&non_function_constructor, ne, instance_type,
3087 Operand(JS_FUNCTION_TYPE));
3088
3089 // v0 now contains the constructor function. Grab the
3090 // instance class name from there.
3091 __ ld(v0, FieldMemOperand(v0, JSFunction::kSharedFunctionInfoOffset));
3092 __ ld(v0, FieldMemOperand(v0, SharedFunctionInfo::kInstanceClassNameOffset));
3093 __ Branch(&done);
3094
3095 // Functions have class 'Function'.
3096 __ bind(&function);
3097 __ LoadRoot(v0, Heap::kFunction_stringRootIndex);
3098 __ jmp(&done);
3099
3100 // Objects with a non-function constructor have class 'Object'.
3101 __ bind(&non_function_constructor);
3102 __ LoadRoot(v0, Heap::kObject_stringRootIndex);
3103 __ jmp(&done);
3104
3105 // Non-JS objects have class null.
3106 __ bind(&null);
3107 __ LoadRoot(v0, Heap::kNullValueRootIndex);
3108
3109 // All done.
3110 __ bind(&done);
3111
3112 context()->Plug(v0);
3113}
3114
3115
3116void FullCodeGenerator::EmitValueOf(CallRuntime* expr) {
3117 ZoneList<Expression*>* args = expr->arguments();
3118 DCHECK(args->length() == 1);
3119
3120 VisitForAccumulatorValue(args->at(0)); // Load the object.
3121
3122 Label done;
3123 // If the object is a smi return the object.
3124 __ JumpIfSmi(v0, &done);
3125 // If the object is not a value type, return the object.
3126 __ GetObjectType(v0, a1, a1);
3127 __ Branch(&done, ne, a1, Operand(JS_VALUE_TYPE));
3128
3129 __ ld(v0, FieldMemOperand(v0, JSValue::kValueOffset));
3130
3131 __ bind(&done);
3132 context()->Plug(v0);
3133}
3134
3135
3136void FullCodeGenerator::EmitOneByteSeqStringSetChar(CallRuntime* expr) {
3137 ZoneList<Expression*>* args = expr->arguments();
3138 DCHECK_EQ(3, args->length());
3139
3140 Register string = v0;
3141 Register index = a1;
3142 Register value = a2;
3143
3144 VisitForStackValue(args->at(0)); // index
3145 VisitForStackValue(args->at(1)); // value
3146 VisitForAccumulatorValue(args->at(2)); // string
3147 PopOperands(index, value);
3148
3149 if (FLAG_debug_code) {
3150 __ SmiTst(value, at);
3151 __ Check(eq, kNonSmiValue, at, Operand(zero_reg));
3152 __ SmiTst(index, at);
3153 __ Check(eq, kNonSmiIndex, at, Operand(zero_reg));
3154 __ SmiUntag(index, index);
3155 static const uint32_t one_byte_seq_type = kSeqStringTag | kOneByteStringTag;
3156 Register scratch = t1;
3157 __ EmitSeqStringSetCharCheck(
3158 string, index, value, scratch, one_byte_seq_type);
3159 __ SmiTag(index, index);
3160 }
3161
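  // Compute the address of the character to store: untag the smi index and
  // add it to the start of the string's character data (one byte per char).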
3162 __ SmiUntag(value, value);
3163 __ Daddu(at,
3164 string,
3165 Operand(SeqOneByteString::kHeaderSize - kHeapObjectTag));
3166 __ SmiUntag(index);
3167 __ Daddu(at, at, index);
3168 __ sb(value, MemOperand(at));
3169 context()->Plug(string);
3170}
3171
3172
3173void FullCodeGenerator::EmitTwoByteSeqStringSetChar(CallRuntime* expr) {
3174 ZoneList<Expression*>* args = expr->arguments();
3175 DCHECK_EQ(3, args->length());
3176
3177 Register string = v0;
3178 Register index = a1;
3179 Register value = a2;
3180
3181 VisitForStackValue(args->at(0)); // index
3182 VisitForStackValue(args->at(1)); // value
3183 VisitForAccumulatorValue(args->at(2)); // string
3184 PopOperands(index, value);
3185
3186 if (FLAG_debug_code) {
3187 __ SmiTst(value, at);
3188 __ Check(eq, kNonSmiValue, at, Operand(zero_reg));
3189 __ SmiTst(index, at);
3190 __ Check(eq, kNonSmiIndex, at, Operand(zero_reg));
3191 __ SmiUntag(index, index);
3192 static const uint32_t two_byte_seq_type = kSeqStringTag | kTwoByteStringTag;
3193 Register scratch = t1;
3194 __ EmitSeqStringSetCharCheck(
3195 string, index, value, scratch, two_byte_seq_type);
3196 __ SmiTag(index, index);
3197 }
3198
3199 __ SmiUntag(value, value);
3200 __ Daddu(at,
3201 string,
3202 Operand(SeqTwoByteString::kHeaderSize - kHeapObjectTag));
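  // On MIPS64 a smi keeps its 32-bit payload in the upper word, so an
  // arithmetic shift right by 31 turns the smi index into index * 2, the
  // byte offset of a two-byte character.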
3203 __ dsra(index, index, 32 - 1);
3204 __ Daddu(at, at, index);
3205 STATIC_ASSERT(kSmiTagSize == 1 && kSmiTag == 0);
3206 __ sh(value, MemOperand(at));
3207 context()->Plug(string);
3208}
3209
3210
3211void FullCodeGenerator::EmitToInteger(CallRuntime* expr) {
3212 ZoneList<Expression*>* args = expr->arguments();
3213 DCHECK_EQ(1, args->length());
3214
3215 // Load the argument into v0 and convert it.
3216 VisitForAccumulatorValue(args->at(0));
3217
3218 // Convert the object to an integer.
3219 Label done_convert;
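  // Smis are already integers, so only heap objects take the runtime path.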
3220 __ JumpIfSmi(v0, &done_convert);
3221 __ Push(v0);
3222 __ CallRuntime(Runtime::kToInteger);
3223 __ bind(&done_convert);
3224 context()->Plug(v0);
3225}
3226
3227
3228void FullCodeGenerator::EmitStringCharFromCode(CallRuntime* expr) {
3229 ZoneList<Expression*>* args = expr->arguments();
3230 DCHECK(args->length() == 1);
3231
3232 VisitForAccumulatorValue(args->at(0));
3233
3234 Label done;
3235 StringCharFromCodeGenerator generator(v0, a1);
3236 generator.GenerateFast(masm_);
3237 __ jmp(&done);
3238
3239 NopRuntimeCallHelper call_helper;
3240 generator.GenerateSlow(masm_, call_helper);
3241
3242 __ bind(&done);
3243 context()->Plug(a1);
3244}
3245
3246
3247void FullCodeGenerator::EmitStringCharCodeAt(CallRuntime* expr) {
3248 ZoneList<Expression*>* args = expr->arguments();
3249 DCHECK(args->length() == 2);
3250
3251 VisitForStackValue(args->at(0));
3252 VisitForAccumulatorValue(args->at(1));
3253 __ mov(a0, result_register());
3254
3255 Register object = a1;
3256 Register index = a0;
3257 Register result = v0;
3258
3259 PopOperand(object);
3260
3261 Label need_conversion;
3262 Label index_out_of_range;
3263 Label done;
3264 StringCharCodeAtGenerator generator(object,
3265 index,
3266 result,
3267 &need_conversion,
3268 &need_conversion,
3269 &index_out_of_range,
3270 STRING_INDEX_IS_NUMBER);
3271 generator.GenerateFast(masm_);
3272 __ jmp(&done);
3273
3274 __ bind(&index_out_of_range);
3275 // When the index is out of range, the spec requires us to return
3276 // NaN.
3277 __ LoadRoot(result, Heap::kNanValueRootIndex);
3278 __ jmp(&done);
3279
3280 __ bind(&need_conversion);
3281 // Load the undefined value into the result register, which will
3282 // trigger conversion.
3283 __ LoadRoot(result, Heap::kUndefinedValueRootIndex);
3284 __ jmp(&done);
3285
3286 NopRuntimeCallHelper call_helper;
3287 generator.GenerateSlow(masm_, NOT_PART_OF_IC_HANDLER, call_helper);
3288
3289 __ bind(&done);
3290 context()->Plug(result);
3291}
3292
3293
3294void FullCodeGenerator::EmitStringCharAt(CallRuntime* expr) {
3295 ZoneList<Expression*>* args = expr->arguments();
3296 DCHECK(args->length() == 2);
3297
3298 VisitForStackValue(args->at(0));
3299 VisitForAccumulatorValue(args->at(1));
3300 __ mov(a0, result_register());
3301
3302 Register object = a1;
3303 Register index = a0;
3304 Register scratch = a3;
3305 Register result = v0;
3306
3307 PopOperand(object);
3308
3309 Label need_conversion;
3310 Label index_out_of_range;
3311 Label done;
3312 StringCharAtGenerator generator(object,
3313 index,
3314 scratch,
3315 result,
3316 &need_conversion,
3317 &need_conversion,
3318 &index_out_of_range,
3319 STRING_INDEX_IS_NUMBER);
3320 generator.GenerateFast(masm_);
3321 __ jmp(&done);
3322
3323 __ bind(&index_out_of_range);
3324 // When the index is out of range, the spec requires us to return
3325 // the empty string.
3326 __ LoadRoot(result, Heap::kempty_stringRootIndex);
3327 __ jmp(&done);
3328
3329 __ bind(&need_conversion);
3330 // Move smi zero into the result register, which will trigger
3331 // conversion.
3332 __ li(result, Operand(Smi::FromInt(0)));
3333 __ jmp(&done);
3334
3335 NopRuntimeCallHelper call_helper;
3336 generator.GenerateSlow(masm_, NOT_PART_OF_IC_HANDLER, call_helper);
3337
3338 __ bind(&done);
3339 context()->Plug(result);
3340}
3341
3342
3343void FullCodeGenerator::EmitCall(CallRuntime* expr) {
3344 ZoneList<Expression*>* args = expr->arguments();
3345 DCHECK_LE(2, args->length());
3346 // Push target, receiver and arguments onto the stack.
3347 for (Expression* const arg : *args) {
3348 VisitForStackValue(arg);
3349 }
3350 PrepareForBailoutForId(expr->CallId(), NO_REGISTERS);
3351 // Move target to a1.
3352 int const argc = args->length() - 2;
3353 __ ld(a1, MemOperand(sp, (argc + 1) * kPointerSize));
3354 // Call the target.
3355 __ li(a0, Operand(argc));
3356 __ Call(isolate()->builtins()->Call(), RelocInfo::CODE_TARGET);
3357 OperandStackDepthDecrement(argc + 1);
3358 // Restore context register.
3359 __ ld(cp, MemOperand(fp, StandardFrameConstants::kContextOffset));
3360 // Discard the function left on TOS.
3361 context()->DropAndPlug(1, v0);
3362}
3363
3364
3365void FullCodeGenerator::EmitHasCachedArrayIndex(CallRuntime* expr) {
3366 ZoneList<Expression*>* args = expr->arguments();
3367 VisitForAccumulatorValue(args->at(0));
3368
3369 Label materialize_true, materialize_false;
3370 Label* if_true = NULL;
3371 Label* if_false = NULL;
3372 Label* fall_through = NULL;
3373 context()->PrepareTest(&materialize_true, &materialize_false,
3374 &if_true, &if_false, &fall_through);
3375
3376 __ lwu(a0, FieldMemOperand(v0, String::kHashFieldOffset));
3377 __ And(a0, a0, Operand(String::kContainsCachedArrayIndexMask));
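  // A zero result means the hash field caches an array index for this string.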
3378
3379 PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
3380 Split(eq, a0, Operand(zero_reg), if_true, if_false, fall_through);
3381
3382 context()->Plug(if_true, if_false);
3383}
3384
3385
3386void FullCodeGenerator::EmitGetCachedArrayIndex(CallRuntime* expr) {
3387 ZoneList<Expression*>* args = expr->arguments();
3388 DCHECK(args->length() == 1);
3389 VisitForAccumulatorValue(args->at(0));
3390
3391 __ AssertString(v0);
3392
3393 __ lwu(v0, FieldMemOperand(v0, String::kHashFieldOffset));
3394 __ IndexFromHash(v0, v0);
3395
3396 context()->Plug(v0);
3397}
3398
3399
3400void FullCodeGenerator::EmitGetSuperConstructor(CallRuntime* expr) {
3401 ZoneList<Expression*>* args = expr->arguments();
3402 DCHECK_EQ(1, args->length());
3403 VisitForAccumulatorValue(args->at(0));
3404 __ AssertFunction(v0);
3405 __ ld(v0, FieldMemOperand(v0, HeapObject::kMapOffset));
3406 __ ld(v0, FieldMemOperand(v0, Map::kPrototypeOffset));
3407 context()->Plug(v0);
3408}
3409
3410
3411void FullCodeGenerator::EmitDebugIsActive(CallRuntime* expr) {
3412 DCHECK(expr->arguments()->length() == 0);
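  // The flag lives in a single byte at a fixed external address; load it and
  // return it boxed as a smi.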
3413 ExternalReference debug_is_active =
3414 ExternalReference::debug_is_active_address(isolate());
3415 __ li(at, Operand(debug_is_active));
3416 __ lbu(v0, MemOperand(at));
3417 __ SmiTag(v0);
3418 context()->Plug(v0);
3419}
3420
3421
3422void FullCodeGenerator::EmitCreateIterResultObject(CallRuntime* expr) {
3423 ZoneList<Expression*>* args = expr->arguments();
3424 DCHECK_EQ(2, args->length());
3425 VisitForStackValue(args->at(0));
3426 VisitForStackValue(args->at(1));
3427
3428 Label runtime, done;
3429
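  // Fast path: allocate the JSIteratorResult object inline and initialize its
  // map, properties, elements, value and done fields; fall back to the
  // runtime on allocation failure.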
3430 __ Allocate(JSIteratorResult::kSize, v0, a2, a3, &runtime, TAG_OBJECT);
3431 __ LoadNativeContextSlot(Context::ITERATOR_RESULT_MAP_INDEX, a1);
3432 __ Pop(a2, a3);
3433 __ LoadRoot(a4, Heap::kEmptyFixedArrayRootIndex);
3434 __ sd(a1, FieldMemOperand(v0, HeapObject::kMapOffset));
3435 __ sd(a4, FieldMemOperand(v0, JSObject::kPropertiesOffset));
3436 __ sd(a4, FieldMemOperand(v0, JSObject::kElementsOffset));
3437 __ sd(a2, FieldMemOperand(v0, JSIteratorResult::kValueOffset));
3438 __ sd(a3, FieldMemOperand(v0, JSIteratorResult::kDoneOffset));
3439 STATIC_ASSERT(JSIteratorResult::kSize == 5 * kPointerSize);
3440 __ jmp(&done);
3441
3442 __ bind(&runtime);
3443 CallRuntimeWithOperands(Runtime::kCreateIterResultObject);
3444
3445 __ bind(&done);
3446 context()->Plug(v0);
3447}
3448
3449
3450void FullCodeGenerator::EmitLoadJSRuntimeFunction(CallRuntime* expr) {
3451 // Push undefined as the receiver.
3452 __ LoadRoot(v0, Heap::kUndefinedValueRootIndex);
3453 PushOperand(v0);
3454
3455 __ LoadNativeContextSlot(expr->context_index(), v0);
3456}
3457
3458
3459void FullCodeGenerator::EmitCallJSRuntimeFunction(CallRuntime* expr) {
3460 ZoneList<Expression*>* args = expr->arguments();
3461 int arg_count = args->length();
3462
3463 SetCallPosition(expr);
3464 __ ld(a1, MemOperand(sp, (arg_count + 1) * kPointerSize));
3465 __ li(a0, Operand(arg_count));
3466 __ Call(isolate()->builtins()->Call(ConvertReceiverMode::kNullOrUndefined),
3467 RelocInfo::CODE_TARGET);
3468 OperandStackDepthDecrement(arg_count + 1);
3469}
3470
3471
3472void FullCodeGenerator::VisitCallRuntime(CallRuntime* expr) {
3473 ZoneList<Expression*>* args = expr->arguments();
3474 int arg_count = args->length();
3475
3476 if (expr->is_jsruntime()) {
3477 Comment cmnt(masm_, "[ CallRuntime");
3478 EmitLoadJSRuntimeFunction(expr);
3479
3480 // Push the target function under the receiver.
3481 __ ld(at, MemOperand(sp, 0));
3482 PushOperand(at);
3483 __ sd(v0, MemOperand(sp, kPointerSize));
3484
3485 // Push the arguments ("left-to-right").
3486 for (int i = 0; i < arg_count; i++) {
3487 VisitForStackValue(args->at(i));
3488 }
3489
3490 PrepareForBailoutForId(expr->CallId(), NO_REGISTERS);
3491 EmitCallJSRuntimeFunction(expr);
3492
3493 // Restore context register.
3494 __ ld(cp, MemOperand(fp, StandardFrameConstants::kContextOffset));
3495
3496 context()->DropAndPlug(1, v0);
3497 } else {
3498 const Runtime::Function* function = expr->function();
3499 switch (function->function_id) {
3500#define CALL_INTRINSIC_GENERATOR(Name) \
3501 case Runtime::kInline##Name: { \
3502 Comment cmnt(masm_, "[ Inline" #Name); \
3503 return Emit##Name(expr); \
3504 }
3505 FOR_EACH_FULL_CODE_INTRINSIC(CALL_INTRINSIC_GENERATOR)
3506#undef CALL_INTRINSIC_GENERATOR
3507 default: {
3508 Comment cmnt(masm_, "[ CallRuntime for unhandled intrinsic");
3509 // Push the arguments ("left-to-right").
3510 for (int i = 0; i < arg_count; i++) {
3511 VisitForStackValue(args->at(i));
3512 }
3513
3514 // Call the C runtime function.
3515 PrepareForBailoutForId(expr->CallId(), NO_REGISTERS);
3516 __ CallRuntime(expr->function(), arg_count);
3517 OperandStackDepthDecrement(arg_count);
3518 context()->Plug(v0);
3519 }
3520 }
3521 }
3522}
3523
3524
3525void FullCodeGenerator::VisitUnaryOperation(UnaryOperation* expr) {
3526 switch (expr->op()) {
3527 case Token::DELETE: {
3528 Comment cmnt(masm_, "[ UnaryOperation (DELETE)");
3529 Property* property = expr->expression()->AsProperty();
3530 VariableProxy* proxy = expr->expression()->AsVariableProxy();
3531
3532 if (property != NULL) {
3533 VisitForStackValue(property->obj());
3534 VisitForStackValue(property->key());
3535 CallRuntimeWithOperands(is_strict(language_mode())
3536 ? Runtime::kDeleteProperty_Strict
3537 : Runtime::kDeleteProperty_Sloppy);
3538 context()->Plug(v0);
3539 } else if (proxy != NULL) {
3540 Variable* var = proxy->var();
3541 // Delete of an unqualified identifier is disallowed in strict mode but
3542 // "delete this" is allowed.
3543 bool is_this = var->HasThisName(isolate());
3544 DCHECK(is_sloppy(language_mode()) || is_this);
3545 if (var->IsUnallocatedOrGlobalSlot()) {
3546 __ LoadGlobalObject(a2);
3547 __ li(a1, Operand(var->name()));
3548 __ Push(a2, a1);
3549 __ CallRuntime(Runtime::kDeleteProperty_Sloppy);
3550 context()->Plug(v0);
3551 } else if (var->IsStackAllocated() || var->IsContextSlot()) {
3552 // Result of deleting non-global, non-dynamic variables is false.
3553 // The subexpression does not have side effects.
3554 context()->Plug(is_this);
3555 } else {
3556 // Non-global variable. Call the runtime to try to delete from the
3557 // context where the variable was introduced.
3558 DCHECK(!context_register().is(a2));
3559 __ Push(var->name());
3560 __ CallRuntime(Runtime::kDeleteLookupSlot);
3561 context()->Plug(v0);
3562 }
3563 } else {
3564 // Result of deleting non-property, non-variable reference is true.
3565 // The subexpression may have side effects.
3566 VisitForEffect(expr->expression());
3567 context()->Plug(true);
3568 }
3569 break;
3570 }
3571
3572 case Token::VOID: {
3573 Comment cmnt(masm_, "[ UnaryOperation (VOID)");
3574 VisitForEffect(expr->expression());
3575 context()->Plug(Heap::kUndefinedValueRootIndex);
3576 break;
3577 }
3578
3579 case Token::NOT: {
3580 Comment cmnt(masm_, "[ UnaryOperation (NOT)");
3581 if (context()->IsEffect()) {
3582 // Unary NOT has no side effects so it's only necessary to visit the
3583 // subexpression. Match the optimizing compiler by not branching.
3584 VisitForEffect(expr->expression());
3585 } else if (context()->IsTest()) {
3586 const TestContext* test = TestContext::cast(context());
3587 // The labels are swapped for the recursive call.
3588 VisitForControl(expr->expression(),
3589 test->false_label(),
3590 test->true_label(),
3591 test->fall_through());
3592 context()->Plug(test->true_label(), test->false_label());
3593 } else {
3594 // We handle value contexts explicitly rather than simply visiting
3595 // for control and plugging the control flow into the context,
3596 // because we need to prepare a pair of extra administrative AST ids
3597 // for the optimizing compiler.
3598 DCHECK(context()->IsAccumulatorValue() || context()->IsStackValue());
3599 Label materialize_true, materialize_false, done;
3600 VisitForControl(expr->expression(),
3601 &materialize_false,
3602 &materialize_true,
3603 &materialize_true);
3604 if (!context()->IsAccumulatorValue()) OperandStackDepthIncrement(1);
3605 __ bind(&materialize_true);
3606 PrepareForBailoutForId(expr->MaterializeTrueId(), NO_REGISTERS);
3607 __ LoadRoot(v0, Heap::kTrueValueRootIndex);
3608 if (context()->IsStackValue()) __ push(v0);
3609 __ jmp(&done);
3610 __ bind(&materialize_false);
3611 PrepareForBailoutForId(expr->MaterializeFalseId(), NO_REGISTERS);
3612 __ LoadRoot(v0, Heap::kFalseValueRootIndex);
3613 if (context()->IsStackValue()) __ push(v0);
3614 __ bind(&done);
3615 }
3616 break;
3617 }
3618
3619 case Token::TYPEOF: {
3620 Comment cmnt(masm_, "[ UnaryOperation (TYPEOF)");
3621 {
3622 AccumulatorValueContext context(this);
3623 VisitForTypeofValue(expr->expression());
3624 }
3625 __ mov(a3, v0);
3626 TypeofStub typeof_stub(isolate());
3627 __ CallStub(&typeof_stub);
3628 context()->Plug(v0);
3629 break;
3630 }
3631
3632 default:
3633 UNREACHABLE();
3634 }
3635}
3636
3637
3638void FullCodeGenerator::VisitCountOperation(CountOperation* expr) {
3639 DCHECK(expr->expression()->IsValidReferenceExpressionOrThis());
3640
3641 Comment cmnt(masm_, "[ CountOperation");
3642
3643 Property* prop = expr->expression()->AsProperty();
3644 LhsKind assign_type = Property::GetAssignType(prop);
3645
3646 // Evaluate expression and get value.
3647 if (assign_type == VARIABLE) {
3648 DCHECK(expr->expression()->AsVariableProxy()->var() != NULL);
3649 AccumulatorValueContext context(this);
3650 EmitVariableLoad(expr->expression()->AsVariableProxy());
3651 } else {
3652 // Reserve space for result of postfix operation.
3653 if (expr->is_postfix() && !context()->IsEffect()) {
3654 __ li(at, Operand(Smi::FromInt(0)));
3655 PushOperand(at);
3656 }
3657 switch (assign_type) {
3658 case NAMED_PROPERTY: {
3659 // Put the object both on the stack and in the register.
3660 VisitForStackValue(prop->obj());
3661 __ ld(LoadDescriptor::ReceiverRegister(), MemOperand(sp, 0));
3662 EmitNamedPropertyLoad(prop);
3663 break;
3664 }
3665
3666 case NAMED_SUPER_PROPERTY: {
3667 VisitForStackValue(prop->obj()->AsSuperPropertyReference()->this_var());
3668 VisitForAccumulatorValue(
3669 prop->obj()->AsSuperPropertyReference()->home_object());
3670 PushOperand(result_register());
3671 const Register scratch = a1;
3672 __ ld(scratch, MemOperand(sp, kPointerSize));
3673 PushOperands(scratch, result_register());
3674 EmitNamedSuperPropertyLoad(prop);
3675 break;
3676 }
3677
3678 case KEYED_SUPER_PROPERTY: {
3679 VisitForStackValue(prop->obj()->AsSuperPropertyReference()->this_var());
3680 VisitForAccumulatorValue(
3681 prop->obj()->AsSuperPropertyReference()->home_object());
3682 const Register scratch = a1;
3683 const Register scratch1 = a4;
3684 __ Move(scratch, result_register());
3685 VisitForAccumulatorValue(prop->key());
3686 PushOperands(scratch, result_register());
3687 __ ld(scratch1, MemOperand(sp, 2 * kPointerSize));
3688 PushOperands(scratch1, scratch, result_register());
3689 EmitKeyedSuperPropertyLoad(prop);
3690 break;
3691 }
3692
3693 case KEYED_PROPERTY: {
3694 VisitForStackValue(prop->obj());
3695 VisitForStackValue(prop->key());
3696 __ ld(LoadDescriptor::ReceiverRegister(),
3697 MemOperand(sp, 1 * kPointerSize));
3698 __ ld(LoadDescriptor::NameRegister(), MemOperand(sp, 0));
3699 EmitKeyedPropertyLoad(prop);
3700 break;
3701 }
3702
3703 case VARIABLE:
3704 UNREACHABLE();
3705 }
3706 }
3707
3708 // We need a second deoptimization point after loading the value
3709 // in case evaluating the property load may have a side effect.
3710 if (assign_type == VARIABLE) {
3711 PrepareForBailout(expr->expression(), TOS_REG);
3712 } else {
3713 PrepareForBailoutForId(prop->LoadId(), TOS_REG);
3714 }
3715
3716 // Inline smi case if we are in a loop.
3717 Label stub_call, done;
3718 JumpPatchSite patch_site(masm_);
3719
3720 int count_value = expr->op() == Token::INC ? 1 : -1;
3721 __ mov(a0, v0);
3722 if (ShouldInlineSmiCase(expr->op())) {
3723 Label slow;
3724 patch_site.EmitJumpIfNotSmi(v0, &slow);
3725
3726 // Save result for postfix expressions.
3727 if (expr->is_postfix()) {
3728 if (!context()->IsEffect()) {
3729 // Save the result on the stack. If we have a named or keyed property
3730 // we store the result under the receiver that is currently on top
3731 // of the stack.
3732 switch (assign_type) {
3733 case VARIABLE:
3734 __ push(v0);
3735 break;
3736 case NAMED_PROPERTY:
3737 __ sd(v0, MemOperand(sp, kPointerSize));
3738 break;
3739 case NAMED_SUPER_PROPERTY:
3740 __ sd(v0, MemOperand(sp, 2 * kPointerSize));
3741 break;
3742 case KEYED_PROPERTY:
3743 __ sd(v0, MemOperand(sp, 2 * kPointerSize));
3744 break;
3745 case KEYED_SUPER_PROPERTY:
3746 __ sd(v0, MemOperand(sp, 3 * kPointerSize));
3747 break;
3748 }
3749 }
3750 }
3751
3752 Register scratch1 = a1;
3753 Register scratch2 = a4;
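  // Both operands are smis, so adding the tagged values directly yields a
  // correctly tagged result unless it overflows; on overflow the original
  // value is restored and the stub is called instead.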
3754 __ li(scratch1, Operand(Smi::FromInt(count_value)));
3755 __ DadduAndCheckForOverflow(v0, v0, scratch1, scratch2);
3756 __ BranchOnNoOverflow(&done, scratch2);
3757 // Call stub. Undo operation first.
3758 __ Move(v0, a0);
3759 __ jmp(&stub_call);
3760 __ bind(&slow);
3761 }
3762 if (!is_strong(language_mode())) {
3763 ToNumberStub convert_stub(isolate());
3764 __ CallStub(&convert_stub);
3765 PrepareForBailoutForId(expr->ToNumberId(), TOS_REG);
3766 }
3767
3768 // Save result for postfix expressions.
3769 if (expr->is_postfix()) {
3770 if (!context()->IsEffect()) {
3771 // Save the result on the stack. If we have a named or keyed property
3772 // we store the result under the receiver that is currently on top
3773 // of the stack.
3774 switch (assign_type) {
3775 case VARIABLE:
3776 PushOperand(v0);
3777 break;
3778 case NAMED_PROPERTY:
3779 __ sd(v0, MemOperand(sp, kPointerSize));
3780 break;
3781 case NAMED_SUPER_PROPERTY:
3782 __ sd(v0, MemOperand(sp, 2 * kPointerSize));
3783 break;
3784 case KEYED_PROPERTY:
3785 __ sd(v0, MemOperand(sp, 2 * kPointerSize));
3786 break;
3787 case KEYED_SUPER_PROPERTY:
3788 __ sd(v0, MemOperand(sp, 3 * kPointerSize));
3789 break;
3790 }
3791 }
3792 }
3793
3794 __ bind(&stub_call);
3795 __ mov(a1, v0);
3796 __ li(a0, Operand(Smi::FromInt(count_value)));
3797
3798 SetExpressionPosition(expr);
3799
3800 Handle<Code> code = CodeFactory::BinaryOpIC(isolate(), Token::ADD).code();
3801 CallIC(code, expr->CountBinOpFeedbackId());
3802 patch_site.EmitPatchInfo();
3803 __ bind(&done);
3804
3805 if (is_strong(language_mode())) {
3806 PrepareForBailoutForId(expr->ToNumberId(), TOS_REG);
3807 }
3808 // Store the value returned in v0.
3809 switch (assign_type) {
3810 case VARIABLE:
3811 if (expr->is_postfix()) {
3812 { EffectContext context(this);
3813 EmitVariableAssignment(expr->expression()->AsVariableProxy()->var(),
3814 Token::ASSIGN, expr->CountSlot());
3815 PrepareForBailoutForId(expr->AssignmentId(), TOS_REG);
3816 context.Plug(v0);
3817 }
3818 // For all contexts except EffectConstant we have the result on
3819 // top of the stack.
3820 if (!context()->IsEffect()) {
3821 context()->PlugTOS();
3822 }
3823 } else {
3824 EmitVariableAssignment(expr->expression()->AsVariableProxy()->var(),
3825 Token::ASSIGN, expr->CountSlot());
3826 PrepareForBailoutForId(expr->AssignmentId(), TOS_REG);
3827 context()->Plug(v0);
3828 }
3829 break;
3830 case NAMED_PROPERTY: {
3831 __ mov(StoreDescriptor::ValueRegister(), result_register());
3832 __ li(StoreDescriptor::NameRegister(),
3833 Operand(prop->key()->AsLiteral()->value()));
3834 PopOperand(StoreDescriptor::ReceiverRegister());
3835 EmitLoadStoreICSlot(expr->CountSlot());
3836 CallStoreIC();
3837 PrepareForBailoutForId(expr->AssignmentId(), TOS_REG);
3838 if (expr->is_postfix()) {
3839 if (!context()->IsEffect()) {
3840 context()->PlugTOS();
3841 }
3842 } else {
3843 context()->Plug(v0);
3844 }
3845 break;
3846 }
3847 case NAMED_SUPER_PROPERTY: {
3848 EmitNamedSuperPropertyStore(prop);
3849 if (expr->is_postfix()) {
3850 if (!context()->IsEffect()) {
3851 context()->PlugTOS();
3852 }
3853 } else {
3854 context()->Plug(v0);
3855 }
3856 break;
3857 }
3858 case KEYED_SUPER_PROPERTY: {
3859 EmitKeyedSuperPropertyStore(prop);
3860 if (expr->is_postfix()) {
3861 if (!context()->IsEffect()) {
3862 context()->PlugTOS();
3863 }
3864 } else {
3865 context()->Plug(v0);
3866 }
3867 break;
3868 }
3869 case KEYED_PROPERTY: {
3870 __ mov(StoreDescriptor::ValueRegister(), result_register());
3871 PopOperands(StoreDescriptor::ReceiverRegister(),
3872 StoreDescriptor::NameRegister());
3873 Handle<Code> ic =
3874 CodeFactory::KeyedStoreIC(isolate(), language_mode()).code();
3875 EmitLoadStoreICSlot(expr->CountSlot());
3876 CallIC(ic);
3877 PrepareForBailoutForId(expr->AssignmentId(), TOS_REG);
3878 if (expr->is_postfix()) {
3879 if (!context()->IsEffect()) {
3880 context()->PlugTOS();
3881 }
3882 } else {
3883 context()->Plug(v0);
3884 }
3885 break;
3886 }
3887 }
3888}
3889
3890
3891void FullCodeGenerator::EmitLiteralCompareTypeof(Expression* expr,
3892 Expression* sub_expr,
3893 Handle<String> check) {
3894 Label materialize_true, materialize_false;
3895 Label* if_true = NULL;
3896 Label* if_false = NULL;
3897 Label* fall_through = NULL;
3898 context()->PrepareTest(&materialize_true, &materialize_false,
3899 &if_true, &if_false, &fall_through);
3900
3901 { AccumulatorValueContext context(this);
3902 VisitForTypeofValue(sub_expr);
3903 }
3904 PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
3905
3906 Factory* factory = isolate()->factory();
3907 if (String::Equals(check, factory->number_string())) {
3908 __ JumpIfSmi(v0, if_true);
3909 __ ld(v0, FieldMemOperand(v0, HeapObject::kMapOffset));
3910 __ LoadRoot(at, Heap::kHeapNumberMapRootIndex);
3911 Split(eq, v0, Operand(at), if_true, if_false, fall_through);
3912 } else if (String::Equals(check, factory->string_string())) {
3913 __ JumpIfSmi(v0, if_false);
3914 __ GetObjectType(v0, v0, a1);
3915 Split(lt, a1, Operand(FIRST_NONSTRING_TYPE), if_true, if_false,
3916 fall_through);
3917 } else if (String::Equals(check, factory->symbol_string())) {
3918 __ JumpIfSmi(v0, if_false);
3919 __ GetObjectType(v0, v0, a1);
3920 Split(eq, a1, Operand(SYMBOL_TYPE), if_true, if_false, fall_through);
3921 } else if (String::Equals(check, factory->boolean_string())) {
3922 __ LoadRoot(at, Heap::kTrueValueRootIndex);
3923 __ Branch(if_true, eq, v0, Operand(at));
3924 __ LoadRoot(at, Heap::kFalseValueRootIndex);
3925 Split(eq, v0, Operand(at), if_true, if_false, fall_through);
3926 } else if (String::Equals(check, factory->undefined_string())) {
3927 __ LoadRoot(at, Heap::kNullValueRootIndex);
3928 __ Branch(if_false, eq, v0, Operand(at));
3929 __ JumpIfSmi(v0, if_false);
3930 // Check for undetectable objects => true.
3931 __ ld(v0, FieldMemOperand(v0, HeapObject::kMapOffset));
3932 __ lbu(a1, FieldMemOperand(v0, Map::kBitFieldOffset));
3933 __ And(a1, a1, Operand(1 << Map::kIsUndetectable));
3934 Split(ne, a1, Operand(zero_reg), if_true, if_false, fall_through);
3935 } else if (String::Equals(check, factory->function_string())) {
3936 __ JumpIfSmi(v0, if_false);
3937 __ ld(v0, FieldMemOperand(v0, HeapObject::kMapOffset));
3938 __ lbu(a1, FieldMemOperand(v0, Map::kBitFieldOffset));
3939 __ And(a1, a1,
3940 Operand((1 << Map::kIsCallable) | (1 << Map::kIsUndetectable)));
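  // typeof evaluates to "function" only for objects that are callable and not
  // undetectable, i.e. when exactly the kIsCallable bit is set.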
3941 Split(eq, a1, Operand(1 << Map::kIsCallable), if_true, if_false,
3942 fall_through);
3943 } else if (String::Equals(check, factory->object_string())) {
3944 __ JumpIfSmi(v0, if_false);
3945 __ LoadRoot(at, Heap::kNullValueRootIndex);
3946 __ Branch(if_true, eq, v0, Operand(at));
3947 STATIC_ASSERT(LAST_JS_RECEIVER_TYPE == LAST_TYPE);
3948 __ GetObjectType(v0, v0, a1);
3949 __ Branch(if_false, lt, a1, Operand(FIRST_JS_RECEIVER_TYPE));
3950 // Check for callable or undetectable objects => false.
3951 __ lbu(a1, FieldMemOperand(v0, Map::kBitFieldOffset));
3952 __ And(a1, a1,
3953 Operand((1 << Map::kIsCallable) | (1 << Map::kIsUndetectable)));
3954 Split(eq, a1, Operand(zero_reg), if_true, if_false, fall_through);
3955// clang-format off
3956#define SIMD128_TYPE(TYPE, Type, type, lane_count, lane_type) \
3957 } else if (String::Equals(check, factory->type##_string())) { \
3958 __ JumpIfSmi(v0, if_false); \
3959 __ ld(v0, FieldMemOperand(v0, HeapObject::kMapOffset)); \
3960 __ LoadRoot(at, Heap::k##Type##MapRootIndex); \
3961 Split(eq, v0, Operand(at), if_true, if_false, fall_through);
3962 SIMD128_TYPES(SIMD128_TYPE)
3963#undef SIMD128_TYPE
3964 // clang-format on
3965 } else {
3966 if (if_false != fall_through) __ jmp(if_false);
3967 }
3968 context()->Plug(if_true, if_false);
3969}
3970
3971
3972void FullCodeGenerator::VisitCompareOperation(CompareOperation* expr) {
3973 Comment cmnt(masm_, "[ CompareOperation");
3974 SetExpressionPosition(expr);
3975
3976 // First we try a fast inlined version of the compare when one of
3977 // the operands is a literal.
3978 if (TryLiteralCompare(expr)) return;
3979
3980 // Always perform the comparison for its control flow. Pack the result
3981 // into the expression's context after the comparison is performed.
3982 Label materialize_true, materialize_false;
3983 Label* if_true = NULL;
3984 Label* if_false = NULL;
3985 Label* fall_through = NULL;
3986 context()->PrepareTest(&materialize_true, &materialize_false,
3987 &if_true, &if_false, &fall_through);
3988
3989 Token::Value op = expr->op();
3990 VisitForStackValue(expr->left());
3991 switch (op) {
3992 case Token::IN:
3993 VisitForStackValue(expr->right());
3994 CallRuntimeWithOperands(Runtime::kHasProperty);
3995 PrepareForBailoutBeforeSplit(expr, false, NULL, NULL);
3996 __ LoadRoot(a4, Heap::kTrueValueRootIndex);
3997 Split(eq, v0, Operand(a4), if_true, if_false, fall_through);
3998 break;
3999
4000 case Token::INSTANCEOF: {
4001 VisitForAccumulatorValue(expr->right());
4002 __ mov(a0, result_register());
4003 PopOperand(a1);
4004 InstanceOfStub stub(isolate());
4005 __ CallStub(&stub);
4006 PrepareForBailoutBeforeSplit(expr, false, NULL, NULL);
4007 __ LoadRoot(a4, Heap::kTrueValueRootIndex);
4008 Split(eq, v0, Operand(a4), if_true, if_false, fall_through);
4009 break;
4010 }
4011
4012 default: {
4013 VisitForAccumulatorValue(expr->right());
4014 Condition cc = CompareIC::ComputeCondition(op);
4015 __ mov(a0, result_register());
4016 PopOperand(a1);
4017
4018 bool inline_smi_code = ShouldInlineSmiCase(op);
4019 JumpPatchSite patch_site(masm_);
4020 if (inline_smi_code) {
4021 Label slow_case;
4022 __ Or(a2, a0, Operand(a1));
4023 patch_site.EmitJumpIfNotSmi(a2, &slow_case);
4024 Split(cc, a1, Operand(a0), if_true, if_false, NULL);
4025 __ bind(&slow_case);
4026 }
4027
4028 Handle<Code> ic = CodeFactory::CompareIC(isolate(), op).code();
4029 CallIC(ic, expr->CompareOperationFeedbackId());
4030 patch_site.EmitPatchInfo();
4031 PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
4032 Split(cc, v0, Operand(zero_reg), if_true, if_false, fall_through);
4033 }
4034 }
4035
4036 // Convert the result of the comparison into one expected for this
4037 // expression's context.
4038 context()->Plug(if_true, if_false);
4039}
4040
4041
4042void FullCodeGenerator::EmitLiteralCompareNil(CompareOperation* expr,
4043 Expression* sub_expr,
4044 NilValue nil) {
4045 Label materialize_true, materialize_false;
4046 Label* if_true = NULL;
4047 Label* if_false = NULL;
4048 Label* fall_through = NULL;
4049 context()->PrepareTest(&materialize_true, &materialize_false,
4050 &if_true, &if_false, &fall_through);
4051
4052 VisitForAccumulatorValue(sub_expr);
4053 PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
4054 __ mov(a0, result_register());
4055 if (expr->op() == Token::EQ_STRICT) {
4056 Heap::RootListIndex nil_value = nil == kNullValue ?
4057 Heap::kNullValueRootIndex :
4058 Heap::kUndefinedValueRootIndex;
4059 __ LoadRoot(a1, nil_value);
4060 Split(eq, a0, Operand(a1), if_true, if_false, fall_through);
4061 } else {
4062 Handle<Code> ic = CompareNilICStub::GetUninitialized(isolate(), nil);
4063 CallIC(ic, expr->CompareOperationFeedbackId());
4064 __ LoadRoot(a1, Heap::kTrueValueRootIndex);
4065 Split(eq, v0, Operand(a1), if_true, if_false, fall_through);
4066 }
4067 context()->Plug(if_true, if_false);
4068}
4069
4070
4071void FullCodeGenerator::VisitThisFunction(ThisFunction* expr) {
4072 __ ld(v0, MemOperand(fp, JavaScriptFrameConstants::kFunctionOffset));
4073 context()->Plug(v0);
4074}
4075
4076
4077Register FullCodeGenerator::result_register() {
4078 return v0;
4079}
4080
4081
4082Register FullCodeGenerator::context_register() {
4083 return cp;
4084}
4085
4086
4087void FullCodeGenerator::StoreToFrameField(int frame_offset, Register value) {
4088 // DCHECK_EQ(POINTER_SIZE_ALIGN(frame_offset), frame_offset);
4089 DCHECK(IsAligned(frame_offset, kPointerSize));
4090 // __ sw(value, MemOperand(fp, frame_offset));
4091 __ sd(value, MemOperand(fp, frame_offset));
4092}
4093
4094
4095void FullCodeGenerator::LoadContextField(Register dst, int context_index) {
4096 __ ld(dst, ContextMemOperand(cp, context_index));
4097}
4098
4099
4100void FullCodeGenerator::PushFunctionArgumentForContextAllocation() {
4101 Scope* closure_scope = scope()->ClosureScope();
4102 if (closure_scope->is_script_scope() ||
4103 closure_scope->is_module_scope()) {
4104 // Contexts nested in the native context have a canonical empty function
4105 // as their closure, not the anonymous closure containing the global
4106 // code.
4107 __ LoadNativeContextSlot(Context::CLOSURE_INDEX, at);
4108 } else if (closure_scope->is_eval_scope()) {
4109 // Contexts created by a call to eval have the same closure as the
4110 // context calling eval, not the anonymous closure containing the eval
4111 // code. Fetch it from the context.
4112 __ ld(at, ContextMemOperand(cp, Context::CLOSURE_INDEX));
4113 } else {
4114 DCHECK(closure_scope->is_function_scope());
4115 __ ld(at, MemOperand(fp, JavaScriptFrameConstants::kFunctionOffset));
4116 }
4117 PushOperand(at);
4118}
4119
4120
4121// ----------------------------------------------------------------------------
4122// Non-local control flow support.
4123
4124void FullCodeGenerator::EnterFinallyBlock() {
4125 DCHECK(!result_register().is(a1));
4126 // Store pending message while executing finally block.
4127 ExternalReference pending_message_obj =
4128 ExternalReference::address_of_pending_message_obj(isolate());
4129 __ li(at, Operand(pending_message_obj));
4130 __ ld(a1, MemOperand(at));
4131 PushOperand(a1);
4132
4133 ClearPendingMessage();
4134}
4135
4136
4137void FullCodeGenerator::ExitFinallyBlock() {
4138 DCHECK(!result_register().is(a1));
4139 // Restore pending message from stack.
4140 PopOperand(a1);
4141 ExternalReference pending_message_obj =
4142 ExternalReference::address_of_pending_message_obj(isolate());
4143 __ li(at, Operand(pending_message_obj));
4144 __ sd(a1, MemOperand(at));
4145}
4146
4147
4148void FullCodeGenerator::ClearPendingMessage() {
4149 DCHECK(!result_register().is(a1));
4150 ExternalReference pending_message_obj =
4151 ExternalReference::address_of_pending_message_obj(isolate());
4152 __ LoadRoot(a1, Heap::kTheHoleValueRootIndex);
4153 __ li(at, Operand(pending_message_obj));
4154 __ sd(a1, MemOperand(at));
4155}
4156
4157
4158void FullCodeGenerator::EmitLoadStoreICSlot(FeedbackVectorSlot slot) {
4159 DCHECK(!slot.IsInvalid());
4160 __ li(VectorStoreICTrampolineDescriptor::SlotRegister(),
4161 Operand(SmiFromSlot(slot)));
4162}
4163
4164void FullCodeGenerator::DeferredCommands::EmitCommands() {
4165 __ Pop(result_register()); // Restore the accumulator.
4166 __ Pop(a1); // Get the token.
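  // Each deferred command is keyed by a token; only the command whose token
  // matches the one just popped into a1 is performed.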
4167 for (DeferredCommand cmd : commands_) {
4168 Label skip;
4169 __ li(at, Operand(Smi::FromInt(cmd.token)));
4170 __ Branch(&skip, ne, a1, Operand(at));
4171 switch (cmd.command) {
4172 case kReturn:
4173 codegen_->EmitUnwindAndReturn();
4174 break;
4175 case kThrow:
4176 __ Push(result_register());
4177 __ CallRuntime(Runtime::kReThrow);
4178 break;
4179 case kContinue:
4180 codegen_->EmitContinue(cmd.target);
4181 break;
4182 case kBreak:
4183 codegen_->EmitBreak(cmd.target);
4184 break;
4185 }
4186 __ bind(&skip);
4187 }
4188}
4189
4190#undef __
4191
4192
4193void BackEdgeTable::PatchAt(Code* unoptimized_code,
4194 Address pc,
4195 BackEdgeState target_state,
4196 Code* replacement_code) {
4197 static const int kInstrSize = Assembler::kInstrSize;
4198 Address branch_address = pc - 8 * kInstrSize;
4199 Isolate* isolate = unoptimized_code->GetIsolate();
4200 CodePatcher patcher(isolate, branch_address, 1);
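  // Only the first instruction of the back-edge sequence is rewritten here;
  // the target address in the 6-instruction load sequence is updated below.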
4201
4202 switch (target_state) {
4203 case INTERRUPT:
4204 // slt at, a3, zero_reg (in case of count based interrupts)
4205 // beq at, zero_reg, ok
4206 // lui t9, <interrupt stub address> upper
4207 // ori t9, <interrupt stub address> u-middle
4208 // dsll t9, t9, 16
4209 // ori t9, <interrupt stub address> lower
4210 // jalr t9
4211 // nop
4212 // ok-label ----- pc_after points here
4213 patcher.masm()->slt(at, a3, zero_reg);
4214 break;
4215 case ON_STACK_REPLACEMENT:
4216 case OSR_AFTER_STACK_CHECK:
4217 // addiu at, zero_reg, 1
4218 // beq at, zero_reg, ok ;; Not changed
4219 // lui t9, <on-stack replacement address> upper
4220 // ori t9, <on-stack replacement address> middle
4221 // dsll t9, t9, 16
4222 // ori t9, <on-stack replacement address> lower
4223 // jalr t9 ;; Not changed
4224 // nop ;; Not changed
4225 // ok-label ----- pc_after points here
4226 patcher.masm()->daddiu(at, zero_reg, 1);
4227 break;
4228 }
4229 Address pc_immediate_load_address = pc - 6 * kInstrSize;
4230 // Replace the stack check address in the load-immediate (6-instr sequence)
4231 // with the entry address of the replacement code.
4232 Assembler::set_target_address_at(isolate, pc_immediate_load_address,
4233 replacement_code->entry());
4234
4235 unoptimized_code->GetHeap()->incremental_marking()->RecordCodeTargetPatch(
4236 unoptimized_code, pc_immediate_load_address, replacement_code);
4237}
4238
4239
4240BackEdgeTable::BackEdgeState BackEdgeTable::GetBackEdgeState(
4241 Isolate* isolate,
4242 Code* unoptimized_code,
4243 Address pc) {
4244 static const int kInstrSize = Assembler::kInstrSize;
4245 Address branch_address = pc - 8 * kInstrSize;
4246 Address pc_immediate_load_address = pc - 6 * kInstrSize;
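  // An unpatched back edge still starts with 'slt', so anything that is not
  // an add-immediate is the interrupt check; otherwise the loaded call target
  // tells ON_STACK_REPLACEMENT and OSR_AFTER_STACK_CHECK apart.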
4247
4248 DCHECK(Assembler::IsBeq(Assembler::instr_at(pc - 7 * kInstrSize)));
4249 if (!Assembler::IsAddImmediate(Assembler::instr_at(branch_address))) {
4250 DCHECK(reinterpret_cast<uint64_t>(
4251 Assembler::target_address_at(pc_immediate_load_address)) ==
4252 reinterpret_cast<uint64_t>(
4253 isolate->builtins()->InterruptCheck()->entry()));
4254 return INTERRUPT;
4255 }
4256
4257 DCHECK(Assembler::IsAddImmediate(Assembler::instr_at(branch_address)));
4258
4259 if (reinterpret_cast<uint64_t>(
4260 Assembler::target_address_at(pc_immediate_load_address)) ==
4261 reinterpret_cast<uint64_t>(
4262 isolate->builtins()->OnStackReplacement()->entry())) {
4263 return ON_STACK_REPLACEMENT;
4264 }
4265
4266 DCHECK(reinterpret_cast<uint64_t>(
4267 Assembler::target_address_at(pc_immediate_load_address)) ==
4268 reinterpret_cast<uint64_t>(
4269 isolate->builtins()->OsrAfterStackCheck()->entry()));
4270 return OSR_AFTER_STACK_CHECK;
4271}
4272
4273
4274} // namespace internal
4275} // namespace v8
4276
4277#endif // V8_TARGET_ARCH_MIPS64