1// Copyright 2012 the V8 project authors. All rights reserved.
2// Use of this source code is governed by a BSD-style license that can be
3// found in the LICENSE file.
4
5#if V8_TARGET_ARCH_MIPS
6
7// Note on Mips implementation:
8//
9// The result_register() for mips is the 'v0' register, which is defined
10// by the ABI to contain function return values. However, the first
11// parameter to a function is defined to be 'a0'. So there are many
12// places where we have to move a previous result in v0 to a0 for the
13// next call: mov(a0, v0). This is not needed on the other architectures.
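// For example, DoTest() later in this file moves the accumulator into the
// first argument register before calling the ToBoolean stub:
//
//   __ mov(a0, result_register());  // result_register() is v0 on MIPS.
//
// (Illustrative note only; see the various mov(a0, v0) moves in this file.)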
14
15#include "src/ast/scopes.h"
16#include "src/code-factory.h"
17#include "src/code-stubs.h"
18#include "src/codegen.h"
19#include "src/debug/debug.h"
20#include "src/full-codegen/full-codegen.h"
21#include "src/ic/ic.h"
22#include "src/parsing/parser.h"
23
24#include "src/mips/code-stubs-mips.h"
25#include "src/mips/macro-assembler-mips.h"
26
27namespace v8 {
28namespace internal {
29
30#define __ ACCESS_MASM(masm())
31
32// A patch site is a location in the code which it is possible to patch. This
33// class has a number of methods to emit the code which is patchable and the
34// method EmitPatchInfo to record a marker back to the patchable code. This
35// marker is an andi zero_reg, rx, #yyyy instruction, and rx * 0x0000ffff + yyyy
36// (raw 16 bit immediate value is used) is the delta from the pc to the first
37// instruction of the patchable code.
38// The marker instruction is effectively a NOP (dest is zero_reg) and will
39// never be emitted by normal code.
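// For illustration (hypothetical delta): with kImm16Mask == 0xffff, a patch site
// that lies 3 instructions before the marker is recorded as
//   andi zero_reg, Register::from_code(3 / 0xffff), 3 % 0xffff
// i.e. andi(zero_reg, zero_reg, 3), and the patcher recovers 0 * 0xffff + 3 = 3.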
40class JumpPatchSite BASE_EMBEDDED {
41 public:
42 explicit JumpPatchSite(MacroAssembler* masm) : masm_(masm) {
43#ifdef DEBUG
44 info_emitted_ = false;
45#endif
46 }
47
48 ~JumpPatchSite() {
49 DCHECK(patch_site_.is_bound() == info_emitted_);
50 }
51
52 // When initially emitting this, ensure that a jump is always generated to skip
53 // the inlined smi code.
54 void EmitJumpIfNotSmi(Register reg, Label* target) {
55 DCHECK(!patch_site_.is_bound() && !info_emitted_);
56 Assembler::BlockTrampolinePoolScope block_trampoline_pool(masm_);
57 __ bind(&patch_site_);
58 __ andi(at, reg, 0);
59 // Always taken before patched.
60 __ BranchShort(target, eq, at, Operand(zero_reg));
61 }
62
63 // When initially emitting this, ensure that a jump is never generated to skip
64 // the inlined smi code.
65 void EmitJumpIfSmi(Register reg, Label* target) {
66 Assembler::BlockTrampolinePoolScope block_trampoline_pool(masm_);
67 DCHECK(!patch_site_.is_bound() && !info_emitted_);
68 __ bind(&patch_site_);
69 __ andi(at, reg, 0);
70 // Never taken before patched.
71 __ BranchShort(target, ne, at, Operand(zero_reg));
72 }
73
74 void EmitPatchInfo() {
75 if (patch_site_.is_bound()) {
76 int delta_to_patch_site = masm_->InstructionsGeneratedSince(&patch_site_);
77 Register reg = Register::from_code(delta_to_patch_site / kImm16Mask);
78 __ andi(zero_reg, reg, delta_to_patch_site % kImm16Mask);
79#ifdef DEBUG
80 info_emitted_ = true;
81#endif
82 } else {
83 __ nop(); // Signals no inlined code.
84 }
85 }
86
87 private:
88 MacroAssembler* masm() { return masm_; }
89 MacroAssembler* masm_;
90 Label patch_site_;
91#ifdef DEBUG
92 bool info_emitted_;
93#endif
94};
95
96
97// Generate code for a JS function. On entry to the function the receiver
98// and arguments have been pushed on the stack left to right. The actual
99// argument count matches the formal parameter count expected by the
100// function.
101//
102// The live registers are:
103// o a1: the JS function object being called (i.e. ourselves)
104// o a3: the new target value
105// o cp: our context
106// o fp: our caller's frame pointer
107// o sp: stack pointer
108// o ra: return address
109//
110// The function builds a JS frame. Please see JavaScriptFrameConstants in
111// frames-mips.h for its layout.
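// Roughly: the receiver and arguments pushed by the caller sit highest; below
// them the return address and the caller's fp are saved (fp points at the saved
// fp); the context and the function object occupy the two slots just below fp;
// the stack-allocated locals follow below that.
// (A sketch only; JavaScriptFrameConstants holds the authoritative offsets.)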
112void FullCodeGenerator::Generate() {
113 CompilationInfo* info = info_;
114 profiling_counter_ = isolate()->factory()->NewCell(
115 Handle<Smi>(Smi::FromInt(FLAG_interrupt_budget), isolate()));
116 SetFunctionPosition(literal());
117 Comment cmnt(masm_, "[ function compiled by full code generator");
118
119 ProfileEntryHookStub::MaybeCallEntryHook(masm_);
120
121 if (FLAG_debug_code && info->ExpectsJSReceiverAsReceiver()) {
122 int receiver_offset = info->scope()->num_parameters() * kPointerSize;
123 __ lw(a2, MemOperand(sp, receiver_offset));
124 __ AssertNotSmi(a2);
125 __ GetObjectType(a2, a2, a2);
126 __ Check(ge, kSloppyFunctionExpectsJSReceiverReceiver, a2,
127 Operand(FIRST_JS_RECEIVER_TYPE));
128 }
129
130 // Open a frame scope to indicate that there is a frame on the stack. The
131 // MANUAL indicates that the scope shouldn't actually generate code to set up
132 // the frame (that is done below).
133 FrameScope frame_scope(masm_, StackFrame::MANUAL);
134
135 info->set_prologue_offset(masm_->pc_offset());
136 __ Prologue(info->GeneratePreagedPrologue());
137
138 { Comment cmnt(masm_, "[ Allocate locals");
139 int locals_count = info->scope()->num_stack_slots();
140 // Generators allocate locals, if any, in context slots.
141 DCHECK(!IsGeneratorFunction(info->literal()->kind()) || locals_count == 0);
142 OperandStackDepthIncrement(locals_count);
143 if (locals_count > 0) {
144 if (locals_count >= 128) {
145 Label ok;
146 __ Subu(t5, sp, Operand(locals_count * kPointerSize));
147 __ LoadRoot(a2, Heap::kRealStackLimitRootIndex);
148 __ Branch(&ok, hs, t5, Operand(a2));
149 __ CallRuntime(Runtime::kThrowStackOverflow);
150 __ bind(&ok);
151 }
152 __ LoadRoot(t5, Heap::kUndefinedValueRootIndex);
153 int kMaxPushes = FLAG_optimize_for_size ? 4 : 32;
154 if (locals_count >= kMaxPushes) {
155 int loop_iterations = locals_count / kMaxPushes;
156 __ li(a2, Operand(loop_iterations));
157 Label loop_header;
158 __ bind(&loop_header);
159 // Do pushes.
160 __ Subu(sp, sp, Operand(kMaxPushes * kPointerSize));
161 for (int i = 0; i < kMaxPushes; i++) {
162 __ sw(t5, MemOperand(sp, i * kPointerSize));
163 }
164 // Continue loop if not done.
165 __ Subu(a2, a2, Operand(1));
166 __ Branch(&loop_header, ne, a2, Operand(zero_reg));
167 }
168 int remaining = locals_count % kMaxPushes;
169 // Emit the remaining pushes.
170 __ Subu(sp, sp, Operand(remaining * kPointerSize));
171 for (int i = 0; i < remaining; i++) {
172 __ sw(t5, MemOperand(sp, i * kPointerSize));
173 }
174 }
175 }
176
177 bool function_in_register_a1 = true;
178
179 // Possibly allocate a local context.
180 if (info->scope()->num_heap_slots() > 0) {
181 Comment cmnt(masm_, "[ Allocate context");
182 // Argument to NewContext is the function, which is still in a1.
183 bool need_write_barrier = true;
184 int slots = info->scope()->num_heap_slots() - Context::MIN_CONTEXT_SLOTS;
185 if (info->scope()->is_script_scope()) {
186 __ push(a1);
187 __ Push(info->scope()->GetScopeInfo(info->isolate()));
188 __ CallRuntime(Runtime::kNewScriptContext);
189 PrepareForBailoutForId(BailoutId::ScriptContext(), TOS_REG);
190 // The new target value is not used, clobbering is safe.
191 DCHECK_NULL(info->scope()->new_target_var());
192 } else {
193 if (info->scope()->new_target_var() != nullptr) {
194 __ push(a3); // Preserve new target.
195 }
196 if (slots <= FastNewContextStub::kMaximumSlots) {
197 FastNewContextStub stub(isolate(), slots);
198 __ CallStub(&stub);
199 // Result of FastNewContextStub is always in new space.
200 need_write_barrier = false;
201 } else {
202 __ push(a1);
203 __ CallRuntime(Runtime::kNewFunctionContext);
204 }
205 if (info->scope()->new_target_var() != nullptr) {
206 __ pop(a3); // Restore new target.
207 }
208 }
209 function_in_register_a1 = false;
210 // Context is returned in v0. It replaces the context passed to us.
211 // It's saved in the stack and kept live in cp.
212 __ mov(cp, v0);
213 __ sw(v0, MemOperand(fp, StandardFrameConstants::kContextOffset));
214 // Copy any necessary parameters into the context.
215 int num_parameters = info->scope()->num_parameters();
216 int first_parameter = info->scope()->has_this_declaration() ? -1 : 0;
217 for (int i = first_parameter; i < num_parameters; i++) {
218 Variable* var = (i == -1) ? scope()->receiver() : scope()->parameter(i);
219 if (var->IsContextSlot()) {
220 int parameter_offset = StandardFrameConstants::kCallerSPOffset +
221 (num_parameters - 1 - i) * kPointerSize;
222 // Load parameter from stack.
223 __ lw(a0, MemOperand(fp, parameter_offset));
224 // Store it in the context.
225 MemOperand target = ContextMemOperand(cp, var->index());
226 __ sw(a0, target);
227
228 // Update the write barrier.
229 if (need_write_barrier) {
230 __ RecordWriteContextSlot(cp, target.offset(), a0, a2,
231 kRAHasBeenSaved, kDontSaveFPRegs);
232 } else if (FLAG_debug_code) {
233 Label done;
234 __ JumpIfInNewSpace(cp, a0, &done);
235 __ Abort(kExpectedNewSpaceObject);
236 __ bind(&done);
237 }
238 }
239 }
240 }
241
242 // The registers holding this function and the new target are both trashed in
243 // case we bail out here. But since that can happen only when the new target is
244 // not used and we allocate a context, |function_in_register_a1| is still correct.
245 PrepareForBailoutForId(BailoutId::FunctionContext(), NO_REGISTERS);
246
247 // Possibly set up a local binding to the this function which is used in
248 // derived constructors with super calls.
249 Variable* this_function_var = scope()->this_function_var();
250 if (this_function_var != nullptr) {
251 Comment cmnt(masm_, "[ This function");
252 if (!function_in_register_a1) {
253 __ lw(a1, MemOperand(fp, JavaScriptFrameConstants::kFunctionOffset));
254 // The write barrier clobbers the register again, so keep it marked as such.
255 }
256 SetVar(this_function_var, a1, a0, a2);
257 }
258
259 // Possibly set up a local binding to the new target value.
260 Variable* new_target_var = scope()->new_target_var();
261 if (new_target_var != nullptr) {
262 Comment cmnt(masm_, "[ new.target");
263 SetVar(new_target_var, a3, a0, a2);
264 }
265
266 // Possibly allocate a rest parameter array.
267 int rest_index;
268 Variable* rest_param = scope()->rest_parameter(&rest_index);
269 if (rest_param) {
270 Comment cmnt(masm_, "[ Allocate rest parameter array");
271 if (!function_in_register_a1) {
272 __ lw(a1, MemOperand(fp, JavaScriptFrameConstants::kFunctionOffset));
273 }
274 FastNewRestParameterStub stub(isolate());
275 __ CallStub(&stub);
276 function_in_register_a1 = false;
277 SetVar(rest_param, v0, a1, a2);
278 }
279
280 Variable* arguments = scope()->arguments();
281 if (arguments != NULL) {
282 // Function uses arguments object.
283 Comment cmnt(masm_, "[ Allocate arguments object");
284 if (!function_in_register_a1) {
285 // Load this again, if it's used by the local context below.
286 __ lw(a1, MemOperand(fp, JavaScriptFrameConstants::kFunctionOffset));
287 }
288 if (is_strict(language_mode()) || !has_simple_parameters()) {
289 FastNewStrictArgumentsStub stub(isolate());
290 __ CallStub(&stub);
291 } else if (literal()->has_duplicate_parameters()) {
292 __ Push(a1);
293 __ CallRuntime(Runtime::kNewSloppyArguments_Generic);
294 } else {
295 FastNewSloppyArgumentsStub stub(isolate());
296 __ CallStub(&stub);
297 }
298
299 SetVar(arguments, v0, a1, a2);
300 }
301
302 if (FLAG_trace) {
303 __ CallRuntime(Runtime::kTraceEnter);
304 }
305
306 // Visit the declarations and body unless there is an illegal
307 // redeclaration.
308 if (scope()->HasIllegalRedeclaration()) {
309 Comment cmnt(masm_, "[ Declarations");
310 VisitForEffect(scope()->GetIllegalRedeclaration());
311
312 } else {
313 PrepareForBailoutForId(BailoutId::FunctionEntry(), NO_REGISTERS);
314 { Comment cmnt(masm_, "[ Declarations");
315 VisitDeclarations(scope()->declarations());
316 }
317
318 // Assert that the declarations do not use ICs. Otherwise the debugger
319 // won't be able to redirect a PC at an IC to the correct IC in newly
320 // recompiled code.
321 DCHECK_EQ(0, ic_total_count_);
322
323 { Comment cmnt(masm_, "[ Stack check");
324 PrepareForBailoutForId(BailoutId::Declarations(), NO_REGISTERS);
325 Label ok;
326 __ LoadRoot(at, Heap::kStackLimitRootIndex);
327 __ Branch(&ok, hs, sp, Operand(at));
328 Handle<Code> stack_check = isolate()->builtins()->StackCheck();
329 PredictableCodeSizeScope predictable(masm_,
330 masm_->CallSize(stack_check, RelocInfo::CODE_TARGET));
331 __ Call(stack_check, RelocInfo::CODE_TARGET);
332 __ bind(&ok);
333 }
334
335 { Comment cmnt(masm_, "[ Body");
336 DCHECK(loop_depth() == 0);
337 VisitStatements(literal()->body());
338 DCHECK(loop_depth() == 0);
339 }
340 }
341
342 // Always emit a 'return undefined' in case control fell off the end of
343 // the body.
344 { Comment cmnt(masm_, "[ return <undefined>;");
345 __ LoadRoot(v0, Heap::kUndefinedValueRootIndex);
346 }
347 EmitReturnSequence();
348}
349
350
351void FullCodeGenerator::ClearAccumulator() {
352 DCHECK(Smi::FromInt(0) == 0);
353 __ mov(v0, zero_reg);
354}
355
356
357void FullCodeGenerator::EmitProfilingCounterDecrement(int delta) {
358 __ li(a2, Operand(profiling_counter_));
359 __ lw(a3, FieldMemOperand(a2, Cell::kValueOffset));
360 __ Subu(a3, a3, Operand(Smi::FromInt(delta)));
361 __ sw(a3, FieldMemOperand(a2, Cell::kValueOffset));
362}
363
364
365void FullCodeGenerator::EmitProfilingCounterReset() {
366 int reset_value = FLAG_interrupt_budget;
367 if (info_->is_debug()) {
368 // Detect debug break requests as soon as possible.
369 reset_value = FLAG_interrupt_budget >> 4;
370 }
371 __ li(a2, Operand(profiling_counter_));
372 __ li(a3, Operand(Smi::FromInt(reset_value)));
373 __ sw(a3, FieldMemOperand(a2, Cell::kValueOffset));
374}
375
376
377void FullCodeGenerator::EmitBackEdgeBookkeeping(IterationStatement* stmt,
378 Label* back_edge_target) {
379 // The generated code is used in Deoptimizer::PatchStackCheckCodeAt so we need
380 // to make sure it is constant. Branch may emit a skip-or-jump sequence
381 // instead of the normal Branch. It seems that the "skip" part of that
382 // sequence is about as long as this Branch would be so it is safe to ignore
383 // that.
384 Assembler::BlockTrampolinePoolScope block_trampoline_pool(masm_);
385 Comment cmnt(masm_, "[ Back edge bookkeeping");
386 Label ok;
387 DCHECK(back_edge_target->is_bound());
388 int distance = masm_->SizeOfCodeGeneratedSince(back_edge_target);
389 int weight = Min(kMaxBackEdgeWeight,
390 Max(1, distance / kCodeSizeMultiplier));
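  // E.g. (illustration only): a back edge kCodeSizeMultiplier * 10 bytes behind
  // the current pc yields a weight of 10, so larger loop bodies drain the
  // interrupt budget faster and reach the InterruptCheck builtin sooner.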
391 EmitProfilingCounterDecrement(weight);
392 __ slt(at, a3, zero_reg);
393 __ beq(at, zero_reg, &ok);
394 // Call will emit a li t9 first, so it is safe to use the delay slot.
395 __ Call(isolate()->builtins()->InterruptCheck(), RelocInfo::CODE_TARGET);
396 // Record a mapping of this PC offset to the OSR id. This is used to find
397 // the AST id from the unoptimized code in order to use it as a key into
398 // the deoptimization input data found in the optimized code.
399 RecordBackEdge(stmt->OsrEntryId());
400 EmitProfilingCounterReset();
401
402 __ bind(&ok);
403 PrepareForBailoutForId(stmt->EntryId(), NO_REGISTERS);
404 // Record a mapping of the OSR id to this PC. This is used if the OSR
405 // entry becomes the target of a bailout. We don't expect it to be, but
406 // we want it to work if it is.
407 PrepareForBailoutForId(stmt->OsrEntryId(), NO_REGISTERS);
408}
409
410void FullCodeGenerator::EmitProfilingCounterHandlingForReturnSequence(
411 bool is_tail_call) {
412 // Pretend that the exit is a backwards jump to the entry.
413 int weight = 1;
414 if (info_->ShouldSelfOptimize()) {
415 weight = FLAG_interrupt_budget / FLAG_self_opt_count;
416 } else {
417 int distance = masm_->pc_offset();
418 weight = Min(kMaxBackEdgeWeight, Max(1, distance / kCodeSizeMultiplier));
419 }
420 EmitProfilingCounterDecrement(weight);
421 Label ok;
422 __ Branch(&ok, ge, a3, Operand(zero_reg));
423 // Don't need to save result register if we are going to do a tail call.
424 if (!is_tail_call) {
425 __ push(v0);
426 }
427 __ Call(isolate()->builtins()->InterruptCheck(), RelocInfo::CODE_TARGET);
428 if (!is_tail_call) {
429 __ pop(v0);
430 }
431 EmitProfilingCounterReset();
432 __ bind(&ok);
433}
434
435void FullCodeGenerator::EmitReturnSequence() {
436 Comment cmnt(masm_, "[ Return sequence");
437 if (return_label_.is_bound()) {
438 __ Branch(&return_label_);
439 } else {
440 __ bind(&return_label_);
441 if (FLAG_trace) {
442 // Push the return value on the stack as the parameter.
443 // Runtime::TraceExit returns its parameter in v0.
444 __ push(v0);
445 __ CallRuntime(Runtime::kTraceExit);
446 }
447 EmitProfilingCounterHandlingForReturnSequence(false);
448
449 // Make sure that the constant pool is not emitted inside of the return
450 // sequence.
451 { Assembler::BlockTrampolinePoolScope block_trampoline_pool(masm_);
452 // Here we use masm_-> instead of the __ macro to keep the code coverage
453 // tool from instrumenting it, as we rely on the exact code size here.
454 int32_t arg_count = info_->scope()->num_parameters() + 1;
455 int32_t sp_delta = arg_count * kPointerSize;
456 SetReturnPosition(literal());
457 masm_->mov(sp, fp);
458 masm_->MultiPop(static_cast<RegList>(fp.bit() | ra.bit()));
459 masm_->Addu(sp, sp, Operand(sp_delta));
460 masm_->Jump(ra);
461 }
462 }
463}
464
465
466void FullCodeGenerator::StackValueContext::Plug(Variable* var) const {
467 DCHECK(var->IsStackAllocated() || var->IsContextSlot());
468 codegen()->GetVar(result_register(), var);
469 codegen()->PushOperand(result_register());
470}
471
472
473void FullCodeGenerator::EffectContext::Plug(Heap::RootListIndex index) const {
474}
475
476
477void FullCodeGenerator::AccumulatorValueContext::Plug(
478 Heap::RootListIndex index) const {
479 __ LoadRoot(result_register(), index);
480}
481
482
483void FullCodeGenerator::StackValueContext::Plug(
484 Heap::RootListIndex index) const {
485 __ LoadRoot(result_register(), index);
486 codegen()->PushOperand(result_register());
487}
488
489
490void FullCodeGenerator::TestContext::Plug(Heap::RootListIndex index) const {
491 codegen()->PrepareForBailoutBeforeSplit(condition(),
492 true,
493 true_label_,
494 false_label_);
495 if (index == Heap::kUndefinedValueRootIndex ||
496 index == Heap::kNullValueRootIndex ||
497 index == Heap::kFalseValueRootIndex) {
498 if (false_label_ != fall_through_) __ Branch(false_label_);
499 } else if (index == Heap::kTrueValueRootIndex) {
500 if (true_label_ != fall_through_) __ Branch(true_label_);
501 } else {
502 __ LoadRoot(result_register(), index);
503 codegen()->DoTest(this);
504 }
505}
506
507
508void FullCodeGenerator::EffectContext::Plug(Handle<Object> lit) const {
509}
510
511
512void FullCodeGenerator::AccumulatorValueContext::Plug(
513 Handle<Object> lit) const {
514 __ li(result_register(), Operand(lit));
515}
516
517
518void FullCodeGenerator::StackValueContext::Plug(Handle<Object> lit) const {
519 // Immediates cannot be pushed directly.
520 __ li(result_register(), Operand(lit));
521 codegen()->PushOperand(result_register());
522}
523
524
525void FullCodeGenerator::TestContext::Plug(Handle<Object> lit) const {
526 codegen()->PrepareForBailoutBeforeSplit(condition(),
527 true,
528 true_label_,
529 false_label_);
530 DCHECK(lit->IsNull() || lit->IsUndefined() || !lit->IsUndetectableObject());
531 if (lit->IsUndefined() || lit->IsNull() || lit->IsFalse()) {
532 if (false_label_ != fall_through_) __ Branch(false_label_);
533 } else if (lit->IsTrue() || lit->IsJSObject()) {
534 if (true_label_ != fall_through_) __ Branch(true_label_);
535 } else if (lit->IsString()) {
536 if (String::cast(*lit)->length() == 0) {
537 if (false_label_ != fall_through_) __ Branch(false_label_);
538 } else {
539 if (true_label_ != fall_through_) __ Branch(true_label_);
540 }
541 } else if (lit->IsSmi()) {
542 if (Smi::cast(*lit)->value() == 0) {
543 if (false_label_ != fall_through_) __ Branch(false_label_);
544 } else {
545 if (true_label_ != fall_through_) __ Branch(true_label_);
546 }
547 } else {
548 // For simplicity we always test the accumulator register.
549 __ li(result_register(), Operand(lit));
550 codegen()->DoTest(this);
551 }
552}
553
554
555void FullCodeGenerator::StackValueContext::DropAndPlug(int count,
556 Register reg) const {
557 DCHECK(count > 0);
558 if (count > 1) codegen()->DropOperands(count - 1);
559 __ sw(reg, MemOperand(sp, 0));
560}
561
562
563void FullCodeGenerator::EffectContext::Plug(Label* materialize_true,
564 Label* materialize_false) const {
565 DCHECK(materialize_true == materialize_false);
566 __ bind(materialize_true);
567}
568
569
570void FullCodeGenerator::AccumulatorValueContext::Plug(
571 Label* materialize_true,
572 Label* materialize_false) const {
573 Label done;
574 __ bind(materialize_true);
575 __ LoadRoot(result_register(), Heap::kTrueValueRootIndex);
576 __ Branch(&done);
577 __ bind(materialize_false);
578 __ LoadRoot(result_register(), Heap::kFalseValueRootIndex);
579 __ bind(&done);
580}
581
582
583void FullCodeGenerator::StackValueContext::Plug(
584 Label* materialize_true,
585 Label* materialize_false) const {
586 codegen()->OperandStackDepthIncrement(1);
587 Label done;
588 __ bind(materialize_true);
589 __ LoadRoot(at, Heap::kTrueValueRootIndex);
590 // Push the value as the following branch can clobber at in long branch mode.
591 __ push(at);
592 __ Branch(&done);
593 __ bind(materialize_false);
594 __ LoadRoot(at, Heap::kFalseValueRootIndex);
595 __ push(at);
596 __ bind(&done);
597}
598
599
600void FullCodeGenerator::TestContext::Plug(Label* materialize_true,
601 Label* materialize_false) const {
602 DCHECK(materialize_true == true_label_);
603 DCHECK(materialize_false == false_label_);
604}
605
606
607void FullCodeGenerator::AccumulatorValueContext::Plug(bool flag) const {
608 Heap::RootListIndex value_root_index =
609 flag ? Heap::kTrueValueRootIndex : Heap::kFalseValueRootIndex;
610 __ LoadRoot(result_register(), value_root_index);
611}
612
613
614void FullCodeGenerator::StackValueContext::Plug(bool flag) const {
615 Heap::RootListIndex value_root_index =
616 flag ? Heap::kTrueValueRootIndex : Heap::kFalseValueRootIndex;
617 __ LoadRoot(at, value_root_index);
618 codegen()->PushOperand(at);
619}
620
621
622void FullCodeGenerator::TestContext::Plug(bool flag) const {
623 codegen()->PrepareForBailoutBeforeSplit(condition(),
624 true,
625 true_label_,
626 false_label_);
627 if (flag) {
628 if (true_label_ != fall_through_) __ Branch(true_label_);
629 } else {
630 if (false_label_ != fall_through_) __ Branch(false_label_);
631 }
632}
633
634
635void FullCodeGenerator::DoTest(Expression* condition,
636 Label* if_true,
637 Label* if_false,
638 Label* fall_through) {
639 __ mov(a0, result_register());
640 Handle<Code> ic = ToBooleanStub::GetUninitialized(isolate());
641 CallIC(ic, condition->test_id());
642 __ LoadRoot(at, Heap::kTrueValueRootIndex);
643 Split(eq, result_register(), Operand(at), if_true, if_false, fall_through);
644}
645
646
647void FullCodeGenerator::Split(Condition cc,
648 Register lhs,
649 const Operand& rhs,
650 Label* if_true,
651 Label* if_false,
652 Label* fall_through) {
653 if (if_false == fall_through) {
654 __ Branch(if_true, cc, lhs, rhs);
655 } else if (if_true == fall_through) {
656 __ Branch(if_false, NegateCondition(cc), lhs, rhs);
657 } else {
658 __ Branch(if_true, cc, lhs, rhs);
659 __ Branch(if_false);
660 }
661}
662
663
664MemOperand FullCodeGenerator::StackOperand(Variable* var) {
665 DCHECK(var->IsStackAllocated());
666 // Offset is negative because higher indexes are at lower addresses.
667 int offset = -var->index() * kPointerSize;
668 // Adjust by a (parameter or local) base offset.
669 if (var->IsParameter()) {
670 offset += (info_->scope()->num_parameters() + 1) * kPointerSize;
671 } else {
672 offset += JavaScriptFrameConstants::kLocal0Offset;
673 }
674 return MemOperand(fp, offset);
675}
676
677
678MemOperand FullCodeGenerator::VarOperand(Variable* var, Register scratch) {
679 DCHECK(var->IsContextSlot() || var->IsStackAllocated());
680 if (var->IsContextSlot()) {
681 int context_chain_length = scope()->ContextChainLength(var->scope());
682 __ LoadContext(scratch, context_chain_length);
683 return ContextMemOperand(scratch, var->index());
684 } else {
685 return StackOperand(var);
686 }
687}
688
689
690void FullCodeGenerator::GetVar(Register dest, Variable* var) {
691 // Use destination as scratch.
692 MemOperand location = VarOperand(var, dest);
693 __ lw(dest, location);
694}
695
696
697void FullCodeGenerator::SetVar(Variable* var,
698 Register src,
699 Register scratch0,
700 Register scratch1) {
701 DCHECK(var->IsContextSlot() || var->IsStackAllocated());
702 DCHECK(!scratch0.is(src));
703 DCHECK(!scratch0.is(scratch1));
704 DCHECK(!scratch1.is(src));
705 MemOperand location = VarOperand(var, scratch0);
706 __ sw(src, location);
707 // Emit the write barrier code if the location is in the heap.
708 if (var->IsContextSlot()) {
709 __ RecordWriteContextSlot(scratch0,
710 location.offset(),
711 src,
712 scratch1,
713 kRAHasBeenSaved,
714 kDontSaveFPRegs);
715 }
716}
717
718
719void FullCodeGenerator::PrepareForBailoutBeforeSplit(Expression* expr,
720 bool should_normalize,
721 Label* if_true,
722 Label* if_false) {
723 // Only prepare for bailouts before splits if we're in a test
724 // context. Otherwise, we let the Visit function deal with the
725 // preparation to avoid preparing with the same AST id twice.
726 if (!context()->IsTest()) return;
727
728 Label skip;
729 if (should_normalize) __ Branch(&skip);
730 PrepareForBailout(expr, TOS_REG);
731 if (should_normalize) {
732 __ LoadRoot(t0, Heap::kTrueValueRootIndex);
733 Split(eq, a0, Operand(t0), if_true, if_false, NULL);
734 __ bind(&skip);
735 }
736}
737
738
739void FullCodeGenerator::EmitDebugCheckDeclarationContext(Variable* variable) {
740 // The variable in the declaration always resides in the current function
741 // context.
742 DCHECK_EQ(0, scope()->ContextChainLength(variable->scope()));
743 if (FLAG_debug_code) {
744 // Check that we're not inside a with or catch context.
745 __ lw(a1, FieldMemOperand(cp, HeapObject::kMapOffset));
746 __ LoadRoot(t0, Heap::kWithContextMapRootIndex);
747 __ Check(ne, kDeclarationInWithContext,
748 a1, Operand(t0));
749 __ LoadRoot(t0, Heap::kCatchContextMapRootIndex);
750 __ Check(ne, kDeclarationInCatchContext,
751 a1, Operand(t0));
752 }
753}
754
755
756void FullCodeGenerator::VisitVariableDeclaration(
757 VariableDeclaration* declaration) {
758 // If it was not possible to allocate the variable at compile time, we
759 // need to "declare" it at runtime to make sure it actually exists in the
760 // local context.
761 VariableProxy* proxy = declaration->proxy();
762 VariableMode mode = declaration->mode();
763 Variable* variable = proxy->var();
764 bool hole_init = mode == LET || mode == CONST || mode == CONST_LEGACY;
765 switch (variable->location()) {
766 case VariableLocation::GLOBAL:
767 case VariableLocation::UNALLOCATED:
768 globals_->Add(variable->name(), zone());
769 globals_->Add(variable->binding_needs_init()
770 ? isolate()->factory()->the_hole_value()
771 : isolate()->factory()->undefined_value(),
772 zone());
773 break;
774
775 case VariableLocation::PARAMETER:
776 case VariableLocation::LOCAL:
777 if (hole_init) {
778 Comment cmnt(masm_, "[ VariableDeclaration");
779 __ LoadRoot(t0, Heap::kTheHoleValueRootIndex);
780 __ sw(t0, StackOperand(variable));
781 }
782 break;
783
784 case VariableLocation::CONTEXT:
785 if (hole_init) {
786 Comment cmnt(masm_, "[ VariableDeclaration");
787 EmitDebugCheckDeclarationContext(variable);
788 __ LoadRoot(at, Heap::kTheHoleValueRootIndex);
789 __ sw(at, ContextMemOperand(cp, variable->index()));
790 // No write barrier since the_hole_value is in old space.
791 PrepareForBailoutForId(proxy->id(), NO_REGISTERS);
792 }
793 break;
794
795 case VariableLocation::LOOKUP: {
796 Comment cmnt(masm_, "[ VariableDeclaration");
797 __ li(a2, Operand(variable->name()));
798 // Declaration nodes are always introduced in one of four modes.
799 DCHECK(IsDeclaredVariableMode(mode));
800 // Push initial value, if any.
801 // Note: For variables we must not push an initial value (such as
802 // 'undefined') because we may have a (legal) redeclaration and we
803 // must not destroy the current value.
804 if (hole_init) {
805 __ LoadRoot(a0, Heap::kTheHoleValueRootIndex);
806 } else {
807 DCHECK(Smi::FromInt(0) == 0);
808 __ mov(a0, zero_reg); // Smi::FromInt(0) indicates no initial value.
809 }
810 __ Push(a2, a0);
811 __ Push(Smi::FromInt(variable->DeclarationPropertyAttributes()));
812 __ CallRuntime(Runtime::kDeclareLookupSlot);
813 break;
814 }
815 }
816}
817
818
819void FullCodeGenerator::VisitFunctionDeclaration(
820 FunctionDeclaration* declaration) {
821 VariableProxy* proxy = declaration->proxy();
822 Variable* variable = proxy->var();
823 switch (variable->location()) {
824 case VariableLocation::GLOBAL:
825 case VariableLocation::UNALLOCATED: {
826 globals_->Add(variable->name(), zone());
827 Handle<SharedFunctionInfo> function =
828 Compiler::GetSharedFunctionInfo(declaration->fun(), script(), info_);
829 // Check for stack-overflow exception.
830 if (function.is_null()) return SetStackOverflow();
831 globals_->Add(function, zone());
832 break;
833 }
834
835 case VariableLocation::PARAMETER:
836 case VariableLocation::LOCAL: {
837 Comment cmnt(masm_, "[ FunctionDeclaration");
838 VisitForAccumulatorValue(declaration->fun());
839 __ sw(result_register(), StackOperand(variable));
840 break;
841 }
842
843 case VariableLocation::CONTEXT: {
844 Comment cmnt(masm_, "[ FunctionDeclaration");
845 EmitDebugCheckDeclarationContext(variable);
846 VisitForAccumulatorValue(declaration->fun());
847 __ sw(result_register(), ContextMemOperand(cp, variable->index()));
848 int offset = Context::SlotOffset(variable->index());
849 // We know that we have written a function, which is not a smi.
850 __ RecordWriteContextSlot(cp,
851 offset,
852 result_register(),
853 a2,
854 kRAHasBeenSaved,
855 kDontSaveFPRegs,
856 EMIT_REMEMBERED_SET,
857 OMIT_SMI_CHECK);
858 PrepareForBailoutForId(proxy->id(), NO_REGISTERS);
859 break;
860 }
861
862 case VariableLocation::LOOKUP: {
863 Comment cmnt(masm_, "[ FunctionDeclaration");
864 __ li(a2, Operand(variable->name()));
865 PushOperand(a2);
866 // Push initial value for function declaration.
867 VisitForStackValue(declaration->fun());
868 PushOperand(Smi::FromInt(variable->DeclarationPropertyAttributes()));
869 CallRuntimeWithOperands(Runtime::kDeclareLookupSlot);
870 break;
871 }
872 }
873}
874
875
876void FullCodeGenerator::DeclareGlobals(Handle<FixedArray> pairs) {
877 // Call the runtime to declare the globals.
878 __ li(a1, Operand(pairs));
879 __ li(a0, Operand(Smi::FromInt(DeclareGlobalsFlags())));
880 __ Push(a1, a0);
881 __ CallRuntime(Runtime::kDeclareGlobals);
882 // Return value is ignored.
883}
884
885
886void FullCodeGenerator::DeclareModules(Handle<FixedArray> descriptions) {
887 // Call the runtime to declare the modules.
888 __ Push(descriptions);
889 __ CallRuntime(Runtime::kDeclareModules);
890 // Return value is ignored.
891}
892
893
894void FullCodeGenerator::VisitSwitchStatement(SwitchStatement* stmt) {
895 Comment cmnt(masm_, "[ SwitchStatement");
896 Breakable nested_statement(this, stmt);
897 SetStatementPosition(stmt);
898
899 // Keep the switch value on the stack until a case matches.
900 VisitForStackValue(stmt->tag());
901 PrepareForBailoutForId(stmt->EntryId(), NO_REGISTERS);
902
903 ZoneList<CaseClause*>* clauses = stmt->cases();
904 CaseClause* default_clause = NULL; // Can occur anywhere in the list.
905
906 Label next_test; // Recycled for each test.
907 // Compile all the tests with branches to their bodies.
908 for (int i = 0; i < clauses->length(); i++) {
909 CaseClause* clause = clauses->at(i);
910 clause->body_target()->Unuse();
911
912 // The default is not a test, but remember it as final fall through.
913 if (clause->is_default()) {
914 default_clause = clause;
915 continue;
916 }
917
918 Comment cmnt(masm_, "[ Case comparison");
919 __ bind(&next_test);
920 next_test.Unuse();
921
922 // Compile the label expression.
923 VisitForAccumulatorValue(clause->label());
924 __ mov(a0, result_register()); // CompareStub requires args in a0, a1.
925
926 // Perform the comparison as if via '==='.
927 __ lw(a1, MemOperand(sp, 0)); // Switch value.
928 bool inline_smi_code = ShouldInlineSmiCase(Token::EQ_STRICT);
929 JumpPatchSite patch_site(masm_);
930 if (inline_smi_code) {
931 Label slow_case;
932 __ or_(a2, a1, a0);
933 patch_site.EmitJumpIfNotSmi(a2, &slow_case);
934
935 __ Branch(&next_test, ne, a1, Operand(a0));
936 __ Drop(1); // Switch value is no longer needed.
937 __ Branch(clause->body_target());
938
939 __ bind(&slow_case);
940 }
941
942 // Record position before stub call for type feedback.
943 SetExpressionPosition(clause);
944 Handle<Code> ic =
945 CodeFactory::CompareIC(isolate(), Token::EQ_STRICT).code();
946 CallIC(ic, clause->CompareId());
947 patch_site.EmitPatchInfo();
948
949 Label skip;
950 __ Branch(&skip);
951 PrepareForBailout(clause, TOS_REG);
952 __ LoadRoot(at, Heap::kTrueValueRootIndex);
953 __ Branch(&next_test, ne, v0, Operand(at));
954 __ Drop(1);
955 __ Branch(clause->body_target());
956 __ bind(&skip);
957
958 __ Branch(&next_test, ne, v0, Operand(zero_reg));
959 __ Drop(1); // Switch value is no longer needed.
960 __ Branch(clause->body_target());
961 }
962
963 // Discard the test value and jump to the default if present, otherwise to
964 // the end of the statement.
965 __ bind(&next_test);
966 DropOperands(1); // Switch value is no longer needed.
967 if (default_clause == NULL) {
968 __ Branch(nested_statement.break_label());
969 } else {
970 __ Branch(default_clause->body_target());
971 }
972
973 // Compile all the case bodies.
974 for (int i = 0; i < clauses->length(); i++) {
975 Comment cmnt(masm_, "[ Case body");
976 CaseClause* clause = clauses->at(i);
977 __ bind(clause->body_target());
978 PrepareForBailoutForId(clause->EntryId(), NO_REGISTERS);
979 VisitStatements(clause->statements());
980 }
981
982 __ bind(nested_statement.break_label());
983 PrepareForBailoutForId(stmt->ExitId(), NO_REGISTERS);
984}
985
986
987void FullCodeGenerator::VisitForInStatement(ForInStatement* stmt) {
988 Comment cmnt(masm_, "[ ForInStatement");
989 SetStatementPosition(stmt, SKIP_BREAK);
990
991 FeedbackVectorSlot slot = stmt->ForInFeedbackSlot();
992
993 Label loop, exit;
994 ForIn loop_statement(this, stmt);
995 increment_loop_depth();
996
997 // Get the object to enumerate over.
998 SetExpressionAsStatementPosition(stmt->enumerable());
999 VisitForAccumulatorValue(stmt->enumerable());
1000 __ mov(a0, result_register());
1001 OperandStackDepthIncrement(ForIn::kElementCount);
1002
1003 // If the object is null or undefined, skip over the loop, otherwise convert
1004 // it to a JS receiver. See ECMA-262 version 5, section 12.6.4.
1005 Label convert, done_convert;
1006 __ JumpIfSmi(a0, &convert);
1007 __ GetObjectType(a0, a1, a1);
1008 __ Branch(USE_DELAY_SLOT, &done_convert, ge, a1,
1009 Operand(FIRST_JS_RECEIVER_TYPE));
1010 __ LoadRoot(at, Heap::kNullValueRootIndex); // In delay slot.
1011 __ Branch(USE_DELAY_SLOT, &exit, eq, a0, Operand(at));
1012 __ LoadRoot(at, Heap::kUndefinedValueRootIndex); // In delay slot.
1013 __ Branch(&exit, eq, a0, Operand(at));
1014 __ bind(&convert);
1015 ToObjectStub stub(isolate());
1016 __ CallStub(&stub);
1017 __ mov(a0, v0);
1018 __ bind(&done_convert);
1019 PrepareForBailoutForId(stmt->ToObjectId(), TOS_REG);
1020 __ push(a0);
1021
1022 // Check cache validity in generated code. This is a fast case for
1023 // the JSObject::IsSimpleEnum cache validity checks. If we cannot
1024 // guarantee cache validity, call the runtime system to check cache
1025 // validity or get the property names in a fixed array.
1026 // Note: Proxies never have an enum cache, so will always take the
1027 // slow path.
1028 Label call_runtime;
1029 __ CheckEnumCache(&call_runtime);
1030
1031 // The enum cache is valid. Load the map of the object being
1032 // iterated over and use the cache for the iteration.
1033 Label use_cache;
1034 __ lw(v0, FieldMemOperand(a0, HeapObject::kMapOffset));
1035 __ Branch(&use_cache);
1036
1037 // Get the set of properties to enumerate.
1038 __ bind(&call_runtime);
1039 __ push(a0); // Duplicate the enumerable object on the stack.
1040 __ CallRuntime(Runtime::kForInEnumerate);
1041 PrepareForBailoutForId(stmt->EnumId(), TOS_REG);
1042
1043 // If we got a map from the runtime call, we can do a fast
1044 // modification check. Otherwise, we got a fixed array, and we have
1045 // to do a slow check.
1046 Label fixed_array;
1047 __ lw(a2, FieldMemOperand(v0, HeapObject::kMapOffset));
1048 __ LoadRoot(at, Heap::kMetaMapRootIndex);
1049 __ Branch(&fixed_array, ne, a2, Operand(at));
1050
1051 // We got a map in register v0. Get the enumeration cache from it.
1052 Label no_descriptors;
1053 __ bind(&use_cache);
1054
1055 __ EnumLength(a1, v0);
1056 __ Branch(&no_descriptors, eq, a1, Operand(Smi::FromInt(0)));
1057
1058 __ LoadInstanceDescriptors(v0, a2);
1059 __ lw(a2, FieldMemOperand(a2, DescriptorArray::kEnumCacheOffset));
1060 __ lw(a2, FieldMemOperand(a2, DescriptorArray::kEnumCacheBridgeCacheOffset));
1061
1062 // Set up the four remaining stack slots.
1063 __ li(a0, Operand(Smi::FromInt(0)));
1064 // Push map, enumeration cache, enumeration cache length (as smi) and zero.
1065 __ Push(v0, a2, a1, a0);
1066 __ jmp(&loop);
1067
1068 __ bind(&no_descriptors);
1069 __ Drop(1);
1070 __ jmp(&exit);
1071
1072 // We got a fixed array in register v0. Iterate through that.
1073 __ bind(&fixed_array);
1074
1075 int const vector_index = SmiFromSlot(slot)->value();
1076 __ EmitLoadTypeFeedbackVector(a1);
1077 __ li(a2, Operand(TypeFeedbackVector::MegamorphicSentinel(isolate())));
1078 __ sw(a2, FieldMemOperand(a1, FixedArray::OffsetOfElementAt(vector_index)));
1079
1080 __ li(a1, Operand(Smi::FromInt(1))); // Smi(1) indicates slow check
1081 __ Push(a1, v0); // Smi and array
1082 __ lw(a1, FieldMemOperand(v0, FixedArray::kLengthOffset));
1083 __ Push(a1); // Fixed array length (as smi).
1084 PrepareForBailoutForId(stmt->PrepareId(), NO_REGISTERS);
1085 __ li(a0, Operand(Smi::FromInt(0)));
1086 __ Push(a0); // Initial index.
1087
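  // At this point the operand stack holds, from top to bottom:
  //   sp + 0 * kPointerSize : current index (smi)
  //   sp + 1 * kPointerSize : length of the cache / fixed array (smi)
  //   sp + 2 * kPointerSize : enum cache bridge cache, or fixed array of keys
  //   sp + 3 * kPointerSize : expected receiver map, or Smi(1) for the slow path
  //   sp + 4 * kPointerSize : the enumerable object itself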
1088 // Generate code for doing the condition check.
1089 __ bind(&loop);
1090 SetExpressionAsStatementPosition(stmt->each());
1091
1092 // Load the current count to a0, load the length to a1.
1093 __ lw(a0, MemOperand(sp, 0 * kPointerSize));
1094 __ lw(a1, MemOperand(sp, 1 * kPointerSize));
1095 __ Branch(loop_statement.break_label(), hs, a0, Operand(a1));
1096
1097 // Get the current entry of the array into register a3.
1098 __ lw(a2, MemOperand(sp, 2 * kPointerSize));
1099 __ Addu(a2, a2, Operand(FixedArray::kHeaderSize - kHeapObjectTag));
1100 __ Lsa(t0, a2, a0, kPointerSizeLog2 - kSmiTagSize);
1101 __ lw(a3, MemOperand(t0)); // Current entry.
1102
1103 // Get the expected map from the stack or a smi in the
1104 // permanent slow case into register a2.
1105 __ lw(a2, MemOperand(sp, 3 * kPointerSize));
1106
1107 // Check if the expected map still matches that of the enumerable.
1108 // If not, we may have to filter the key.
1109 Label update_each;
1110 __ lw(a1, MemOperand(sp, 4 * kPointerSize));
1111 __ lw(t0, FieldMemOperand(a1, HeapObject::kMapOffset));
1112 __ Branch(&update_each, eq, t0, Operand(a2));
1113
1114 // We might get here from TurboFan or Crankshaft when something in the
1115 // for-in loop body deopts, and we only now notice in fullcodegen that we
1116 // can no longer use the enum cache, i.e. we left fast mode. So better record
1117 // this information here, in case we later OSR back into this loop or
1118 // reoptimize the whole function w/o rerunning the loop with the slow
1119 // mode object in fullcodegen (which would result in a deopt loop).
1120 __ EmitLoadTypeFeedbackVector(a0);
1121 __ li(a2, Operand(TypeFeedbackVector::MegamorphicSentinel(isolate())));
1122 __ sw(a2, FieldMemOperand(a0, FixedArray::OffsetOfElementAt(vector_index)));
1123
1124 // Convert the entry to a string or (smi) 0 if it isn't a property
1125 // any more. If the property has been removed while iterating, we
1126 // just skip it.
1127 __ Push(a1, a3); // Enumerable and current entry.
1128 __ CallRuntime(Runtime::kForInFilter);
1129 PrepareForBailoutForId(stmt->FilterId(), TOS_REG);
1130 __ mov(a3, result_register());
1131 __ LoadRoot(at, Heap::kUndefinedValueRootIndex);
1132 __ Branch(loop_statement.continue_label(), eq, a3, Operand(at));
1133
1134 // Update the 'each' property or variable from the possibly filtered
1135 // entry in register a3.
1136 __ bind(&update_each);
1137 __ mov(result_register(), a3);
1138 // Perform the assignment as if via '='.
1139 { EffectContext context(this);
1140 EmitAssignment(stmt->each(), stmt->EachFeedbackSlot());
1141 PrepareForBailoutForId(stmt->AssignmentId(), NO_REGISTERS);
1142 }
1143
1144 // Both Crankshaft and Turbofan expect BodyId to be right before stmt->body().
1145 PrepareForBailoutForId(stmt->BodyId(), NO_REGISTERS);
1146 // Generate code for the body of the loop.
1147 Visit(stmt->body());
1148
1149 // Generate code for going to the next element by incrementing
1150 // the index (smi) stored on top of the stack.
1151 __ bind(loop_statement.continue_label());
1152 __ pop(a0);
1153 __ Addu(a0, a0, Operand(Smi::FromInt(1)));
1154 __ push(a0);
1155
1156 EmitBackEdgeBookkeeping(stmt, &loop);
1157 __ Branch(&loop);
1158
1159 // Remove the pointers stored on the stack.
1160 __ bind(loop_statement.break_label());
1161 DropOperands(5);
1162
1163 // Exit and decrement the loop depth.
1164 PrepareForBailoutForId(stmt->ExitId(), NO_REGISTERS);
1165 __ bind(&exit);
1166 decrement_loop_depth();
1167}
1168
1169
1170void FullCodeGenerator::EmitNewClosure(Handle<SharedFunctionInfo> info,
1171 bool pretenure) {
1172 // Use the fast case closure allocation code that allocates in new
1173 // space for nested functions that don't need literals cloning. If
1174 // we're running with the --always-opt or the --prepare-always-opt
1175 // flag, we need to use the runtime function so that the new function
1176 // we are creating here gets a chance to have its code optimized and
1177 // doesn't just get a copy of the existing unoptimized code.
1178 if (!FLAG_always_opt &&
1179 !FLAG_prepare_always_opt &&
1180 !pretenure &&
1181 scope()->is_function_scope() &&
1182 info->num_literals() == 0) {
1183 FastNewClosureStub stub(isolate(), info->language_mode(), info->kind());
1184 __ li(a2, Operand(info));
1185 __ CallStub(&stub);
1186 } else {
1187 __ Push(info);
1188 __ CallRuntime(pretenure ? Runtime::kNewClosure_Tenured
1189 : Runtime::kNewClosure);
1190 }
1191 context()->Plug(v0);
1192}
1193
1194
1195void FullCodeGenerator::EmitSetHomeObject(Expression* initializer, int offset,
1196 FeedbackVectorSlot slot) {
1197 DCHECK(NeedsHomeObject(initializer));
1198 __ lw(StoreDescriptor::ReceiverRegister(), MemOperand(sp));
1199 __ li(StoreDescriptor::NameRegister(),
1200 Operand(isolate()->factory()->home_object_symbol()));
1201 __ lw(StoreDescriptor::ValueRegister(),
1202 MemOperand(sp, offset * kPointerSize));
1203 EmitLoadStoreICSlot(slot);
1204 CallStoreIC();
1205}
1206
1207
1208void FullCodeGenerator::EmitSetHomeObjectAccumulator(Expression* initializer,
1209 int offset,
1210 FeedbackVectorSlot slot) {
1211 DCHECK(NeedsHomeObject(initializer));
1212 __ Move(StoreDescriptor::ReceiverRegister(), v0);
1213 __ li(StoreDescriptor::NameRegister(),
1214 Operand(isolate()->factory()->home_object_symbol()));
1215 __ lw(StoreDescriptor::ValueRegister(),
1216 MemOperand(sp, offset * kPointerSize));
1217 EmitLoadStoreICSlot(slot);
1218 CallStoreIC();
1219}
1220
1221
1222void FullCodeGenerator::EmitLoadGlobalCheckExtensions(VariableProxy* proxy,
1223 TypeofMode typeof_mode,
1224 Label* slow) {
1225 Register current = cp;
1226 Register next = a1;
1227 Register temp = a2;
1228
1229 Scope* s = scope();
1230 while (s != NULL) {
1231 if (s->num_heap_slots() > 0) {
1232 if (s->calls_sloppy_eval()) {
1233 // Check that extension is "the hole".
1234 __ lw(temp, ContextMemOperand(current, Context::EXTENSION_INDEX));
1235 __ JumpIfNotRoot(temp, Heap::kTheHoleValueRootIndex, slow);
1236 }
1237 // Load next context in chain.
1238 __ lw(next, ContextMemOperand(current, Context::PREVIOUS_INDEX));
1239 // Walk the rest of the chain without clobbering cp.
1240 current = next;
1241 }
1242 // If no outer scope calls eval, we do not need to check more
1243 // context extensions.
1244 if (!s->outer_scope_calls_sloppy_eval() || s->is_eval_scope()) break;
1245 s = s->outer_scope();
1246 }
1247
1248 if (s->is_eval_scope()) {
1249 Label loop, fast;
1250 if (!current.is(next)) {
1251 __ Move(next, current);
1252 }
1253 __ bind(&loop);
1254 // Terminate at native context.
1255 __ lw(temp, FieldMemOperand(next, HeapObject::kMapOffset));
1256 __ LoadRoot(t0, Heap::kNativeContextMapRootIndex);
1257 __ Branch(&fast, eq, temp, Operand(t0));
1258 // Check that extension is "the hole".
1259 __ lw(temp, ContextMemOperand(next, Context::EXTENSION_INDEX));
1260 __ JumpIfNotRoot(temp, Heap::kTheHoleValueRootIndex, slow);
1261 // Load next context in chain.
1262 __ lw(next, ContextMemOperand(next, Context::PREVIOUS_INDEX));
1263 __ Branch(&loop);
1264 __ bind(&fast);
1265 }
1266
1267 // All extension objects were empty and it is safe to use a normal global
1268 // load machinery.
1269 EmitGlobalVariableLoad(proxy, typeof_mode);
1270}
1271
1272
1273MemOperand FullCodeGenerator::ContextSlotOperandCheckExtensions(Variable* var,
1274 Label* slow) {
1275 DCHECK(var->IsContextSlot());
1276 Register context = cp;
1277 Register next = a3;
1278 Register temp = t0;
1279
1280 for (Scope* s = scope(); s != var->scope(); s = s->outer_scope()) {
1281 if (s->num_heap_slots() > 0) {
1282 if (s->calls_sloppy_eval()) {
1283 // Check that extension is "the hole".
1284 __ lw(temp, ContextMemOperand(context, Context::EXTENSION_INDEX));
1285 __ JumpIfNotRoot(temp, Heap::kTheHoleValueRootIndex, slow);
1286 }
1287 __ lw(next, ContextMemOperand(context, Context::PREVIOUS_INDEX));
1288 // Walk the rest of the chain without clobbering cp.
1289 context = next;
1290 }
1291 }
1292 // Check that last extension is "the hole".
1293 __ lw(temp, ContextMemOperand(context, Context::EXTENSION_INDEX));
1294 __ JumpIfNotRoot(temp, Heap::kTheHoleValueRootIndex, slow);
1295
1296 // This function is used only for loads, not stores, so it's safe to
1297 // return a cp-based operand (the write barrier cannot be allowed to
1298 // destroy the cp register).
1299 return ContextMemOperand(context, var->index());
1300}
1301
1302
1303void FullCodeGenerator::EmitDynamicLookupFastCase(VariableProxy* proxy,
1304 TypeofMode typeof_mode,
1305 Label* slow, Label* done) {
1306 // Generate fast-case code for variables that might be shadowed by
1307 // eval-introduced variables. Eval is used a lot without
1308 // introducing variables. In those cases, we do not want to
1309 // perform a runtime call for all variables in the scope
1310 // containing the eval.
1311 Variable* var = proxy->var();
1312 if (var->mode() == DYNAMIC_GLOBAL) {
1313 EmitLoadGlobalCheckExtensions(proxy, typeof_mode, slow);
1314 __ Branch(done);
1315 } else if (var->mode() == DYNAMIC_LOCAL) {
1316 Variable* local = var->local_if_not_shadowed();
1317 __ lw(v0, ContextSlotOperandCheckExtensions(local, slow));
1318 if (local->mode() == LET || local->mode() == CONST ||
1319 local->mode() == CONST_LEGACY) {
1320 __ LoadRoot(at, Heap::kTheHoleValueRootIndex);
1321 __ subu(at, v0, at); // Sub as compare: at == 0 on eq.
1322 if (local->mode() == CONST_LEGACY) {
1323 __ LoadRoot(a0, Heap::kUndefinedValueRootIndex);
1324 __ Movz(v0, a0, at); // Conditional move: return Undefined if TheHole.
1325 } else { // LET || CONST
1326 __ Branch(done, ne, at, Operand(zero_reg));
1327 __ li(a0, Operand(var->name()));
1328 __ push(a0);
1329 __ CallRuntime(Runtime::kThrowReferenceError);
1330 }
1331 }
1332 __ Branch(done);
1333 }
1334}
1335
1336
1337void FullCodeGenerator::EmitGlobalVariableLoad(VariableProxy* proxy,
1338 TypeofMode typeof_mode) {
1339 Variable* var = proxy->var();
1340 DCHECK(var->IsUnallocatedOrGlobalSlot() ||
1341 (var->IsLookupSlot() && var->mode() == DYNAMIC_GLOBAL));
1342 __ LoadGlobalObject(LoadDescriptor::ReceiverRegister());
1343 __ li(LoadDescriptor::NameRegister(), Operand(var->name()));
1344 __ li(LoadDescriptor::SlotRegister(),
1345 Operand(SmiFromSlot(proxy->VariableFeedbackSlot())));
1346 CallLoadIC(typeof_mode);
1347}
1348
1349
1350void FullCodeGenerator::EmitVariableLoad(VariableProxy* proxy,
1351 TypeofMode typeof_mode) {
1352 // Record position before possible IC call.
1353 SetExpressionPosition(proxy);
1354 PrepareForBailoutForId(proxy->BeforeId(), NO_REGISTERS);
1355 Variable* var = proxy->var();
1356
1357 // Three cases: global variables, lookup variables, and all other types of
1358 // variables.
1359 switch (var->location()) {
1360 case VariableLocation::GLOBAL:
1361 case VariableLocation::UNALLOCATED: {
1362 Comment cmnt(masm_, "[ Global variable");
1363 EmitGlobalVariableLoad(proxy, typeof_mode);
1364 context()->Plug(v0);
1365 break;
1366 }
1367
1368 case VariableLocation::PARAMETER:
1369 case VariableLocation::LOCAL:
1370 case VariableLocation::CONTEXT: {
1371 DCHECK_EQ(NOT_INSIDE_TYPEOF, typeof_mode);
1372 Comment cmnt(masm_, var->IsContextSlot() ? "[ Context variable"
1373 : "[ Stack variable");
1374 if (NeedsHoleCheckForLoad(proxy)) {
1375 // Let and const need a read barrier.
1376 GetVar(v0, var);
1377 __ LoadRoot(at, Heap::kTheHoleValueRootIndex);
1378 __ subu(at, v0, at); // Sub as compare: at == 0 on eq.
1379 if (var->mode() == LET || var->mode() == CONST) {
1380 // Throw a reference error when using an uninitialized let/const
1381 // binding in harmony mode.
1382 Label done;
1383 __ Branch(&done, ne, at, Operand(zero_reg));
1384 __ li(a0, Operand(var->name()));
1385 __ push(a0);
1386 __ CallRuntime(Runtime::kThrowReferenceError);
1387 __ bind(&done);
1388 } else {
1389 // Uninitialized legacy const bindings are unholed.
1390 DCHECK(var->mode() == CONST_LEGACY);
1391 __ LoadRoot(a0, Heap::kUndefinedValueRootIndex);
1392 __ Movz(v0, a0, at); // Conditional move: Undefined if TheHole.
1393 }
1394 context()->Plug(v0);
1395 break;
1396 }
1397 context()->Plug(var);
1398 break;
1399 }
1400
1401 case VariableLocation::LOOKUP: {
1402 Comment cmnt(masm_, "[ Lookup variable");
1403 Label done, slow;
1404 // Generate code for loading from variables potentially shadowed
1405 // by eval-introduced variables.
1406 EmitDynamicLookupFastCase(proxy, typeof_mode, &slow, &done);
1407 __ bind(&slow);
1408 __ Push(var->name());
1409 Runtime::FunctionId function_id =
1410 typeof_mode == NOT_INSIDE_TYPEOF
1411 ? Runtime::kLoadLookupSlot
1412 : Runtime::kLoadLookupSlotInsideTypeof;
1413 __ CallRuntime(function_id);
1414 __ bind(&done);
1415 context()->Plug(v0);
1416 }
1417 }
1418}
1419
1420
1421void FullCodeGenerator::VisitRegExpLiteral(RegExpLiteral* expr) {
1422 Comment cmnt(masm_, "[ RegExpLiteral");
1423 __ lw(a3, MemOperand(fp, JavaScriptFrameConstants::kFunctionOffset));
1424 __ li(a2, Operand(Smi::FromInt(expr->literal_index())));
1425 __ li(a1, Operand(expr->pattern()));
1426 __ li(a0, Operand(Smi::FromInt(expr->flags())));
1427 FastCloneRegExpStub stub(isolate());
1428 __ CallStub(&stub);
1429 context()->Plug(v0);
1430}
1431
1432
1433void FullCodeGenerator::EmitAccessor(ObjectLiteralProperty* property) {
1434 Expression* expression = (property == NULL) ? NULL : property->value();
1435 if (expression == NULL) {
1436 __ LoadRoot(a1, Heap::kNullValueRootIndex);
1437 PushOperand(a1);
1438 } else {
1439 VisitForStackValue(expression);
1440 if (NeedsHomeObject(expression)) {
1441 DCHECK(property->kind() == ObjectLiteral::Property::GETTER ||
1442 property->kind() == ObjectLiteral::Property::SETTER);
1443 int offset = property->kind() == ObjectLiteral::Property::GETTER ? 2 : 3;
1444 EmitSetHomeObject(expression, offset, property->GetSlot());
1445 }
1446 }
1447}
1448
1449
1450void FullCodeGenerator::VisitObjectLiteral(ObjectLiteral* expr) {
1451 Comment cmnt(masm_, "[ ObjectLiteral");
1452
1453 Handle<FixedArray> constant_properties = expr->constant_properties();
1454 __ lw(a3, MemOperand(fp, JavaScriptFrameConstants::kFunctionOffset));
1455 __ li(a2, Operand(Smi::FromInt(expr->literal_index())));
1456 __ li(a1, Operand(constant_properties));
1457 __ li(a0, Operand(Smi::FromInt(expr->ComputeFlags())));
1458 if (MustCreateObjectLiteralWithRuntime(expr)) {
1459 __ Push(a3, a2, a1, a0);
1460 __ CallRuntime(Runtime::kCreateObjectLiteral);
1461 } else {
1462 FastCloneShallowObjectStub stub(isolate(), expr->properties_count());
1463 __ CallStub(&stub);
1464 }
1465 PrepareForBailoutForId(expr->CreateLiteralId(), TOS_REG);
1466
1467 // If result_saved is true the result is on top of the stack. If
1468 // result_saved is false the result is in v0.
1469 bool result_saved = false;
1470
1471 AccessorTable accessor_table(zone());
1472 int property_index = 0;
1473 for (; property_index < expr->properties()->length(); property_index++) {
1474 ObjectLiteral::Property* property = expr->properties()->at(property_index);
1475 if (property->is_computed_name()) break;
1476 if (property->IsCompileTimeValue()) continue;
1477
1478 Literal* key = property->key()->AsLiteral();
1479 Expression* value = property->value();
1480 if (!result_saved) {
1481 PushOperand(v0); // Save result on stack.
1482 result_saved = true;
1483 }
1484 switch (property->kind()) {
1485 case ObjectLiteral::Property::CONSTANT:
1486 UNREACHABLE();
1487 case ObjectLiteral::Property::MATERIALIZED_LITERAL:
1488 DCHECK(!CompileTimeValue::IsCompileTimeValue(property->value()));
1489 // Fall through.
1490 case ObjectLiteral::Property::COMPUTED:
1491 // It is safe to use [[Put]] here because the boilerplate already
1492 // contains computed properties with an uninitialized value.
1493 if (key->value()->IsInternalizedString()) {
1494 if (property->emit_store()) {
1495 VisitForAccumulatorValue(value);
1496 __ mov(StoreDescriptor::ValueRegister(), result_register());
1497 DCHECK(StoreDescriptor::ValueRegister().is(a0));
1498 __ li(StoreDescriptor::NameRegister(), Operand(key->value()));
1499 __ lw(StoreDescriptor::ReceiverRegister(), MemOperand(sp));
1500 EmitLoadStoreICSlot(property->GetSlot(0));
1501 CallStoreIC();
1502 PrepareForBailoutForId(key->id(), NO_REGISTERS);
1503
1504 if (NeedsHomeObject(value)) {
1505 EmitSetHomeObjectAccumulator(value, 0, property->GetSlot(1));
1506 }
1507 } else {
1508 VisitForEffect(value);
1509 }
1510 break;
1511 }
1512 // Duplicate receiver on stack.
1513 __ lw(a0, MemOperand(sp));
Ben Murdoch097c5b22016-05-18 11:27:45 +01001514 PushOperand(a0);
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00001515 VisitForStackValue(key);
1516 VisitForStackValue(value);
1517 if (property->emit_store()) {
1518 if (NeedsHomeObject(value)) {
1519 EmitSetHomeObject(value, 2, property->GetSlot());
1520 }
1521 __ li(a0, Operand(Smi::FromInt(SLOPPY))); // PropertyAttributes.
Ben Murdoch097c5b22016-05-18 11:27:45 +01001522 PushOperand(a0);
1523 CallRuntimeWithOperands(Runtime::kSetProperty);
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00001524 } else {
Ben Murdoch097c5b22016-05-18 11:27:45 +01001525 DropOperands(3);
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00001526 }
1527 break;
1528 case ObjectLiteral::Property::PROTOTYPE:
1529 // Duplicate receiver on stack.
1530 __ lw(a0, MemOperand(sp));
Ben Murdoch097c5b22016-05-18 11:27:45 +01001531 PushOperand(a0);
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00001532 VisitForStackValue(value);
1533 DCHECK(property->emit_store());
Ben Murdoch097c5b22016-05-18 11:27:45 +01001534 CallRuntimeWithOperands(Runtime::kInternalSetPrototype);
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00001535 PrepareForBailoutForId(expr->GetIdForPropertySet(property_index),
1536 NO_REGISTERS);
1537 break;
1538 case ObjectLiteral::Property::GETTER:
1539 if (property->emit_store()) {
1540 accessor_table.lookup(key)->second->getter = property;
1541 }
1542 break;
1543 case ObjectLiteral::Property::SETTER:
1544 if (property->emit_store()) {
1545 accessor_table.lookup(key)->second->setter = property;
1546 }
1547 break;
1548 }
1549 }
1550
1551 // Emit code to define accessors, using only a single call to the runtime for
1552 // each pair of corresponding getters and setters.
1553 for (AccessorTable::Iterator it = accessor_table.begin();
1554 it != accessor_table.end();
1555 ++it) {
1556 __ lw(a0, MemOperand(sp)); // Duplicate receiver.
Ben Murdoch097c5b22016-05-18 11:27:45 +01001557 PushOperand(a0);
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00001558 VisitForStackValue(it->first);
1559 EmitAccessor(it->second->getter);
1560 EmitAccessor(it->second->setter);
1561 __ li(a0, Operand(Smi::FromInt(NONE)));
Ben Murdoch097c5b22016-05-18 11:27:45 +01001562 PushOperand(a0);
1563 CallRuntimeWithOperands(Runtime::kDefineAccessorPropertyUnchecked);
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00001564 }
1565
1566 // Object literals have two parts. The "static" part on the left contains no
1567 // computed property names, and so we can compute its map ahead of time; see
1568 // runtime.cc::CreateObjectLiteralBoilerplate. The second "dynamic" part
1569 // starts with the first computed property name, and continues with all
1570 // properties to its right. All the code from above initializes the static
1571 // component of the object literal, and arranges for the map of the result to
1572 // reflect the static order in which the keys appear. For the dynamic
1573 // properties, we compile them into a series of "SetOwnProperty" runtime
1574 // calls. This will preserve insertion order.
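  // For example (hypothetical snippet), in
  //   ({ a: 1, b: 2, [f()]: 3, c: 4 })
  // the properties 'a' and 'b' form the static part whose map is known ahead
  // of time, while everything from the computed key [f()] onwards is defined
  // by the loop below, preserving insertion order.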
1575 for (; property_index < expr->properties()->length(); property_index++) {
1576 ObjectLiteral::Property* property = expr->properties()->at(property_index);
1577
1578 Expression* value = property->value();
1579 if (!result_saved) {
Ben Murdoch097c5b22016-05-18 11:27:45 +01001580 PushOperand(v0); // Save result on the stack
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00001581 result_saved = true;
1582 }
1583
1584 __ lw(a0, MemOperand(sp)); // Duplicate receiver.
Ben Murdoch097c5b22016-05-18 11:27:45 +01001585 PushOperand(a0);
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00001586
1587 if (property->kind() == ObjectLiteral::Property::PROTOTYPE) {
1588 DCHECK(!property->is_computed_name());
1589 VisitForStackValue(value);
1590 DCHECK(property->emit_store());
Ben Murdoch097c5b22016-05-18 11:27:45 +01001591 CallRuntimeWithOperands(Runtime::kInternalSetPrototype);
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00001592 PrepareForBailoutForId(expr->GetIdForPropertySet(property_index),
1593 NO_REGISTERS);
1594 } else {
1595 EmitPropertyKey(property, expr->GetIdForPropertyName(property_index));
1596 VisitForStackValue(value);
1597 if (NeedsHomeObject(value)) {
1598 EmitSetHomeObject(value, 2, property->GetSlot());
1599 }
1600
1601 switch (property->kind()) {
1602 case ObjectLiteral::Property::CONSTANT:
1603 case ObjectLiteral::Property::MATERIALIZED_LITERAL:
1604 case ObjectLiteral::Property::COMPUTED:
1605 if (property->emit_store()) {
Ben Murdoch097c5b22016-05-18 11:27:45 +01001606 PushOperand(Smi::FromInt(NONE));
1607 PushOperand(Smi::FromInt(property->NeedsSetFunctionName()));
1608 CallRuntimeWithOperands(Runtime::kDefineDataPropertyInLiteral);
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00001609 } else {
Ben Murdoch097c5b22016-05-18 11:27:45 +01001610 DropOperands(3);
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00001611 }
1612 break;
1613
1614 case ObjectLiteral::Property::PROTOTYPE:
1615 UNREACHABLE();
1616 break;
1617
1618 case ObjectLiteral::Property::GETTER:
Ben Murdoch097c5b22016-05-18 11:27:45 +01001619 PushOperand(Smi::FromInt(NONE));
1620 CallRuntimeWithOperands(Runtime::kDefineGetterPropertyUnchecked);
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00001621 break;
1622
1623 case ObjectLiteral::Property::SETTER:
Ben Murdoch097c5b22016-05-18 11:27:45 +01001624 PushOperand(Smi::FromInt(NONE));
1625 CallRuntimeWithOperands(Runtime::kDefineSetterPropertyUnchecked);
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00001626 break;
1627 }
1628 }
1629 }
1630
1631 if (expr->has_function()) {
1632 DCHECK(result_saved);
1633 __ lw(a0, MemOperand(sp));
1634 __ push(a0);
1635 __ CallRuntime(Runtime::kToFastProperties);
1636 }
1637
1638 if (result_saved) {
1639 context()->PlugTOS();
1640 } else {
1641 context()->Plug(v0);
1642 }
1643}
1644
1645
1646void FullCodeGenerator::VisitArrayLiteral(ArrayLiteral* expr) {
1647 Comment cmnt(masm_, "[ ArrayLiteral");
1648
1649 Handle<FixedArray> constant_elements = expr->constant_elements();
1650 bool has_fast_elements =
1651 IsFastObjectElementsKind(expr->constant_elements_kind());
1652
1653 AllocationSiteMode allocation_site_mode = TRACK_ALLOCATION_SITE;
1654 if (has_fast_elements && !FLAG_allocation_site_pretenuring) {
1655 // If the only customer of allocation sites is transitioning, then
1656 // we can turn it off if we don't have anywhere else to transition to.
1657 allocation_site_mode = DONT_TRACK_ALLOCATION_SITE;
1658 }
1659
1660 __ mov(a0, result_register());
1661 __ lw(a3, MemOperand(fp, JavaScriptFrameConstants::kFunctionOffset));
1662 __ li(a2, Operand(Smi::FromInt(expr->literal_index())));
1663 __ li(a1, Operand(constant_elements));
1664 if (MustCreateArrayLiteralWithRuntime(expr)) {
1665 __ li(a0, Operand(Smi::FromInt(expr->ComputeFlags())));
1666 __ Push(a3, a2, a1, a0);
1667 __ CallRuntime(Runtime::kCreateArrayLiteral);
1668 } else {
1669 FastCloneShallowArrayStub stub(isolate(), allocation_site_mode);
1670 __ CallStub(&stub);
1671 }
1672 PrepareForBailoutForId(expr->CreateLiteralId(), TOS_REG);
1673
1674 bool result_saved = false; // Is the result saved to the stack?
1675 ZoneList<Expression*>* subexprs = expr->values();
1676 int length = subexprs->length();
1677
1678 // Emit code to evaluate all the non-constant subexpressions and to store
1679 // them into the newly cloned array.
1680 int array_index = 0;
1681 for (; array_index < length; array_index++) {
1682 Expression* subexpr = subexprs->at(array_index);
Ben Murdoch097c5b22016-05-18 11:27:45 +01001683 DCHECK(!subexpr->IsSpread());
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00001684
1685 // If the subexpression is a literal or a simple materialized literal it
1686 // is already set in the cloned array.
1687 if (CompileTimeValue::IsCompileTimeValue(subexpr)) continue;
1688
1689 if (!result_saved) {
Ben Murdoch097c5b22016-05-18 11:27:45 +01001690 PushOperand(v0); // array literal
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00001691 result_saved = true;
1692 }
1693
1694 VisitForAccumulatorValue(subexpr);
1695
1696 __ li(StoreDescriptor::NameRegister(), Operand(Smi::FromInt(array_index)));
1697 __ lw(StoreDescriptor::ReceiverRegister(), MemOperand(sp, 0));
1698 __ mov(StoreDescriptor::ValueRegister(), result_register());
1699 EmitLoadStoreICSlot(expr->LiteralFeedbackSlot());
1700 Handle<Code> ic =
1701 CodeFactory::KeyedStoreIC(isolate(), language_mode()).code();
1702 CallIC(ic);
1703
1704 PrepareForBailoutForId(expr->GetIdForElement(array_index), NO_REGISTERS);
1705 }
1706
1707  // In case the array literal contains spread expressions it has two parts. The
1708  // first part is the "static" array which has a literal index and is handled
1709  // above. The second part is the part after the first spread expression
1710  // (inclusive), and these elements get appended to the array. Note that the
1711  // number of elements an iterable produces is unknown ahead of time.
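  // For example (hypothetical snippet), in
  //   [1, 2, ...iter, 3]
  // the elements 1 and 2 are stored via keyed store ICs by the loop above,
  // while everything from the spread onwards is appended to the array by the
  // loop below (Runtime::kAppendElement).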
1712 if (array_index < length && result_saved) {
Ben Murdoch097c5b22016-05-18 11:27:45 +01001713 PopOperand(v0);
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00001714 result_saved = false;
1715 }
1716 for (; array_index < length; array_index++) {
1717 Expression* subexpr = subexprs->at(array_index);
1718
Ben Murdoch097c5b22016-05-18 11:27:45 +01001719 PushOperand(v0);
1720 DCHECK(!subexpr->IsSpread());
1721 VisitForStackValue(subexpr);
1722 CallRuntimeWithOperands(Runtime::kAppendElement);
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00001723
1724 PrepareForBailoutForId(expr->GetIdForElement(array_index), NO_REGISTERS);
1725 }
1726
1727 if (result_saved) {
1728 context()->PlugTOS();
1729 } else {
1730 context()->Plug(v0);
1731 }
1732}
1733
1734
1735void FullCodeGenerator::VisitAssignment(Assignment* expr) {
1736 DCHECK(expr->target()->IsValidReferenceExpressionOrThis());
1737
1738 Comment cmnt(masm_, "[ Assignment");
1739 SetExpressionPosition(expr, INSERT_BREAK);
1740
1741 Property* property = expr->target()->AsProperty();
1742 LhsKind assign_type = Property::GetAssignType(property);
1743
1744 // Evaluate LHS expression.
1745 switch (assign_type) {
1746 case VARIABLE:
1747 // Nothing to do here.
1748 break;
1749 case NAMED_PROPERTY:
1750 if (expr->is_compound()) {
1751 // We need the receiver both on the stack and in the register.
1752 VisitForStackValue(property->obj());
1753 __ lw(LoadDescriptor::ReceiverRegister(), MemOperand(sp, 0));
1754 } else {
1755 VisitForStackValue(property->obj());
1756 }
1757 break;
1758 case NAMED_SUPER_PROPERTY:
1759 VisitForStackValue(
1760 property->obj()->AsSuperPropertyReference()->this_var());
1761 VisitForAccumulatorValue(
1762 property->obj()->AsSuperPropertyReference()->home_object());
Ben Murdoch097c5b22016-05-18 11:27:45 +01001763 PushOperand(result_register());
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00001764 if (expr->is_compound()) {
1765 const Register scratch = a1;
1766 __ lw(scratch, MemOperand(sp, kPointerSize));
Ben Murdoch097c5b22016-05-18 11:27:45 +01001767 PushOperands(scratch, result_register());
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00001768 }
1769 break;
1770 case KEYED_SUPER_PROPERTY: {
1771 const Register scratch = a1;
1772 VisitForStackValue(
1773 property->obj()->AsSuperPropertyReference()->this_var());
1774 VisitForAccumulatorValue(
1775 property->obj()->AsSuperPropertyReference()->home_object());
1776 __ Move(scratch, result_register());
1777 VisitForAccumulatorValue(property->key());
Ben Murdoch097c5b22016-05-18 11:27:45 +01001778 PushOperands(scratch, result_register());
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00001779 if (expr->is_compound()) {
1780 const Register scratch1 = t0;
1781 __ lw(scratch1, MemOperand(sp, 2 * kPointerSize));
Ben Murdoch097c5b22016-05-18 11:27:45 +01001782 PushOperands(scratch1, scratch, result_register());
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00001783 }
1784 break;
1785 }
1786 case KEYED_PROPERTY:
1787 // We need the key and receiver on both the stack and in v0 and a1.
1788 if (expr->is_compound()) {
1789 VisitForStackValue(property->obj());
1790 VisitForStackValue(property->key());
1791 __ lw(LoadDescriptor::ReceiverRegister(),
1792 MemOperand(sp, 1 * kPointerSize));
1793 __ lw(LoadDescriptor::NameRegister(), MemOperand(sp, 0));
1794 } else {
1795 VisitForStackValue(property->obj());
1796 VisitForStackValue(property->key());
1797 }
1798 break;
1799 }
1800
1801 // For compound assignments we need another deoptimization point after the
1802 // variable/property load.
1803 if (expr->is_compound()) {
1804 { AccumulatorValueContext context(this);
1805 switch (assign_type) {
1806 case VARIABLE:
1807 EmitVariableLoad(expr->target()->AsVariableProxy());
1808 PrepareForBailout(expr->target(), TOS_REG);
1809 break;
1810 case NAMED_PROPERTY:
1811 EmitNamedPropertyLoad(property);
1812 PrepareForBailoutForId(property->LoadId(), TOS_REG);
1813 break;
1814 case NAMED_SUPER_PROPERTY:
1815 EmitNamedSuperPropertyLoad(property);
1816 PrepareForBailoutForId(property->LoadId(), TOS_REG);
1817 break;
1818 case KEYED_SUPER_PROPERTY:
1819 EmitKeyedSuperPropertyLoad(property);
1820 PrepareForBailoutForId(property->LoadId(), TOS_REG);
1821 break;
1822 case KEYED_PROPERTY:
1823 EmitKeyedPropertyLoad(property);
1824 PrepareForBailoutForId(property->LoadId(), TOS_REG);
1825 break;
1826 }
1827 }
1828
1829 Token::Value op = expr->binary_op();
Ben Murdoch097c5b22016-05-18 11:27:45 +01001830 PushOperand(v0); // Left operand goes on the stack.
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00001831 VisitForAccumulatorValue(expr->value());
1832
1833 AccumulatorValueContext context(this);
1834 if (ShouldInlineSmiCase(op)) {
1835 EmitInlineSmiBinaryOp(expr->binary_operation(),
1836 op,
1837 expr->target(),
1838 expr->value());
1839 } else {
1840 EmitBinaryOp(expr->binary_operation(), op);
1841 }
1842
1843 // Deoptimization point in case the binary operation may have side effects.
1844 PrepareForBailout(expr->binary_operation(), TOS_REG);
1845 } else {
1846 VisitForAccumulatorValue(expr->value());
1847 }
1848
1849 SetExpressionPosition(expr);
1850
1851 // Store the value.
1852 switch (assign_type) {
1853 case VARIABLE:
1854 EmitVariableAssignment(expr->target()->AsVariableProxy()->var(),
1855 expr->op(), expr->AssignmentSlot());
1856 PrepareForBailoutForId(expr->AssignmentId(), TOS_REG);
1857 context()->Plug(v0);
1858 break;
1859 case NAMED_PROPERTY:
1860 EmitNamedPropertyAssignment(expr);
1861 break;
1862 case NAMED_SUPER_PROPERTY:
1863 EmitNamedSuperPropertyStore(property);
1864 context()->Plug(v0);
1865 break;
1866 case KEYED_SUPER_PROPERTY:
1867 EmitKeyedSuperPropertyStore(property);
1868 context()->Plug(v0);
1869 break;
1870 case KEYED_PROPERTY:
1871 EmitKeyedPropertyAssignment(expr);
1872 break;
1873 }
1874}
1875
1876
1877void FullCodeGenerator::VisitYield(Yield* expr) {
1878 Comment cmnt(masm_, "[ Yield");
1879 SetExpressionPosition(expr);
1880
1881 // Evaluate yielded value first; the initial iterator definition depends on
1882 // this. It stays on the stack while we update the iterator.
1883 VisitForStackValue(expr->expression());
1884
1885 switch (expr->yield_kind()) {
1886 case Yield::kSuspend:
1887 // Pop value from top-of-stack slot; box result into result register.
1888 EmitCreateIteratorResult(false);
1889 __ push(result_register());
1890 // Fall through.
1891 case Yield::kInitial: {
1892 Label suspend, continuation, post_runtime, resume;
1893
1894 __ jmp(&suspend);
1895 __ bind(&continuation);
Ben Murdoch097c5b22016-05-18 11:27:45 +01001896 // When we arrive here, the stack top is the resume mode and
1897 // result_register() holds the input value (the argument given to the
1898 // respective resume operation).
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00001899 __ RecordGeneratorContinuation();
Ben Murdoch097c5b22016-05-18 11:27:45 +01001900 __ pop(a1);
1901 __ Branch(&resume, ne, a1,
1902 Operand(Smi::FromInt(JSGeneratorObject::RETURN)));
1903 __ push(result_register());
1904 EmitCreateIteratorResult(true);
1905 EmitUnwindAndReturn();
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00001906
1907 __ bind(&suspend);
1908 VisitForAccumulatorValue(expr->generator_object());
1909 DCHECK(continuation.pos() > 0 && Smi::IsValid(continuation.pos()));
1910 __ li(a1, Operand(Smi::FromInt(continuation.pos())));
1911 __ sw(a1, FieldMemOperand(v0, JSGeneratorObject::kContinuationOffset));
1912 __ sw(cp, FieldMemOperand(v0, JSGeneratorObject::kContextOffset));
1913 __ mov(a1, cp);
1914 __ RecordWriteField(v0, JSGeneratorObject::kContextOffset, a1, a2,
1915 kRAHasBeenSaved, kDontSaveFPRegs);
1916 __ Addu(a1, fp, Operand(StandardFrameConstants::kExpressionsOffset));
1917 __ Branch(&post_runtime, eq, sp, Operand(a1));
1918 __ push(v0); // generator object
1919 __ CallRuntime(Runtime::kSuspendJSGeneratorObject, 1);
1920 __ lw(cp, MemOperand(fp, StandardFrameConstants::kContextOffset));
1921 __ bind(&post_runtime);
Ben Murdoch097c5b22016-05-18 11:27:45 +01001922 PopOperand(result_register());
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00001923 EmitReturnSequence();
1924
1925 __ bind(&resume);
1926 context()->Plug(result_register());
1927 break;
1928 }
1929
1930 case Yield::kFinal: {
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00001931 // Pop value from top-of-stack slot, box result into result register.
Ben Murdoch097c5b22016-05-18 11:27:45 +01001932 OperandStackDepthDecrement(1);
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00001933 EmitCreateIteratorResult(true);
Ben Murdoch097c5b22016-05-18 11:27:45 +01001934 EmitUnwindAndReturn();
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00001935 break;
1936 }
1937
Ben Murdoch097c5b22016-05-18 11:27:45 +01001938 case Yield::kDelegating:
1939 UNREACHABLE();
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00001940 }
1941}
1942
1943
1944void FullCodeGenerator::EmitGeneratorResume(Expression *generator,
1945 Expression *value,
1946 JSGeneratorObject::ResumeMode resume_mode) {
1947 // The value stays in a0, and is ultimately read by the resumed generator, as
1948 // if CallRuntime(Runtime::kSuspendJSGeneratorObject) returned it. Or it
1949 // is read to throw the value when the resumed generator is already closed.
1950 // a1 will hold the generator object until the activation has been resumed.
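  // Conceptually (illustrative), resuming a suspended generator supplies a
  // value and a resume mode (NEXT, RETURN or THROW); the code below stores
  // that value into the generator object and re-enters the suspended frame.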
1951 VisitForStackValue(generator);
1952 VisitForAccumulatorValue(value);
Ben Murdoch097c5b22016-05-18 11:27:45 +01001953 PopOperand(a1);
1954
1955 // Store input value into generator object.
1956 __ sw(result_register(),
1957 FieldMemOperand(a1, JSGeneratorObject::kInputOffset));
1958 __ mov(a2, result_register());
1959 __ RecordWriteField(a1, JSGeneratorObject::kInputOffset, a2, a3,
1960 kRAHasBeenSaved, kDontSaveFPRegs);
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00001961
1962 // Load suspended function and context.
1963 __ lw(cp, FieldMemOperand(a1, JSGeneratorObject::kContextOffset));
1964 __ lw(t0, FieldMemOperand(a1, JSGeneratorObject::kFunctionOffset));
1965
1966 // Load receiver and store as the first argument.
1967 __ lw(a2, FieldMemOperand(a1, JSGeneratorObject::kReceiverOffset));
1968 __ push(a2);
1969
1970 // Push holes for the rest of the arguments to the generator function.
1971 __ lw(a3, FieldMemOperand(t0, JSFunction::kSharedFunctionInfoOffset));
1972 __ lw(a3,
1973 FieldMemOperand(a3, SharedFunctionInfo::kFormalParameterCountOffset));
1974 __ LoadRoot(a2, Heap::kTheHoleValueRootIndex);
1975 Label push_argument_holes, push_frame;
1976 __ bind(&push_argument_holes);
1977 __ Subu(a3, a3, Operand(Smi::FromInt(1)));
1978 __ Branch(&push_frame, lt, a3, Operand(zero_reg));
1979 __ push(a2);
1980 __ jmp(&push_argument_holes);
1981
1982 // Enter a new JavaScript frame, and initialize its slots as they were when
1983 // the generator was suspended.
1984 Label resume_frame, done;
1985 __ bind(&push_frame);
1986 __ Call(&resume_frame);
1987 __ jmp(&done);
1988 __ bind(&resume_frame);
1989 // ra = return address.
1990 // fp = caller's frame pointer.
1991 // cp = callee's context,
1992 // t0 = callee's JS function.
1993 __ Push(ra, fp, cp, t0);
1994 // Adjust FP to point to saved FP.
1995 __ Addu(fp, sp, 2 * kPointerSize);
1996
1997 // Load the operand stack size.
1998 __ lw(a3, FieldMemOperand(a1, JSGeneratorObject::kOperandStackOffset));
1999 __ lw(a3, FieldMemOperand(a3, FixedArray::kLengthOffset));
2000 __ SmiUntag(a3);
2001
2002 // If we are sending a value and there is no operand stack, we can jump back
2003 // in directly.
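  // E.g. (illustrative) a 'yield' with no temporaries live across it leaves
  // an empty operand stack, so a NEXT resume can jump straight back into the
  // generator's code without reconstructing any expression stack slots.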
2004 if (resume_mode == JSGeneratorObject::NEXT) {
2005 Label slow_resume;
2006 __ Branch(&slow_resume, ne, a3, Operand(zero_reg));
2007 __ lw(a3, FieldMemOperand(t0, JSFunction::kCodeEntryOffset));
2008 __ lw(a2, FieldMemOperand(a1, JSGeneratorObject::kContinuationOffset));
2009 __ SmiUntag(a2);
2010 __ Addu(a3, a3, Operand(a2));
2011 __ li(a2, Operand(Smi::FromInt(JSGeneratorObject::kGeneratorExecuting)));
2012 __ sw(a2, FieldMemOperand(a1, JSGeneratorObject::kContinuationOffset));
Ben Murdoch097c5b22016-05-18 11:27:45 +01002013 __ Push(Smi::FromInt(resume_mode)); // Consumed in continuation.
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00002014 __ Jump(a3);
2015 __ bind(&slow_resume);
2016 }
2017
2018 // Otherwise, we push holes for the operand stack and call the runtime to fix
2019 // up the stack and the handlers.
2020 Label push_operand_holes, call_resume;
2021 __ bind(&push_operand_holes);
2022 __ Subu(a3, a3, Operand(1));
2023 __ Branch(&call_resume, lt, a3, Operand(zero_reg));
2024 __ push(a2);
2025 __ Branch(&push_operand_holes);
2026 __ bind(&call_resume);
Ben Murdoch097c5b22016-05-18 11:27:45 +01002027 __ Push(Smi::FromInt(resume_mode)); // Consumed in continuation.
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00002028 DCHECK(!result_register().is(a1));
2029 __ Push(a1, result_register());
2030 __ Push(Smi::FromInt(resume_mode));
2031 __ CallRuntime(Runtime::kResumeJSGeneratorObject);
2032 // Not reached: the runtime call returns elsewhere.
2033 __ stop("not-reached");
2034
2035 __ bind(&done);
2036 context()->Plug(result_register());
2037}
2038
Ben Murdoch097c5b22016-05-18 11:27:45 +01002039void FullCodeGenerator::PushOperands(Register reg1, Register reg2) {
2040 OperandStackDepthIncrement(2);
2041 __ Push(reg1, reg2);
2042}
2043
2044void FullCodeGenerator::PushOperands(Register reg1, Register reg2,
2045 Register reg3) {
2046 OperandStackDepthIncrement(3);
2047 __ Push(reg1, reg2, reg3);
2048}
2049
2050void FullCodeGenerator::PushOperands(Register reg1, Register reg2,
2051 Register reg3, Register reg4) {
2052 OperandStackDepthIncrement(4);
2053 __ Push(reg1, reg2, reg3, reg4);
2054}
2055
2056void FullCodeGenerator::PopOperands(Register reg1, Register reg2) {
2057 OperandStackDepthDecrement(2);
2058 __ Pop(reg1, reg2);
2059}
2060
2061void FullCodeGenerator::EmitOperandStackDepthCheck() {
2062 if (FLAG_debug_code) {
2063 int expected_diff = StandardFrameConstants::kFixedFrameSizeFromFp +
2064 operand_stack_depth_ * kPointerSize;
2065 __ Subu(v0, fp, sp);
2066 __ Assert(eq, kUnexpectedStackDepth, v0, Operand(expected_diff));
2067 }
2068}
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00002069
2070void FullCodeGenerator::EmitCreateIteratorResult(bool done) {
2071 Label allocate, done_allocate;
2072
2073 __ Allocate(JSIteratorResult::kSize, v0, a2, a3, &allocate, TAG_OBJECT);
2074 __ jmp(&done_allocate);
2075
2076 __ bind(&allocate);
2077 __ Push(Smi::FromInt(JSIteratorResult::kSize));
2078 __ CallRuntime(Runtime::kAllocateInNewSpace);
2079
2080 __ bind(&done_allocate);
2081 __ LoadNativeContextSlot(Context::ITERATOR_RESULT_MAP_INDEX, a1);
2082 __ pop(a2);
2083 __ LoadRoot(a3,
2084 done ? Heap::kTrueValueRootIndex : Heap::kFalseValueRootIndex);
2085 __ LoadRoot(t0, Heap::kEmptyFixedArrayRootIndex);
2086 __ sw(a1, FieldMemOperand(v0, HeapObject::kMapOffset));
2087 __ sw(t0, FieldMemOperand(v0, JSObject::kPropertiesOffset));
2088 __ sw(t0, FieldMemOperand(v0, JSObject::kElementsOffset));
2089 __ sw(a2, FieldMemOperand(v0, JSIteratorResult::kValueOffset));
2090 __ sw(a3, FieldMemOperand(v0, JSIteratorResult::kDoneOffset));
2091 STATIC_ASSERT(JSIteratorResult::kSize == 5 * kPointerSize);
2092}
2093
2094
2095void FullCodeGenerator::EmitNamedPropertyLoad(Property* prop) {
2096 SetExpressionPosition(prop);
2097 Literal* key = prop->key()->AsLiteral();
2098 DCHECK(!prop->IsSuperAccess());
2099
2100 __ li(LoadDescriptor::NameRegister(), Operand(key->value()));
2101 __ li(LoadDescriptor::SlotRegister(),
2102 Operand(SmiFromSlot(prop->PropertyFeedbackSlot())));
Ben Murdoch097c5b22016-05-18 11:27:45 +01002103 CallLoadIC(NOT_INSIDE_TYPEOF);
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00002104}
2105
2106
2107void FullCodeGenerator::EmitInlineSmiBinaryOp(BinaryOperation* expr,
2108 Token::Value op,
2109 Expression* left_expr,
2110 Expression* right_expr) {
2111 Label done, smi_case, stub_call;
2112
2113 Register scratch1 = a2;
2114 Register scratch2 = a3;
2115
2116 // Get the arguments.
2117 Register left = a1;
2118 Register right = a0;
Ben Murdoch097c5b22016-05-18 11:27:45 +01002119 PopOperand(left);
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00002120 __ mov(a0, result_register());
2121
2122 // Perform combined smi check on both operands.
2123 __ Or(scratch1, left, Operand(right));
2124 STATIC_ASSERT(kSmiTag == 0);
2125 JumpPatchSite patch_site(masm_);
2126 patch_site.EmitJumpIfSmi(scratch1, &smi_case);
2127
2128 __ bind(&stub_call);
Ben Murdoch097c5b22016-05-18 11:27:45 +01002129 Handle<Code> code = CodeFactory::BinaryOpIC(isolate(), op).code();
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00002130 CallIC(code, expr->BinaryOperationFeedbackId());
2131 patch_site.EmitPatchInfo();
2132 __ jmp(&done);
2133
2134 __ bind(&smi_case);
2135  // Smi case. This code works the same way as the smi-smi case in the
2136  // type-recording binary operation stub.
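  // For example (illustrative), for 'a + b' with two smi operands the
  // Token::ADD case below adds them inline and branches back to &stub_call
  // (the BinaryOpIC) on overflow.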
2137 switch (op) {
2138 case Token::SAR:
2139 __ GetLeastBitsFromSmi(scratch1, right, 5);
2140 __ srav(right, left, scratch1);
2141 __ And(v0, right, Operand(~kSmiTagMask));
2142 break;
2143 case Token::SHL: {
2144 __ SmiUntag(scratch1, left);
2145 __ GetLeastBitsFromSmi(scratch2, right, 5);
2146 __ sllv(scratch1, scratch1, scratch2);
2147 __ Addu(scratch2, scratch1, Operand(0x40000000));
2148 __ Branch(&stub_call, lt, scratch2, Operand(zero_reg));
2149 __ SmiTag(v0, scratch1);
2150 break;
2151 }
2152 case Token::SHR: {
2153 __ SmiUntag(scratch1, left);
2154 __ GetLeastBitsFromSmi(scratch2, right, 5);
2155 __ srlv(scratch1, scratch1, scratch2);
2156 __ And(scratch2, scratch1, 0xc0000000);
2157 __ Branch(&stub_call, ne, scratch2, Operand(zero_reg));
2158 __ SmiTag(v0, scratch1);
2159 break;
2160 }
2161 case Token::ADD:
2162 __ AddBranchOvf(v0, left, Operand(right), &stub_call);
2163 break;
2164 case Token::SUB:
2165 __ SubBranchOvf(v0, left, Operand(right), &stub_call);
2166 break;
2167 case Token::MUL: {
2168 __ SmiUntag(scratch1, right);
2169 __ Mul(scratch2, v0, left, scratch1);
2170 __ sra(scratch1, v0, 31);
2171 __ Branch(&stub_call, ne, scratch1, Operand(scratch2));
2172 __ Branch(&done, ne, v0, Operand(zero_reg));
2173 __ Addu(scratch2, right, left);
2174 __ Branch(&stub_call, lt, scratch2, Operand(zero_reg));
2175 DCHECK(Smi::FromInt(0) == 0);
2176 __ mov(v0, zero_reg);
2177 break;
2178 }
2179 case Token::BIT_OR:
2180 __ Or(v0, left, Operand(right));
2181 break;
2182 case Token::BIT_AND:
2183 __ And(v0, left, Operand(right));
2184 break;
2185 case Token::BIT_XOR:
2186 __ Xor(v0, left, Operand(right));
2187 break;
2188 default:
2189 UNREACHABLE();
2190 }
2191
2192 __ bind(&done);
2193 context()->Plug(v0);
2194}
2195
2196
2197void FullCodeGenerator::EmitClassDefineProperties(ClassLiteral* lit) {
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00002198 for (int i = 0; i < lit->properties()->length(); i++) {
2199 ObjectLiteral::Property* property = lit->properties()->at(i);
2200 Expression* value = property->value();
2201
Ben Murdoch097c5b22016-05-18 11:27:45 +01002202 Register scratch = a1;
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00002203 if (property->is_static()) {
2204 __ lw(scratch, MemOperand(sp, kPointerSize)); // constructor
2205 } else {
2206 __ lw(scratch, MemOperand(sp, 0)); // prototype
2207 }
Ben Murdoch097c5b22016-05-18 11:27:45 +01002208 PushOperand(scratch);
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00002209 EmitPropertyKey(property, lit->GetIdForProperty(i));
2210
2211    // The static 'prototype' property is read-only. We handle the non-computed
2212    // property name case in the parser. Since this is the only case where we
2213    // need to check for an own read-only property, we special-case it here so
2214    // we do not need to do this check for every property.
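    // For example (hypothetical snippet), in
    //   class C { static ['proto' + 'type']() {} }
    // the offending name is only known at runtime, so the check is done here
    // via Runtime::kThrowIfStaticPrototype instead of in the parser.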
2215 if (property->is_static() && property->is_computed_name()) {
2216 __ CallRuntime(Runtime::kThrowIfStaticPrototype);
2217 __ push(v0);
2218 }
2219
2220 VisitForStackValue(value);
2221 if (NeedsHomeObject(value)) {
2222 EmitSetHomeObject(value, 2, property->GetSlot());
2223 }
2224
2225 switch (property->kind()) {
2226 case ObjectLiteral::Property::CONSTANT:
2227 case ObjectLiteral::Property::MATERIALIZED_LITERAL:
2228 case ObjectLiteral::Property::PROTOTYPE:
2229 UNREACHABLE();
2230 case ObjectLiteral::Property::COMPUTED:
Ben Murdoch097c5b22016-05-18 11:27:45 +01002231 PushOperand(Smi::FromInt(DONT_ENUM));
2232 PushOperand(Smi::FromInt(property->NeedsSetFunctionName()));
2233 CallRuntimeWithOperands(Runtime::kDefineDataPropertyInLiteral);
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00002234 break;
2235
2236 case ObjectLiteral::Property::GETTER:
Ben Murdoch097c5b22016-05-18 11:27:45 +01002237 PushOperand(Smi::FromInt(DONT_ENUM));
2238 CallRuntimeWithOperands(Runtime::kDefineGetterPropertyUnchecked);
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00002239 break;
2240
2241 case ObjectLiteral::Property::SETTER:
Ben Murdoch097c5b22016-05-18 11:27:45 +01002242 PushOperand(Smi::FromInt(DONT_ENUM));
2243 CallRuntimeWithOperands(Runtime::kDefineSetterPropertyUnchecked);
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00002244 break;
2245
2246 default:
2247 UNREACHABLE();
2248 }
2249 }
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00002250}
2251
2252
2253void FullCodeGenerator::EmitBinaryOp(BinaryOperation* expr, Token::Value op) {
2254 __ mov(a0, result_register());
Ben Murdoch097c5b22016-05-18 11:27:45 +01002255 PopOperand(a1);
2256 Handle<Code> code = CodeFactory::BinaryOpIC(isolate(), op).code();
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00002257 JumpPatchSite patch_site(masm_); // unbound, signals no inlined smi code.
2258 CallIC(code, expr->BinaryOperationFeedbackId());
2259 patch_site.EmitPatchInfo();
2260 context()->Plug(v0);
2261}
2262
2263
2264void FullCodeGenerator::EmitAssignment(Expression* expr,
2265 FeedbackVectorSlot slot) {
2266 DCHECK(expr->IsValidReferenceExpressionOrThis());
2267
2268 Property* prop = expr->AsProperty();
2269 LhsKind assign_type = Property::GetAssignType(prop);
2270
2271 switch (assign_type) {
2272 case VARIABLE: {
2273 Variable* var = expr->AsVariableProxy()->var();
2274 EffectContext context(this);
2275 EmitVariableAssignment(var, Token::ASSIGN, slot);
2276 break;
2277 }
2278 case NAMED_PROPERTY: {
Ben Murdoch097c5b22016-05-18 11:27:45 +01002279 PushOperand(result_register()); // Preserve value.
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00002280 VisitForAccumulatorValue(prop->obj());
2281 __ mov(StoreDescriptor::ReceiverRegister(), result_register());
Ben Murdoch097c5b22016-05-18 11:27:45 +01002282 PopOperand(StoreDescriptor::ValueRegister()); // Restore value.
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00002283 __ li(StoreDescriptor::NameRegister(),
2284 Operand(prop->key()->AsLiteral()->value()));
2285 EmitLoadStoreICSlot(slot);
2286 CallStoreIC();
2287 break;
2288 }
2289 case NAMED_SUPER_PROPERTY: {
Ben Murdoch097c5b22016-05-18 11:27:45 +01002290 PushOperand(v0);
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00002291 VisitForStackValue(prop->obj()->AsSuperPropertyReference()->this_var());
2292 VisitForAccumulatorValue(
2293 prop->obj()->AsSuperPropertyReference()->home_object());
2294 // stack: value, this; v0: home_object
2295 Register scratch = a2;
2296 Register scratch2 = a3;
2297 __ mov(scratch, result_register()); // home_object
2298 __ lw(v0, MemOperand(sp, kPointerSize)); // value
2299 __ lw(scratch2, MemOperand(sp, 0)); // this
2300 __ sw(scratch2, MemOperand(sp, kPointerSize)); // this
2301 __ sw(scratch, MemOperand(sp, 0)); // home_object
2302 // stack: this, home_object; v0: value
2303 EmitNamedSuperPropertyStore(prop);
2304 break;
2305 }
2306 case KEYED_SUPER_PROPERTY: {
Ben Murdoch097c5b22016-05-18 11:27:45 +01002307 PushOperand(v0);
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00002308 VisitForStackValue(prop->obj()->AsSuperPropertyReference()->this_var());
2309 VisitForStackValue(
2310 prop->obj()->AsSuperPropertyReference()->home_object());
2311 VisitForAccumulatorValue(prop->key());
2312 Register scratch = a2;
2313 Register scratch2 = a3;
2314 __ lw(scratch2, MemOperand(sp, 2 * kPointerSize)); // value
2315 // stack: value, this, home_object; v0: key, a3: value
2316 __ lw(scratch, MemOperand(sp, kPointerSize)); // this
2317 __ sw(scratch, MemOperand(sp, 2 * kPointerSize));
2318 __ lw(scratch, MemOperand(sp, 0)); // home_object
2319 __ sw(scratch, MemOperand(sp, kPointerSize));
2320 __ sw(v0, MemOperand(sp, 0));
2321 __ Move(v0, scratch2);
2322 // stack: this, home_object, key; v0: value.
2323 EmitKeyedSuperPropertyStore(prop);
2324 break;
2325 }
2326 case KEYED_PROPERTY: {
Ben Murdoch097c5b22016-05-18 11:27:45 +01002327 PushOperand(result_register()); // Preserve value.
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00002328 VisitForStackValue(prop->obj());
2329 VisitForAccumulatorValue(prop->key());
2330 __ mov(StoreDescriptor::NameRegister(), result_register());
Ben Murdoch097c5b22016-05-18 11:27:45 +01002331 PopOperands(StoreDescriptor::ValueRegister(),
2332 StoreDescriptor::ReceiverRegister());
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00002333 EmitLoadStoreICSlot(slot);
2334 Handle<Code> ic =
2335 CodeFactory::KeyedStoreIC(isolate(), language_mode()).code();
2336 CallIC(ic);
2337 break;
2338 }
2339 }
2340 context()->Plug(v0);
2341}
2342
2343
2344void FullCodeGenerator::EmitStoreToStackLocalOrContextSlot(
2345 Variable* var, MemOperand location) {
2346 __ sw(result_register(), location);
2347 if (var->IsContextSlot()) {
2348 // RecordWrite may destroy all its register arguments.
2349 __ Move(a3, result_register());
2350 int offset = Context::SlotOffset(var->index());
2351 __ RecordWriteContextSlot(
2352 a1, offset, a3, a2, kRAHasBeenSaved, kDontSaveFPRegs);
2353 }
2354}
2355
2356
2357void FullCodeGenerator::EmitVariableAssignment(Variable* var, Token::Value op,
2358 FeedbackVectorSlot slot) {
2359 if (var->IsUnallocated()) {
2360 // Global var, const, or let.
2361 __ mov(StoreDescriptor::ValueRegister(), result_register());
2362 __ li(StoreDescriptor::NameRegister(), Operand(var->name()));
2363 __ LoadGlobalObject(StoreDescriptor::ReceiverRegister());
2364 EmitLoadStoreICSlot(slot);
2365 CallStoreIC();
2366
2367 } else if (var->mode() == LET && op != Token::INIT) {
2368 // Non-initializing assignment to let variable needs a write barrier.
2369 DCHECK(!var->IsLookupSlot());
2370 DCHECK(var->IsStackAllocated() || var->IsContextSlot());
2371 Label assign;
2372 MemOperand location = VarOperand(var, a1);
2373 __ lw(a3, location);
2374 __ LoadRoot(t0, Heap::kTheHoleValueRootIndex);
2375 __ Branch(&assign, ne, a3, Operand(t0));
2376 __ li(a3, Operand(var->name()));
2377 __ push(a3);
2378 __ CallRuntime(Runtime::kThrowReferenceError);
2379 // Perform the assignment.
2380 __ bind(&assign);
2381 EmitStoreToStackLocalOrContextSlot(var, location);
2382
2383 } else if (var->mode() == CONST && op != Token::INIT) {
2384 // Assignment to const variable needs a write barrier.
2385 DCHECK(!var->IsLookupSlot());
2386 DCHECK(var->IsStackAllocated() || var->IsContextSlot());
2387 Label const_error;
2388 MemOperand location = VarOperand(var, a1);
2389 __ lw(a3, location);
2390 __ LoadRoot(at, Heap::kTheHoleValueRootIndex);
2391 __ Branch(&const_error, ne, a3, Operand(at));
2392 __ li(a3, Operand(var->name()));
2393 __ push(a3);
2394 __ CallRuntime(Runtime::kThrowReferenceError);
2395 __ bind(&const_error);
2396 __ CallRuntime(Runtime::kThrowConstAssignError);
2397
2398 } else if (var->is_this() && var->mode() == CONST && op == Token::INIT) {
2399 // Initializing assignment to const {this} needs a write barrier.
2400 DCHECK(var->IsStackAllocated() || var->IsContextSlot());
2401 Label uninitialized_this;
2402 MemOperand location = VarOperand(var, a1);
2403 __ lw(a3, location);
2404 __ LoadRoot(at, Heap::kTheHoleValueRootIndex);
2405 __ Branch(&uninitialized_this, eq, a3, Operand(at));
2406 __ li(a0, Operand(var->name()));
2407 __ Push(a0);
2408 __ CallRuntime(Runtime::kThrowReferenceError);
2409 __ bind(&uninitialized_this);
2410 EmitStoreToStackLocalOrContextSlot(var, location);
2411
2412 } else if (!var->is_const_mode() ||
2413 (var->mode() == CONST && op == Token::INIT)) {
2414 if (var->IsLookupSlot()) {
2415 // Assignment to var.
Ben Murdoch097c5b22016-05-18 11:27:45 +01002416 __ Push(var->name());
2417 __ Push(v0);
2418 __ CallRuntime(is_strict(language_mode())
2419 ? Runtime::kStoreLookupSlot_Strict
2420 : Runtime::kStoreLookupSlot_Sloppy);
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00002421 } else {
2422 // Assignment to var or initializing assignment to let/const in harmony
2423 // mode.
2424 DCHECK((var->IsStackAllocated() || var->IsContextSlot()));
2425 MemOperand location = VarOperand(var, a1);
Ben Murdoch097c5b22016-05-18 11:27:45 +01002426 if (FLAG_debug_code && var->mode() == LET && op == Token::INIT) {
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00002427 // Check for an uninitialized let binding.
2428 __ lw(a2, location);
2429 __ LoadRoot(t0, Heap::kTheHoleValueRootIndex);
2430 __ Check(eq, kLetBindingReInitialization, a2, Operand(t0));
2431 }
2432 EmitStoreToStackLocalOrContextSlot(var, location);
2433 }
2434
2435 } else if (var->mode() == CONST_LEGACY && op == Token::INIT) {
2436 // Const initializers need a write barrier.
2437 DCHECK(!var->IsParameter()); // No const parameters.
2438 if (var->IsLookupSlot()) {
2439 __ li(a0, Operand(var->name()));
2440 __ Push(v0, cp, a0); // Context and name.
2441 __ CallRuntime(Runtime::kInitializeLegacyConstLookupSlot);
2442 } else {
2443 DCHECK(var->IsStackAllocated() || var->IsContextSlot());
2444 Label skip;
2445 MemOperand location = VarOperand(var, a1);
2446 __ lw(a2, location);
2447 __ LoadRoot(at, Heap::kTheHoleValueRootIndex);
2448 __ Branch(&skip, ne, a2, Operand(at));
2449 EmitStoreToStackLocalOrContextSlot(var, location);
2450 __ bind(&skip);
2451 }
2452
2453 } else {
2454 DCHECK(var->mode() == CONST_LEGACY && op != Token::INIT);
2455 if (is_strict(language_mode())) {
2456 __ CallRuntime(Runtime::kThrowConstAssignError);
2457 }
2458 // Silently ignore store in sloppy mode.
2459 }
2460}
2461
2462
2463void FullCodeGenerator::EmitNamedPropertyAssignment(Assignment* expr) {
2464 // Assignment to a property, using a named store IC.
2465 Property* prop = expr->target()->AsProperty();
2466 DCHECK(prop != NULL);
2467 DCHECK(prop->key()->IsLiteral());
2468
2469 __ mov(StoreDescriptor::ValueRegister(), result_register());
2470 __ li(StoreDescriptor::NameRegister(),
2471 Operand(prop->key()->AsLiteral()->value()));
Ben Murdoch097c5b22016-05-18 11:27:45 +01002472 PopOperand(StoreDescriptor::ReceiverRegister());
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00002473 EmitLoadStoreICSlot(expr->AssignmentSlot());
2474 CallStoreIC();
2475
2476 PrepareForBailoutForId(expr->AssignmentId(), TOS_REG);
2477 context()->Plug(v0);
2478}
2479
2480
2481void FullCodeGenerator::EmitNamedSuperPropertyStore(Property* prop) {
2482 // Assignment to named property of super.
2483 // v0 : value
2484 // stack : receiver ('this'), home_object
2485 DCHECK(prop != NULL);
2486 Literal* key = prop->key()->AsLiteral();
2487 DCHECK(key != NULL);
2488
Ben Murdoch097c5b22016-05-18 11:27:45 +01002489 PushOperand(key->value());
2490 PushOperand(v0);
2491 CallRuntimeWithOperands(is_strict(language_mode())
2492 ? Runtime::kStoreToSuper_Strict
2493 : Runtime::kStoreToSuper_Sloppy);
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00002494}
2495
2496
2497void FullCodeGenerator::EmitKeyedSuperPropertyStore(Property* prop) {
2498 // Assignment to named property of super.
2499 // v0 : value
2500 // stack : receiver ('this'), home_object, key
2501 DCHECK(prop != NULL);
2502
Ben Murdoch097c5b22016-05-18 11:27:45 +01002503 PushOperand(v0);
2504 CallRuntimeWithOperands(is_strict(language_mode())
2505 ? Runtime::kStoreKeyedToSuper_Strict
2506 : Runtime::kStoreKeyedToSuper_Sloppy);
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00002507}
2508
2509
2510void FullCodeGenerator::EmitKeyedPropertyAssignment(Assignment* expr) {
2511 // Assignment to a property, using a keyed store IC.
2512 // Call keyed store IC.
2513 // The arguments are:
2514 // - a0 is the value,
2515 // - a1 is the key,
2516 // - a2 is the receiver.
2517 __ mov(StoreDescriptor::ValueRegister(), result_register());
Ben Murdoch097c5b22016-05-18 11:27:45 +01002518 PopOperands(StoreDescriptor::ReceiverRegister(),
2519 StoreDescriptor::NameRegister());
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00002520 DCHECK(StoreDescriptor::ValueRegister().is(a0));
2521
2522 Handle<Code> ic =
2523 CodeFactory::KeyedStoreIC(isolate(), language_mode()).code();
2524 EmitLoadStoreICSlot(expr->AssignmentSlot());
2525 CallIC(ic);
2526
2527 PrepareForBailoutForId(expr->AssignmentId(), TOS_REG);
2528 context()->Plug(v0);
2529}
2530
2531
2532void FullCodeGenerator::VisitProperty(Property* expr) {
2533 Comment cmnt(masm_, "[ Property");
2534 SetExpressionPosition(expr);
2535
2536 Expression* key = expr->key();
2537
2538 if (key->IsPropertyName()) {
2539 if (!expr->IsSuperAccess()) {
2540 VisitForAccumulatorValue(expr->obj());
2541 __ Move(LoadDescriptor::ReceiverRegister(), v0);
2542 EmitNamedPropertyLoad(expr);
2543 } else {
2544 VisitForStackValue(expr->obj()->AsSuperPropertyReference()->this_var());
2545 VisitForStackValue(
2546 expr->obj()->AsSuperPropertyReference()->home_object());
2547 EmitNamedSuperPropertyLoad(expr);
2548 }
2549 } else {
2550 if (!expr->IsSuperAccess()) {
2551 VisitForStackValue(expr->obj());
2552 VisitForAccumulatorValue(expr->key());
2553 __ Move(LoadDescriptor::NameRegister(), v0);
Ben Murdoch097c5b22016-05-18 11:27:45 +01002554 PopOperand(LoadDescriptor::ReceiverRegister());
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00002555 EmitKeyedPropertyLoad(expr);
2556 } else {
2557 VisitForStackValue(expr->obj()->AsSuperPropertyReference()->this_var());
2558 VisitForStackValue(
2559 expr->obj()->AsSuperPropertyReference()->home_object());
2560 VisitForStackValue(expr->key());
2561 EmitKeyedSuperPropertyLoad(expr);
2562 }
2563 }
2564 PrepareForBailoutForId(expr->LoadId(), TOS_REG);
2565 context()->Plug(v0);
2566}
2567
2568
2569void FullCodeGenerator::CallIC(Handle<Code> code,
2570 TypeFeedbackId id) {
2571 ic_total_count_++;
2572 __ Call(code, RelocInfo::CODE_TARGET, id);
2573}
2574
2575
2576// Code common for calls using the IC.
2577void FullCodeGenerator::EmitCallWithLoadIC(Call* expr) {
2578 Expression* callee = expr->expression();
2579
2580 // Get the target function.
2581 ConvertReceiverMode convert_mode;
2582 if (callee->IsVariableProxy()) {
2583 { StackValueContext context(this);
2584 EmitVariableLoad(callee->AsVariableProxy());
2585 PrepareForBailout(callee, NO_REGISTERS);
2586 }
2587 // Push undefined as receiver. This is patched in the method prologue if it
2588 // is a sloppy mode method.
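    // E.g. (illustrative) for a plain call 'f()' the receiver slot starts out
    // as undefined; a sloppy-mode callee replaces it with the global proxy in
    // its own prologue.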
2589 __ LoadRoot(at, Heap::kUndefinedValueRootIndex);
Ben Murdoch097c5b22016-05-18 11:27:45 +01002590 PushOperand(at);
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00002591 convert_mode = ConvertReceiverMode::kNullOrUndefined;
2592 } else {
2593 // Load the function from the receiver.
2594 DCHECK(callee->IsProperty());
2595 DCHECK(!callee->AsProperty()->IsSuperAccess());
2596 __ lw(LoadDescriptor::ReceiverRegister(), MemOperand(sp, 0));
2597 EmitNamedPropertyLoad(callee->AsProperty());
2598 PrepareForBailoutForId(callee->AsProperty()->LoadId(), TOS_REG);
2599 // Push the target function under the receiver.
2600 __ lw(at, MemOperand(sp, 0));
Ben Murdoch097c5b22016-05-18 11:27:45 +01002601 PushOperand(at);
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00002602 __ sw(v0, MemOperand(sp, kPointerSize));
2603 convert_mode = ConvertReceiverMode::kNotNullOrUndefined;
2604 }
2605
2606 EmitCall(expr, convert_mode);
2607}
2608
2609
2610void FullCodeGenerator::EmitSuperCallWithLoadIC(Call* expr) {
2611 SetExpressionPosition(expr);
2612 Expression* callee = expr->expression();
2613 DCHECK(callee->IsProperty());
2614 Property* prop = callee->AsProperty();
2615 DCHECK(prop->IsSuperAccess());
2616
2617 Literal* key = prop->key()->AsLiteral();
2618 DCHECK(!key->value()->IsSmi());
2619 // Load the function from the receiver.
2620 const Register scratch = a1;
2621 SuperPropertyReference* super_ref = prop->obj()->AsSuperPropertyReference();
2622 VisitForAccumulatorValue(super_ref->home_object());
2623 __ mov(scratch, v0);
2624 VisitForAccumulatorValue(super_ref->this_var());
Ben Murdoch097c5b22016-05-18 11:27:45 +01002625 PushOperands(scratch, v0, v0, scratch);
2626 PushOperand(key->value());
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00002627
2628 // Stack here:
2629 // - home_object
2630 // - this (receiver)
2631 // - this (receiver) <-- LoadFromSuper will pop here and below.
2632 // - home_object
2633 // - key
Ben Murdoch097c5b22016-05-18 11:27:45 +01002634 CallRuntimeWithOperands(Runtime::kLoadFromSuper);
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00002635
2636 // Replace home_object with target function.
2637 __ sw(v0, MemOperand(sp, kPointerSize));
2638
2639 // Stack here:
2640 // - target function
2641 // - this (receiver)
2642 EmitCall(expr);
2643}
2644
2645
2646// Code common for calls using the IC.
2647void FullCodeGenerator::EmitKeyedCallWithLoadIC(Call* expr,
2648 Expression* key) {
2649 // Load the key.
2650 VisitForAccumulatorValue(key);
2651
2652 Expression* callee = expr->expression();
2653
2654 // Load the function from the receiver.
2655 DCHECK(callee->IsProperty());
2656 __ lw(LoadDescriptor::ReceiverRegister(), MemOperand(sp, 0));
2657 __ Move(LoadDescriptor::NameRegister(), v0);
2658 EmitKeyedPropertyLoad(callee->AsProperty());
2659 PrepareForBailoutForId(callee->AsProperty()->LoadId(), TOS_REG);
2660
2661 // Push the target function under the receiver.
2662 __ lw(at, MemOperand(sp, 0));
Ben Murdoch097c5b22016-05-18 11:27:45 +01002663 PushOperand(at);
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00002664 __ sw(v0, MemOperand(sp, kPointerSize));
2665
2666 EmitCall(expr, ConvertReceiverMode::kNotNullOrUndefined);
2667}
2668
2669
2670void FullCodeGenerator::EmitKeyedSuperCallWithLoadIC(Call* expr) {
2671 Expression* callee = expr->expression();
2672 DCHECK(callee->IsProperty());
2673 Property* prop = callee->AsProperty();
2674 DCHECK(prop->IsSuperAccess());
2675
2676 SetExpressionPosition(prop);
2677 // Load the function from the receiver.
2678 const Register scratch = a1;
2679 SuperPropertyReference* super_ref = prop->obj()->AsSuperPropertyReference();
2680 VisitForAccumulatorValue(super_ref->home_object());
2681 __ Move(scratch, v0);
2682 VisitForAccumulatorValue(super_ref->this_var());
Ben Murdoch097c5b22016-05-18 11:27:45 +01002683 PushOperands(scratch, v0, v0, scratch);
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00002684 VisitForStackValue(prop->key());
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00002685
2686 // Stack here:
2687 // - home_object
2688 // - this (receiver)
2689 // - this (receiver) <-- LoadKeyedFromSuper will pop here and below.
2690 // - home_object
2691 // - key
Ben Murdoch097c5b22016-05-18 11:27:45 +01002692 CallRuntimeWithOperands(Runtime::kLoadKeyedFromSuper);
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00002693
2694 // Replace home_object with target function.
2695 __ sw(v0, MemOperand(sp, kPointerSize));
2696
2697 // Stack here:
2698 // - target function
2699 // - this (receiver)
2700 EmitCall(expr);
2701}
2702
2703
2704void FullCodeGenerator::EmitCall(Call* expr, ConvertReceiverMode mode) {
2705 // Load the arguments.
2706 ZoneList<Expression*>* args = expr->arguments();
2707 int arg_count = args->length();
2708 for (int i = 0; i < arg_count; i++) {
2709 VisitForStackValue(args->at(i));
2710 }
2711
2712 PrepareForBailoutForId(expr->CallId(), NO_REGISTERS);
2713 // Record source position of the IC call.
2714 SetCallPosition(expr);
Ben Murdoch097c5b22016-05-18 11:27:45 +01002715 if (expr->tail_call_mode() == TailCallMode::kAllow) {
2716 if (FLAG_trace) {
2717 __ CallRuntime(Runtime::kTraceTailCall);
2718 }
2719 // Update profiling counters before the tail call since we will
2720 // not return to this function.
2721 EmitProfilingCounterHandlingForReturnSequence(true);
2722 }
2723 Handle<Code> ic =
2724 CodeFactory::CallIC(isolate(), arg_count, mode, expr->tail_call_mode())
2725 .code();
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00002726 __ li(a3, Operand(SmiFromSlot(expr->CallFeedbackICSlot())));
2727 __ lw(a1, MemOperand(sp, (arg_count + 1) * kPointerSize));
2728 // Don't assign a type feedback id to the IC, since type feedback is provided
2729 // by the vector above.
2730 CallIC(ic);
Ben Murdoch097c5b22016-05-18 11:27:45 +01002731 OperandStackDepthDecrement(arg_count + 1);
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00002732
2733 RecordJSReturnSite(expr);
2734 // Restore context register.
2735 __ lw(cp, MemOperand(fp, StandardFrameConstants::kContextOffset));
2736 context()->DropAndPlug(1, v0);
2737}
2738
2739
2740void FullCodeGenerator::EmitResolvePossiblyDirectEval(int arg_count) {
2741 // t3: copy of the first argument or undefined if it doesn't exist.
2742 if (arg_count > 0) {
2743 __ lw(t3, MemOperand(sp, arg_count * kPointerSize));
2744 } else {
2745 __ LoadRoot(t3, Heap::kUndefinedValueRootIndex);
2746 }
2747
2748 // t2: the receiver of the enclosing function.
2749 __ lw(t2, MemOperand(fp, JavaScriptFrameConstants::kFunctionOffset));
2750
2751 // t1: the language mode.
2752 __ li(t1, Operand(Smi::FromInt(language_mode())));
2753
2754  // t0: the start position of the scope the call resides in.
2755 __ li(t0, Operand(Smi::FromInt(scope()->start_position())));
2756
2757 // Do the runtime call.
2758 __ Push(t3, t2, t1, t0);
2759 __ CallRuntime(Runtime::kResolvePossiblyDirectEval);
2760}
2761
2762
2763// See http://www.ecma-international.org/ecma-262/6.0/#sec-function-calls.
2764void FullCodeGenerator::PushCalleeAndWithBaseObject(Call* expr) {
2765 VariableProxy* callee = expr->expression()->AsVariableProxy();
2766 if (callee->var()->IsLookupSlot()) {
2767 Label slow, done;
2768
2769 SetExpressionPosition(callee);
2770 // Generate code for loading from variables potentially shadowed by
2771 // eval-introduced variables.
2772 EmitDynamicLookupFastCase(callee, NOT_INSIDE_TYPEOF, &slow, &done);
2773
2774 __ bind(&slow);
2775 // Call the runtime to find the function to call (returned in v0)
2776 // and the object holding it (returned in v1).
Ben Murdoch097c5b22016-05-18 11:27:45 +01002777 __ Push(callee->name());
2778 __ CallRuntime(Runtime::kLoadLookupSlotForCall);
2779 PushOperands(v0, v1); // Function, receiver.
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00002780 PrepareForBailoutForId(expr->LookupId(), NO_REGISTERS);
2781
2782 // If fast case code has been generated, emit code to push the
2783 // function and receiver and have the slow path jump around this
2784 // code.
2785 if (done.is_linked()) {
2786 Label call;
2787 __ Branch(&call);
2788 __ bind(&done);
2789 // Push function.
2790 __ push(v0);
2791 // The receiver is implicitly the global receiver. Indicate this
2792      // by passing undefined to the call function stub.
2793 __ LoadRoot(a1, Heap::kUndefinedValueRootIndex);
2794 __ push(a1);
2795 __ bind(&call);
2796 }
2797 } else {
2798 VisitForStackValue(callee);
2799 // refEnv.WithBaseObject()
2800 __ LoadRoot(a2, Heap::kUndefinedValueRootIndex);
Ben Murdoch097c5b22016-05-18 11:27:45 +01002801 PushOperand(a2); // Reserved receiver slot.
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00002802 }
2803}
2804
2805
2806void FullCodeGenerator::EmitPossiblyEvalCall(Call* expr) {
2807  // In a call to eval, we first call Runtime::kResolvePossiblyDirectEval
2808 // to resolve the function we need to call. Then we call the resolved
2809 // function using the given arguments.
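  // For example (illustrative), for a source-level call 'eval(src)' the
  // runtime decides whether the callee is really the global eval (a direct
  // eval that must see the caller's scope) or some unrelated function, and
  // returns the function that should actually be invoked.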
2810 ZoneList<Expression*>* args = expr->arguments();
2811 int arg_count = args->length();
2812 PushCalleeAndWithBaseObject(expr);
2813
2814 // Push the arguments.
2815 for (int i = 0; i < arg_count; i++) {
2816 VisitForStackValue(args->at(i));
2817 }
2818
2819 // Push a copy of the function (found below the arguments) and
2820 // resolve eval.
2821 __ lw(a1, MemOperand(sp, (arg_count + 1) * kPointerSize));
2822 __ push(a1);
2823 EmitResolvePossiblyDirectEval(arg_count);
2824
2825 // Touch up the stack with the resolved function.
2826 __ sw(v0, MemOperand(sp, (arg_count + 1) * kPointerSize));
2827
2828 PrepareForBailoutForId(expr->EvalId(), NO_REGISTERS);
2829 // Record source position for debugger.
2830 SetCallPosition(expr);
2831 __ lw(a1, MemOperand(sp, (arg_count + 1) * kPointerSize));
2832 __ li(a0, Operand(arg_count));
Ben Murdoch097c5b22016-05-18 11:27:45 +01002833 __ Call(isolate()->builtins()->Call(ConvertReceiverMode::kAny,
2834 expr->tail_call_mode()),
2835 RelocInfo::CODE_TARGET);
2836 OperandStackDepthDecrement(arg_count + 1);
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00002837 RecordJSReturnSite(expr);
2838 // Restore context register.
2839 __ lw(cp, MemOperand(fp, StandardFrameConstants::kContextOffset));
2840 context()->DropAndPlug(1, v0);
2841}
2842
2843
2844void FullCodeGenerator::VisitCallNew(CallNew* expr) {
2845 Comment cmnt(masm_, "[ CallNew");
2846 // According to ECMA-262, section 11.2.2, page 44, the function
2847 // expression in new calls must be evaluated before the
2848 // arguments.
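  // For example (illustrative), in 'new F(g(), h())' the expression F is
  // pushed first; g() and h() are only evaluated afterwards, left to right.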
2849
2850 // Push constructor on the stack. If it's not a function it's used as
2851 // receiver for CALL_NON_FUNCTION, otherwise the value on the stack is
2852  // ignored.
2853 DCHECK(!expr->expression()->IsSuperPropertyReference());
2854 VisitForStackValue(expr->expression());
2855
2856 // Push the arguments ("left-to-right") on the stack.
2857 ZoneList<Expression*>* args = expr->arguments();
2858 int arg_count = args->length();
2859 for (int i = 0; i < arg_count; i++) {
2860 VisitForStackValue(args->at(i));
2861 }
2862
2863 // Call the construct call builtin that handles allocation and
2864 // constructor invocation.
2865 SetConstructCallPosition(expr);
2866
2867 // Load function and argument count into a1 and a0.
2868 __ li(a0, Operand(arg_count));
2869 __ lw(a1, MemOperand(sp, arg_count * kPointerSize));
2870
2871 // Record call targets in unoptimized code.
2872 __ EmitLoadTypeFeedbackVector(a2);
2873 __ li(a3, Operand(SmiFromSlot(expr->CallNewFeedbackSlot())));
2874
2875 CallConstructStub stub(isolate());
2876 __ Call(stub.GetCode(), RelocInfo::CODE_TARGET);
Ben Murdoch097c5b22016-05-18 11:27:45 +01002877 OperandStackDepthDecrement(arg_count + 1);
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00002878 PrepareForBailoutForId(expr->ReturnId(), TOS_REG);
2879 // Restore context register.
2880 __ lw(cp, MemOperand(fp, StandardFrameConstants::kContextOffset));
2881 context()->Plug(v0);
2882}
2883
2884
2885void FullCodeGenerator::EmitSuperConstructorCall(Call* expr) {
2886 SuperCallReference* super_call_ref =
2887 expr->expression()->AsSuperCallReference();
2888 DCHECK_NOT_NULL(super_call_ref);
2889
2890 // Push the super constructor target on the stack (may be null,
2891 // but the Construct builtin can deal with that properly).
2892 VisitForAccumulatorValue(super_call_ref->this_function_var());
2893 __ AssertFunction(result_register());
2894 __ lw(result_register(),
2895 FieldMemOperand(result_register(), HeapObject::kMapOffset));
2896 __ lw(result_register(),
2897 FieldMemOperand(result_register(), Map::kPrototypeOffset));
Ben Murdoch097c5b22016-05-18 11:27:45 +01002898 PushOperand(result_register());
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00002899
2900 // Push the arguments ("left-to-right") on the stack.
2901 ZoneList<Expression*>* args = expr->arguments();
2902 int arg_count = args->length();
2903 for (int i = 0; i < arg_count; i++) {
2904 VisitForStackValue(args->at(i));
2905 }
2906
2907 // Call the construct call builtin that handles allocation and
2908 // constructor invocation.
2909 SetConstructCallPosition(expr);
2910
2911 // Load new target into a3.
2912 VisitForAccumulatorValue(super_call_ref->new_target_var());
2913 __ mov(a3, result_register());
2914
2915 // Load function and argument count into a1 and a0.
2916 __ li(a0, Operand(arg_count));
2917 __ lw(a1, MemOperand(sp, arg_count * kPointerSize));
2918
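  // a0 holds the argument count, a1 the super constructor and a3 the
  // new.target; the Construct builtin consumes the target and arguments
  // pushed above.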
2919 __ Call(isolate()->builtins()->Construct(), RelocInfo::CODE_TARGET);
Ben Murdoch097c5b22016-05-18 11:27:45 +01002920 OperandStackDepthDecrement(arg_count + 1);
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00002921
2922 RecordJSReturnSite(expr);
2923
2924 // Restore context register.
2925 __ lw(cp, MemOperand(fp, StandardFrameConstants::kContextOffset));
2926 context()->Plug(v0);
2927}
2928
2929
2930void FullCodeGenerator::EmitIsSmi(CallRuntime* expr) {
2931 ZoneList<Expression*>* args = expr->arguments();
2932 DCHECK(args->length() == 1);
2933
2934 VisitForAccumulatorValue(args->at(0));
2935
2936 Label materialize_true, materialize_false;
2937 Label* if_true = NULL;
2938 Label* if_false = NULL;
2939 Label* fall_through = NULL;
2940 context()->PrepareTest(&materialize_true, &materialize_false,
2941 &if_true, &if_false, &fall_through);
2942
2943 PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
2944 __ SmiTst(v0, t0);
2945 Split(eq, t0, Operand(zero_reg), if_true, if_false, fall_through);
2946
2947 context()->Plug(if_true, if_false);
2948}
2949
2950
2951void FullCodeGenerator::EmitIsJSReceiver(CallRuntime* expr) {
2952 ZoneList<Expression*>* args = expr->arguments();
2953 DCHECK(args->length() == 1);
2954
2955 VisitForAccumulatorValue(args->at(0));
2956
2957 Label materialize_true, materialize_false;
2958 Label* if_true = NULL;
2959 Label* if_false = NULL;
2960 Label* fall_through = NULL;
2961 context()->PrepareTest(&materialize_true, &materialize_false,
2962 &if_true, &if_false, &fall_through);
2963
2964 __ JumpIfSmi(v0, if_false);
2965 __ GetObjectType(v0, a1, a1);
2966 PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
2967 Split(ge, a1, Operand(FIRST_JS_RECEIVER_TYPE),
2968 if_true, if_false, fall_through);
2969
2970 context()->Plug(if_true, if_false);
2971}
2972
2973
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00002974void FullCodeGenerator::EmitIsArray(CallRuntime* expr) {
2975 ZoneList<Expression*>* args = expr->arguments();
2976 DCHECK(args->length() == 1);
2977
2978 VisitForAccumulatorValue(args->at(0));
2979
2980 Label materialize_true, materialize_false;
2981 Label* if_true = NULL;
2982 Label* if_false = NULL;
2983 Label* fall_through = NULL;
2984 context()->PrepareTest(&materialize_true, &materialize_false,
2985 &if_true, &if_false, &fall_through);
2986
2987 __ JumpIfSmi(v0, if_false);
2988 __ GetObjectType(v0, a1, a1);
2989 PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
2990 Split(eq, a1, Operand(JS_ARRAY_TYPE),
2991 if_true, if_false, fall_through);
2992
2993 context()->Plug(if_true, if_false);
2994}
2995
2996
2997void FullCodeGenerator::EmitIsTypedArray(CallRuntime* expr) {
2998 ZoneList<Expression*>* args = expr->arguments();
2999 DCHECK(args->length() == 1);
3000
3001 VisitForAccumulatorValue(args->at(0));
3002
3003 Label materialize_true, materialize_false;
3004 Label* if_true = NULL;
3005 Label* if_false = NULL;
3006 Label* fall_through = NULL;
3007 context()->PrepareTest(&materialize_true, &materialize_false, &if_true,
3008 &if_false, &fall_through);
3009
3010 __ JumpIfSmi(v0, if_false);
3011 __ GetObjectType(v0, a1, a1);
3012 PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
3013 Split(eq, a1, Operand(JS_TYPED_ARRAY_TYPE), if_true, if_false, fall_through);
3014
3015 context()->Plug(if_true, if_false);
3016}
3017
3018
3019void FullCodeGenerator::EmitIsRegExp(CallRuntime* expr) {
3020 ZoneList<Expression*>* args = expr->arguments();
3021 DCHECK(args->length() == 1);
3022
3023 VisitForAccumulatorValue(args->at(0));
3024
3025 Label materialize_true, materialize_false;
3026 Label* if_true = NULL;
3027 Label* if_false = NULL;
3028 Label* fall_through = NULL;
3029 context()->PrepareTest(&materialize_true, &materialize_false,
3030 &if_true, &if_false, &fall_through);
3031
3032 __ JumpIfSmi(v0, if_false);
3033 __ GetObjectType(v0, a1, a1);
3034 PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
3035 Split(eq, a1, Operand(JS_REGEXP_TYPE), if_true, if_false, fall_through);
3036
3037 context()->Plug(if_true, if_false);
3038}
3039
3040
3041void FullCodeGenerator::EmitIsJSProxy(CallRuntime* expr) {
3042 ZoneList<Expression*>* args = expr->arguments();
3043 DCHECK(args->length() == 1);
3044
3045 VisitForAccumulatorValue(args->at(0));
3046
3047 Label materialize_true, materialize_false;
3048 Label* if_true = NULL;
3049 Label* if_false = NULL;
3050 Label* fall_through = NULL;
3051 context()->PrepareTest(&materialize_true, &materialize_false, &if_true,
3052 &if_false, &fall_through);
3053
3054 __ JumpIfSmi(v0, if_false);
3055 __ GetObjectType(v0, a1, a1);
3056 PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
3057 Split(eq, a1, Operand(JS_PROXY_TYPE), if_true, if_false, fall_through);
3058
3059 context()->Plug(if_true, if_false);
3060}
3061
3062
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00003063void FullCodeGenerator::EmitClassOf(CallRuntime* expr) {
3064 ZoneList<Expression*>* args = expr->arguments();
3065 DCHECK(args->length() == 1);
3066 Label done, null, function, non_function_constructor;
3067
3068 VisitForAccumulatorValue(args->at(0));
3069
3070 // If the object is not a JSReceiver, we return null.
3071 __ JumpIfSmi(v0, &null);
3072 STATIC_ASSERT(LAST_JS_RECEIVER_TYPE == LAST_TYPE);
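  // Since LAST_JS_RECEIVER_TYPE is the last instance type, a single
  // lower-bound check against FIRST_JS_RECEIVER_TYPE identifies JS receivers.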
3073 __ GetObjectType(v0, v0, a1); // Map is now in v0.
3074 __ Branch(&null, lt, a1, Operand(FIRST_JS_RECEIVER_TYPE));
3075
3076 // Return 'Function' for JSFunction objects.
3077 __ Branch(&function, eq, a1, Operand(JS_FUNCTION_TYPE));
3078
3079 // Check if the constructor in the map is a JS function.
3080 Register instance_type = a2;
3081 __ GetMapConstructor(v0, v0, a1, instance_type);
3082 __ Branch(&non_function_constructor, ne, instance_type,
3083 Operand(JS_FUNCTION_TYPE));
3084
3085 // v0 now contains the constructor function. Grab the
3086 // instance class name from there.
3087 __ lw(v0, FieldMemOperand(v0, JSFunction::kSharedFunctionInfoOffset));
3088 __ lw(v0, FieldMemOperand(v0, SharedFunctionInfo::kInstanceClassNameOffset));
3089 __ Branch(&done);
3090
3091 // Functions have class 'Function'.
3092 __ bind(&function);
3093 __ LoadRoot(v0, Heap::kFunction_stringRootIndex);
3094 __ jmp(&done);
3095
3096 // Objects with a non-function constructor have class 'Object'.
3097 __ bind(&non_function_constructor);
3098 __ LoadRoot(v0, Heap::kObject_stringRootIndex);
3099 __ jmp(&done);
3100
3101 // Non-JS objects have class null.
3102 __ bind(&null);
3103 __ LoadRoot(v0, Heap::kNullValueRootIndex);
3104
3105 // All done.
3106 __ bind(&done);
3107
3108 context()->Plug(v0);
3109}
3110
3111
3112void FullCodeGenerator::EmitValueOf(CallRuntime* expr) {
3113 ZoneList<Expression*>* args = expr->arguments();
3114 DCHECK(args->length() == 1);
3115
3116 VisitForAccumulatorValue(args->at(0)); // Load the object.
3117
3118 Label done;
3119 // If the object is a smi return the object.
3120 __ JumpIfSmi(v0, &done);
3121 // If the object is not a value type, return the object.
3122 __ GetObjectType(v0, a1, a1);
3123 __ Branch(&done, ne, a1, Operand(JS_VALUE_TYPE));
3124
3125 __ lw(v0, FieldMemOperand(v0, JSValue::kValueOffset));
3126
3127 __ bind(&done);
3128 context()->Plug(v0);
3129}
3130
3131
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00003132void FullCodeGenerator::EmitOneByteSeqStringSetChar(CallRuntime* expr) {
3133 ZoneList<Expression*>* args = expr->arguments();
3134 DCHECK_EQ(3, args->length());
3135
3136 Register string = v0;
3137 Register index = a1;
3138 Register value = a2;
3139
3140 VisitForStackValue(args->at(0)); // index
3141 VisitForStackValue(args->at(1)); // value
3142 VisitForAccumulatorValue(args->at(2)); // string
Ben Murdoch097c5b22016-05-18 11:27:45 +01003143 PopOperands(index, value);
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00003144
3145 if (FLAG_debug_code) {
3146 __ SmiTst(value, at);
3147 __ Check(eq, kNonSmiValue, at, Operand(zero_reg));
3148 __ SmiTst(index, at);
3149 __ Check(eq, kNonSmiIndex, at, Operand(zero_reg));
3150 __ SmiUntag(index, index);
3151 static const uint32_t one_byte_seq_type = kSeqStringTag | kOneByteStringTag;
3152 Register scratch = t5;
3153 __ EmitSeqStringSetCharCheck(
3154 string, index, value, scratch, one_byte_seq_type);
3155 __ SmiTag(index, index);
3156 }
3157
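  // Compute the character address as string + SeqOneByteString::kHeaderSize
  // - kHeapObjectTag plus the untagged index, then store the byte value.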
3158 __ SmiUntag(value, value);
3159 __ Addu(at,
3160 string,
3161 Operand(SeqOneByteString::kHeaderSize - kHeapObjectTag));
3162 __ SmiUntag(index);
3163 __ Addu(at, at, index);
3164 __ sb(value, MemOperand(at));
3165 context()->Plug(string);
3166}
3167
3168
3169void FullCodeGenerator::EmitTwoByteSeqStringSetChar(CallRuntime* expr) {
3170 ZoneList<Expression*>* args = expr->arguments();
3171 DCHECK_EQ(3, args->length());
3172
3173 Register string = v0;
3174 Register index = a1;
3175 Register value = a2;
3176
3177 VisitForStackValue(args->at(0)); // index
3178 VisitForStackValue(args->at(1)); // value
3179 VisitForAccumulatorValue(args->at(2)); // string
Ben Murdoch097c5b22016-05-18 11:27:45 +01003180 PopOperands(index, value);
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00003181
3182 if (FLAG_debug_code) {
3183 __ SmiTst(value, at);
3184 __ Check(eq, kNonSmiValue, at, Operand(zero_reg));
3185 __ SmiTst(index, at);
3186 __ Check(eq, kNonSmiIndex, at, Operand(zero_reg));
3187 __ SmiUntag(index, index);
3188 static const uint32_t two_byte_seq_type = kSeqStringTag | kTwoByteStringTag;
3189 Register scratch = t5;
3190 __ EmitSeqStringSetCharCheck(
3191 string, index, value, scratch, two_byte_seq_type);
3192 __ SmiTag(index, index);
3193 }
3194
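  // For two-byte characters the Smi-tagged index (index * 2, given
  // kSmiTagSize == 1 and kSmiTag == 0) already equals the byte offset,
  // so it is added without untagging.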
3195 __ SmiUntag(value, value);
3196 __ Addu(at,
3197 string,
3198 Operand(SeqTwoByteString::kHeaderSize - kHeapObjectTag));
3199 __ Addu(at, at, index);
3200 STATIC_ASSERT(kSmiTagSize == 1 && kSmiTag == 0);
3201 __ sh(value, MemOperand(at));
3202 context()->Plug(string);
3203}
3204
3205
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00003206void FullCodeGenerator::EmitToInteger(CallRuntime* expr) {
3207 ZoneList<Expression*>* args = expr->arguments();
3208 DCHECK_EQ(1, args->length());
3209
3210 // Load the argument into v0 and convert it.
3211 VisitForAccumulatorValue(args->at(0));
3212
3213 // Convert the object to an integer.
3214 Label done_convert;
3215 __ JumpIfSmi(v0, &done_convert);
3216 __ Push(v0);
3217 __ CallRuntime(Runtime::kToInteger);
3218 __ bind(&done_convert);
3219 context()->Plug(v0);
3220}
3221
3222
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00003223void FullCodeGenerator::EmitStringCharFromCode(CallRuntime* expr) {
3224 ZoneList<Expression*>* args = expr->arguments();
3225 DCHECK(args->length() == 1);
3226
3227 VisitForAccumulatorValue(args->at(0));
3228
3229 Label done;
3230 StringCharFromCodeGenerator generator(v0, a1);
3231 generator.GenerateFast(masm_);
3232 __ jmp(&done);
3233
3234 NopRuntimeCallHelper call_helper;
3235 generator.GenerateSlow(masm_, call_helper);
3236
3237 __ bind(&done);
3238 context()->Plug(a1);
3239}
3240
3241
3242void FullCodeGenerator::EmitStringCharCodeAt(CallRuntime* expr) {
3243 ZoneList<Expression*>* args = expr->arguments();
3244 DCHECK(args->length() == 2);
3245
3246 VisitForStackValue(args->at(0));
3247 VisitForAccumulatorValue(args->at(1));
3248 __ mov(a0, result_register());
3249
3250 Register object = a1;
3251 Register index = a0;
3252 Register result = v0;
3253
Ben Murdoch097c5b22016-05-18 11:27:45 +01003254 PopOperand(object);
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00003255
3256 Label need_conversion;
3257 Label index_out_of_range;
3258 Label done;
3259 StringCharCodeAtGenerator generator(object,
3260 index,
3261 result,
3262 &need_conversion,
3263 &need_conversion,
3264 &index_out_of_range,
3265 STRING_INDEX_IS_NUMBER);
3266 generator.GenerateFast(masm_);
3267 __ jmp(&done);
3268
3269 __ bind(&index_out_of_range);
3270 // When the index is out of range, the spec requires us to return
3271 // NaN.
3272 __ LoadRoot(result, Heap::kNanValueRootIndex);
3273 __ jmp(&done);
3274
3275 __ bind(&need_conversion);
3276 // Load the undefined value into the result register, which will
3277 // trigger conversion.
3278 __ LoadRoot(result, Heap::kUndefinedValueRootIndex);
3279 __ jmp(&done);
3280
3281 NopRuntimeCallHelper call_helper;
3282 generator.GenerateSlow(masm_, NOT_PART_OF_IC_HANDLER, call_helper);
3283
3284 __ bind(&done);
3285 context()->Plug(result);
3286}
3287
3288
3289void FullCodeGenerator::EmitStringCharAt(CallRuntime* expr) {
3290 ZoneList<Expression*>* args = expr->arguments();
3291 DCHECK(args->length() == 2);
3292
3293 VisitForStackValue(args->at(0));
3294 VisitForAccumulatorValue(args->at(1));
3295 __ mov(a0, result_register());
3296
3297 Register object = a1;
3298 Register index = a0;
3299 Register scratch = a3;
3300 Register result = v0;
3301
Ben Murdoch097c5b22016-05-18 11:27:45 +01003302 PopOperand(object);
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00003303
3304 Label need_conversion;
3305 Label index_out_of_range;
3306 Label done;
3307 StringCharAtGenerator generator(object,
3308 index,
3309 scratch,
3310 result,
3311 &need_conversion,
3312 &need_conversion,
3313 &index_out_of_range,
3314 STRING_INDEX_IS_NUMBER);
3315 generator.GenerateFast(masm_);
3316 __ jmp(&done);
3317
3318 __ bind(&index_out_of_range);
3319 // When the index is out of range, the spec requires us to return
3320 // the empty string.
3321 __ LoadRoot(result, Heap::kempty_stringRootIndex);
3322 __ jmp(&done);
3323
3324 __ bind(&need_conversion);
3325 // Move smi zero into the result register, which will trigger
3326 // conversion.
3327 __ li(result, Operand(Smi::FromInt(0)));
3328 __ jmp(&done);
3329
3330 NopRuntimeCallHelper call_helper;
3331 generator.GenerateSlow(masm_, NOT_PART_OF_IC_HANDLER, call_helper);
3332
3333 __ bind(&done);
3334 context()->Plug(result);
3335}
3336
3337
3338void FullCodeGenerator::EmitCall(CallRuntime* expr) {
3339 ZoneList<Expression*>* args = expr->arguments();
3340 DCHECK_LE(2, args->length());
3341 // Push target, receiver and arguments onto the stack.
3342 for (Expression* const arg : *args) {
3343 VisitForStackValue(arg);
3344 }
3345 PrepareForBailoutForId(expr->CallId(), NO_REGISTERS);
3346 // Move target to a1.
3347 int const argc = args->length() - 2;
3348 __ lw(a1, MemOperand(sp, (argc + 1) * kPointerSize));
3349 // Call the target.
3350 __ li(a0, Operand(argc));
3351 __ Call(isolate()->builtins()->Call(), RelocInfo::CODE_TARGET);
Ben Murdoch097c5b22016-05-18 11:27:45 +01003352 OperandStackDepthDecrement(argc + 1);
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00003353 // Restore context register.
3354 __ lw(cp, MemOperand(fp, StandardFrameConstants::kContextOffset));
3355 // Discard the function left on TOS.
3356 context()->DropAndPlug(1, v0);
3357}
3358
3359
3360void FullCodeGenerator::EmitHasCachedArrayIndex(CallRuntime* expr) {
3361 ZoneList<Expression*>* args = expr->arguments();
3362 VisitForAccumulatorValue(args->at(0));
3363
3364 Label materialize_true, materialize_false;
3365 Label* if_true = NULL;
3366 Label* if_false = NULL;
3367 Label* fall_through = NULL;
3368 context()->PrepareTest(&materialize_true, &materialize_false,
3369 &if_true, &if_false, &fall_through);
3370
3371 __ lw(a0, FieldMemOperand(v0, String::kHashFieldOffset));
3372 __ And(a0, a0, Operand(String::kContainsCachedArrayIndexMask));
3373
3374 PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
3375 Split(eq, a0, Operand(zero_reg), if_true, if_false, fall_through);
3376
3377 context()->Plug(if_true, if_false);
3378}
3379
3380
3381void FullCodeGenerator::EmitGetCachedArrayIndex(CallRuntime* expr) {
3382 ZoneList<Expression*>* args = expr->arguments();
3383 DCHECK(args->length() == 1);
3384 VisitForAccumulatorValue(args->at(0));
3385
3386 __ AssertString(v0);
3387
3388 __ lw(v0, FieldMemOperand(v0, String::kHashFieldOffset));
3389 __ IndexFromHash(v0, v0);
3390
3391 context()->Plug(v0);
3392}
3393
3394
3395void FullCodeGenerator::EmitGetSuperConstructor(CallRuntime* expr) {
3396 ZoneList<Expression*>* args = expr->arguments();
3397 DCHECK_EQ(1, args->length());
3398 VisitForAccumulatorValue(args->at(0));
3399 __ AssertFunction(v0);
3400 __ lw(v0, FieldMemOperand(v0, HeapObject::kMapOffset));
3401 __ lw(v0, FieldMemOperand(v0, Map::kPrototypeOffset));
3402 context()->Plug(v0);
3403}
3404
3405
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00003406void FullCodeGenerator::EmitDebugIsActive(CallRuntime* expr) {
3407 DCHECK(expr->arguments()->length() == 0);
3408 ExternalReference debug_is_active =
3409 ExternalReference::debug_is_active_address(isolate());
3410 __ li(at, Operand(debug_is_active));
3411 __ lb(v0, MemOperand(at));
3412 __ SmiTag(v0);
3413 context()->Plug(v0);
3414}
3415
3416
3417void FullCodeGenerator::EmitCreateIterResultObject(CallRuntime* expr) {
3418 ZoneList<Expression*>* args = expr->arguments();
3419 DCHECK_EQ(2, args->length());
3420 VisitForStackValue(args->at(0));
3421 VisitForStackValue(args->at(1));
3422
3423 Label runtime, done;
3424
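  // Fast path: allocate the JSIteratorResult in new space and initialize its
  // map, properties, elements, value and done fields; fall through to the
  // runtime call below if allocation fails.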
3425 __ Allocate(JSIteratorResult::kSize, v0, a2, a3, &runtime, TAG_OBJECT);
3426 __ LoadNativeContextSlot(Context::ITERATOR_RESULT_MAP_INDEX, a1);
3427 __ Pop(a2, a3);
3428 __ LoadRoot(t0, Heap::kEmptyFixedArrayRootIndex);
3429 __ sw(a1, FieldMemOperand(v0, HeapObject::kMapOffset));
3430 __ sw(t0, FieldMemOperand(v0, JSObject::kPropertiesOffset));
3431 __ sw(t0, FieldMemOperand(v0, JSObject::kElementsOffset));
3432 __ sw(a2, FieldMemOperand(v0, JSIteratorResult::kValueOffset));
3433 __ sw(a3, FieldMemOperand(v0, JSIteratorResult::kDoneOffset));
3434 STATIC_ASSERT(JSIteratorResult::kSize == 5 * kPointerSize);
3435 __ jmp(&done);
3436
3437 __ bind(&runtime);
Ben Murdoch097c5b22016-05-18 11:27:45 +01003438 CallRuntimeWithOperands(Runtime::kCreateIterResultObject);
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00003439
3440 __ bind(&done);
3441 context()->Plug(v0);
3442}
3443
3444
3445void FullCodeGenerator::EmitLoadJSRuntimeFunction(CallRuntime* expr) {
3446 // Push undefined as the receiver.
3447 __ LoadRoot(v0, Heap::kUndefinedValueRootIndex);
Ben Murdoch097c5b22016-05-18 11:27:45 +01003448 PushOperand(v0);
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00003449
3450 __ LoadNativeContextSlot(expr->context_index(), v0);
3451}
3452
3453
3454void FullCodeGenerator::EmitCallJSRuntimeFunction(CallRuntime* expr) {
3455 ZoneList<Expression*>* args = expr->arguments();
3456 int arg_count = args->length();
3457
3458 SetCallPosition(expr);
3459 __ lw(a1, MemOperand(sp, (arg_count + 1) * kPointerSize));
3460 __ li(a0, Operand(arg_count));
3461 __ Call(isolate()->builtins()->Call(ConvertReceiverMode::kNullOrUndefined),
3462 RelocInfo::CODE_TARGET);
Ben Murdoch097c5b22016-05-18 11:27:45 +01003463 OperandStackDepthDecrement(arg_count + 1);
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00003464}
3465
3466
3467void FullCodeGenerator::VisitCallRuntime(CallRuntime* expr) {
3468 ZoneList<Expression*>* args = expr->arguments();
3469 int arg_count = args->length();
3470
3471 if (expr->is_jsruntime()) {
3472 Comment cmnt(masm_, "[ CallRuntime");
3473 EmitLoadJSRuntimeFunction(expr);
3474
3475 // Push the target function under the receiver.
3476 __ lw(at, MemOperand(sp, 0));
Ben Murdoch097c5b22016-05-18 11:27:45 +01003477 PushOperand(at);
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00003478 __ sw(v0, MemOperand(sp, kPointerSize));
3479
3480 // Push the arguments ("left-to-right").
3481 for (int i = 0; i < arg_count; i++) {
3482 VisitForStackValue(args->at(i));
3483 }
3484
3485 PrepareForBailoutForId(expr->CallId(), NO_REGISTERS);
3486 EmitCallJSRuntimeFunction(expr);
3487
3488 // Restore context register.
3489 __ lw(cp, MemOperand(fp, StandardFrameConstants::kContextOffset));
3490
3491 context()->DropAndPlug(1, v0);
3492
3493 } else {
3494 const Runtime::Function* function = expr->function();
3495 switch (function->function_id) {
3496#define CALL_INTRINSIC_GENERATOR(Name) \
3497 case Runtime::kInline##Name: { \
3498 Comment cmnt(masm_, "[ Inline" #Name); \
3499 return Emit##Name(expr); \
3500 }
3501 FOR_EACH_FULL_CODE_INTRINSIC(CALL_INTRINSIC_GENERATOR)
3502#undef CALL_INTRINSIC_GENERATOR
3503 default: {
3504 Comment cmnt(masm_, "[ CallRuntime for unhandled intrinsic");
3505 // Push the arguments ("left-to-right").
3506 for (int i = 0; i < arg_count; i++) {
3507 VisitForStackValue(args->at(i));
3508 }
3509
3510 // Call the C runtime function.
3511 PrepareForBailoutForId(expr->CallId(), NO_REGISTERS);
3512 __ CallRuntime(expr->function(), arg_count);
Ben Murdoch097c5b22016-05-18 11:27:45 +01003513 OperandStackDepthDecrement(arg_count);
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00003514 context()->Plug(v0);
3515 }
3516 }
3517 }
3518}
3519
3520
3521void FullCodeGenerator::VisitUnaryOperation(UnaryOperation* expr) {
3522 switch (expr->op()) {
3523 case Token::DELETE: {
3524 Comment cmnt(masm_, "[ UnaryOperation (DELETE)");
3525 Property* property = expr->expression()->AsProperty();
3526 VariableProxy* proxy = expr->expression()->AsVariableProxy();
3527
3528 if (property != NULL) {
3529 VisitForStackValue(property->obj());
3530 VisitForStackValue(property->key());
Ben Murdoch097c5b22016-05-18 11:27:45 +01003531 CallRuntimeWithOperands(is_strict(language_mode())
3532 ? Runtime::kDeleteProperty_Strict
3533 : Runtime::kDeleteProperty_Sloppy);
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00003534 context()->Plug(v0);
3535 } else if (proxy != NULL) {
3536 Variable* var = proxy->var();
3537 // Delete of an unqualified identifier is disallowed in strict mode but
3538 // "delete this" is allowed.
3539 bool is_this = var->HasThisName(isolate());
3540 DCHECK(is_sloppy(language_mode()) || is_this);
3541 if (var->IsUnallocatedOrGlobalSlot()) {
3542 __ LoadGlobalObject(a2);
3543 __ li(a1, Operand(var->name()));
3544 __ Push(a2, a1);
3545 __ CallRuntime(Runtime::kDeleteProperty_Sloppy);
3546 context()->Plug(v0);
3547 } else if (var->IsStackAllocated() || var->IsContextSlot()) {
3548 // Result of deleting non-global, non-dynamic variables is false.
3549 // The subexpression does not have side effects.
3550 context()->Plug(is_this);
3551 } else {
3552 // Non-global variable. Call the runtime to try to delete from the
3553 // context where the variable was introduced.
Ben Murdoch097c5b22016-05-18 11:27:45 +01003554 __ Push(var->name());
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00003555 __ CallRuntime(Runtime::kDeleteLookupSlot);
3556 context()->Plug(v0);
3557 }
3558 } else {
3559 // Result of deleting non-property, non-variable reference is true.
3560 // The subexpression may have side effects.
3561 VisitForEffect(expr->expression());
3562 context()->Plug(true);
3563 }
3564 break;
3565 }
3566
3567 case Token::VOID: {
3568 Comment cmnt(masm_, "[ UnaryOperation (VOID)");
3569 VisitForEffect(expr->expression());
3570 context()->Plug(Heap::kUndefinedValueRootIndex);
3571 break;
3572 }
3573
3574 case Token::NOT: {
3575 Comment cmnt(masm_, "[ UnaryOperation (NOT)");
3576 if (context()->IsEffect()) {
3577 // Unary NOT has no side effects so it's only necessary to visit the
3578 // subexpression. Match the optimizing compiler by not branching.
3579 VisitForEffect(expr->expression());
3580 } else if (context()->IsTest()) {
3581 const TestContext* test = TestContext::cast(context());
3582 // The labels are swapped for the recursive call.
3583 VisitForControl(expr->expression(),
3584 test->false_label(),
3585 test->true_label(),
3586 test->fall_through());
3587 context()->Plug(test->true_label(), test->false_label());
3588 } else {
3589 // We handle value contexts explicitly rather than simply visiting
3590 // for control and plugging the control flow into the context,
3591 // because we need to prepare a pair of extra administrative AST ids
3592 // for the optimizing compiler.
3593 DCHECK(context()->IsAccumulatorValue() || context()->IsStackValue());
3594 Label materialize_true, materialize_false, done;
3595 VisitForControl(expr->expression(),
3596 &materialize_false,
3597 &materialize_true,
3598 &materialize_true);
Ben Murdoch097c5b22016-05-18 11:27:45 +01003599 if (!context()->IsAccumulatorValue()) OperandStackDepthIncrement(1);
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00003600 __ bind(&materialize_true);
3601 PrepareForBailoutForId(expr->MaterializeTrueId(), NO_REGISTERS);
3602 __ LoadRoot(v0, Heap::kTrueValueRootIndex);
3603 if (context()->IsStackValue()) __ push(v0);
3604 __ jmp(&done);
3605 __ bind(&materialize_false);
3606 PrepareForBailoutForId(expr->MaterializeFalseId(), NO_REGISTERS);
3607 __ LoadRoot(v0, Heap::kFalseValueRootIndex);
3608 if (context()->IsStackValue()) __ push(v0);
3609 __ bind(&done);
3610 }
3611 break;
3612 }
3613
3614 case Token::TYPEOF: {
3615 Comment cmnt(masm_, "[ UnaryOperation (TYPEOF)");
3616 {
3617 AccumulatorValueContext context(this);
3618 VisitForTypeofValue(expr->expression());
3619 }
3620 __ mov(a3, v0);
3621 TypeofStub typeof_stub(isolate());
3622 __ CallStub(&typeof_stub);
3623 context()->Plug(v0);
3624 break;
3625 }
3626
3627 default:
3628 UNREACHABLE();
3629 }
3630}
3631
3632
3633void FullCodeGenerator::VisitCountOperation(CountOperation* expr) {
3634 DCHECK(expr->expression()->IsValidReferenceExpressionOrThis());
3635
3636 Comment cmnt(masm_, "[ CountOperation");
3637
3638 Property* prop = expr->expression()->AsProperty();
3639 LhsKind assign_type = Property::GetAssignType(prop);
3640
3641 // Evaluate expression and get value.
3642 if (assign_type == VARIABLE) {
3643 DCHECK(expr->expression()->AsVariableProxy()->var() != NULL);
3644 AccumulatorValueContext context(this);
3645 EmitVariableLoad(expr->expression()->AsVariableProxy());
3646 } else {
3647 // Reserve space for result of postfix operation.
3648 if (expr->is_postfix() && !context()->IsEffect()) {
3649 __ li(at, Operand(Smi::FromInt(0)));
Ben Murdoch097c5b22016-05-18 11:27:45 +01003650 PushOperand(at);
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00003651 }
3652 switch (assign_type) {
3653 case NAMED_PROPERTY: {
3654 // Put the object both on the stack and in the register.
3655 VisitForStackValue(prop->obj());
3656 __ lw(LoadDescriptor::ReceiverRegister(), MemOperand(sp, 0));
3657 EmitNamedPropertyLoad(prop);
3658 break;
3659 }
3660
3661 case NAMED_SUPER_PROPERTY: {
3662 VisitForStackValue(prop->obj()->AsSuperPropertyReference()->this_var());
3663 VisitForAccumulatorValue(
3664 prop->obj()->AsSuperPropertyReference()->home_object());
Ben Murdoch097c5b22016-05-18 11:27:45 +01003665 PushOperand(result_register());
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00003666 const Register scratch = a1;
3667 __ lw(scratch, MemOperand(sp, kPointerSize));
Ben Murdoch097c5b22016-05-18 11:27:45 +01003668 PushOperands(scratch, result_register());
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00003669 EmitNamedSuperPropertyLoad(prop);
3670 break;
3671 }
3672
3673 case KEYED_SUPER_PROPERTY: {
3674 VisitForStackValue(prop->obj()->AsSuperPropertyReference()->this_var());
3675 VisitForAccumulatorValue(
3676 prop->obj()->AsSuperPropertyReference()->home_object());
3677 const Register scratch = a1;
3678 const Register scratch1 = t0;
3679 __ Move(scratch, result_register());
3680 VisitForAccumulatorValue(prop->key());
Ben Murdoch097c5b22016-05-18 11:27:45 +01003681 PushOperands(scratch, result_register());
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00003682 __ lw(scratch1, MemOperand(sp, 2 * kPointerSize));
Ben Murdoch097c5b22016-05-18 11:27:45 +01003683 PushOperands(scratch1, scratch, result_register());
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00003684 EmitKeyedSuperPropertyLoad(prop);
3685 break;
3686 }
3687
3688 case KEYED_PROPERTY: {
3689 VisitForStackValue(prop->obj());
3690 VisitForStackValue(prop->key());
3691 __ lw(LoadDescriptor::ReceiverRegister(),
3692 MemOperand(sp, 1 * kPointerSize));
3693 __ lw(LoadDescriptor::NameRegister(), MemOperand(sp, 0));
3694 EmitKeyedPropertyLoad(prop);
3695 break;
3696 }
3697
3698 case VARIABLE:
3699 UNREACHABLE();
3700 }
3701 }
3702
3703 // We need a second deoptimization point after loading the value
3704  // in case evaluating the property load may have a side effect.
3705 if (assign_type == VARIABLE) {
3706 PrepareForBailout(expr->expression(), TOS_REG);
3707 } else {
3708 PrepareForBailoutForId(prop->LoadId(), TOS_REG);
3709 }
3710
3711 // Inline smi case if we are in a loop.
3712 Label stub_call, done;
3713 JumpPatchSite patch_site(masm_);
3714
3715 int count_value = expr->op() == Token::INC ? 1 : -1;
3716 __ mov(a0, v0);
3717 if (ShouldInlineSmiCase(expr->op())) {
3718 Label slow;
3719 patch_site.EmitJumpIfNotSmi(v0, &slow);
3720
3721 // Save result for postfix expressions.
3722 if (expr->is_postfix()) {
3723 if (!context()->IsEffect()) {
3724 // Save the result on the stack. If we have a named or keyed property
3725 // we store the result under the receiver that is currently on top
3726 // of the stack.
3727 switch (assign_type) {
3728 case VARIABLE:
3729 __ push(v0);
3730 break;
3731 case NAMED_PROPERTY:
3732 __ sw(v0, MemOperand(sp, kPointerSize));
3733 break;
3734 case NAMED_SUPER_PROPERTY:
3735 __ sw(v0, MemOperand(sp, 2 * kPointerSize));
3736 break;
3737 case KEYED_PROPERTY:
3738 __ sw(v0, MemOperand(sp, 2 * kPointerSize));
3739 break;
3740 case KEYED_SUPER_PROPERTY:
3741 __ sw(v0, MemOperand(sp, 3 * kPointerSize));
3742 break;
3743 }
3744 }
3745 }
3746
3747 Register scratch1 = a1;
3748 __ li(scratch1, Operand(Smi::FromInt(count_value)));
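    // Add the Smi count; this branches to 'done' when the addition does not
    // overflow, otherwise execution falls through to undo the add and call
    // the stub.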
3749 __ AddBranchNoOvf(v0, v0, Operand(scratch1), &done);
3750 // Call stub. Undo operation first.
3751 __ Move(v0, a0);
3752 __ jmp(&stub_call);
3753 __ bind(&slow);
3754 }
3755 if (!is_strong(language_mode())) {
3756 ToNumberStub convert_stub(isolate());
3757 __ CallStub(&convert_stub);
3758 PrepareForBailoutForId(expr->ToNumberId(), TOS_REG);
3759 }
3760
3761 // Save result for postfix expressions.
3762 if (expr->is_postfix()) {
3763 if (!context()->IsEffect()) {
3764 // Save the result on the stack. If we have a named or keyed property
3765 // we store the result under the receiver that is currently on top
3766 // of the stack.
3767 switch (assign_type) {
3768 case VARIABLE:
Ben Murdoch097c5b22016-05-18 11:27:45 +01003769 PushOperand(v0);
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00003770 break;
3771 case NAMED_PROPERTY:
3772 __ sw(v0, MemOperand(sp, kPointerSize));
3773 break;
3774 case NAMED_SUPER_PROPERTY:
3775 __ sw(v0, MemOperand(sp, 2 * kPointerSize));
3776 break;
3777 case KEYED_PROPERTY:
3778 __ sw(v0, MemOperand(sp, 2 * kPointerSize));
3779 break;
3780 case KEYED_SUPER_PROPERTY:
3781 __ sw(v0, MemOperand(sp, 3 * kPointerSize));
3782 break;
3783 }
3784 }
3785 }
3786
3787 __ bind(&stub_call);
3788 __ mov(a1, v0);
3789 __ li(a0, Operand(Smi::FromInt(count_value)));
3790
3791 SetExpressionPosition(expr);
3792
Ben Murdoch097c5b22016-05-18 11:27:45 +01003793 Handle<Code> code = CodeFactory::BinaryOpIC(isolate(), Token::ADD).code();
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00003794 CallIC(code, expr->CountBinOpFeedbackId());
3795 patch_site.EmitPatchInfo();
3796 __ bind(&done);
3797
3798 if (is_strong(language_mode())) {
3799 PrepareForBailoutForId(expr->ToNumberId(), TOS_REG);
3800 }
3801 // Store the value returned in v0.
3802 switch (assign_type) {
3803 case VARIABLE:
3804 if (expr->is_postfix()) {
3805 { EffectContext context(this);
3806 EmitVariableAssignment(expr->expression()->AsVariableProxy()->var(),
3807 Token::ASSIGN, expr->CountSlot());
3808 PrepareForBailoutForId(expr->AssignmentId(), TOS_REG);
3809 context.Plug(v0);
3810 }
3811          // For all contexts except EffectContext we have the result on
3812 // top of the stack.
3813 if (!context()->IsEffect()) {
3814 context()->PlugTOS();
3815 }
3816 } else {
3817 EmitVariableAssignment(expr->expression()->AsVariableProxy()->var(),
3818 Token::ASSIGN, expr->CountSlot());
3819 PrepareForBailoutForId(expr->AssignmentId(), TOS_REG);
3820 context()->Plug(v0);
3821 }
3822 break;
3823 case NAMED_PROPERTY: {
3824 __ mov(StoreDescriptor::ValueRegister(), result_register());
3825 __ li(StoreDescriptor::NameRegister(),
3826 Operand(prop->key()->AsLiteral()->value()));
Ben Murdoch097c5b22016-05-18 11:27:45 +01003827 PopOperand(StoreDescriptor::ReceiverRegister());
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00003828 EmitLoadStoreICSlot(expr->CountSlot());
3829 CallStoreIC();
3830 PrepareForBailoutForId(expr->AssignmentId(), TOS_REG);
3831 if (expr->is_postfix()) {
3832 if (!context()->IsEffect()) {
3833 context()->PlugTOS();
3834 }
3835 } else {
3836 context()->Plug(v0);
3837 }
3838 break;
3839 }
3840 case NAMED_SUPER_PROPERTY: {
3841 EmitNamedSuperPropertyStore(prop);
3842 if (expr->is_postfix()) {
3843 if (!context()->IsEffect()) {
3844 context()->PlugTOS();
3845 }
3846 } else {
3847 context()->Plug(v0);
3848 }
3849 break;
3850 }
3851 case KEYED_SUPER_PROPERTY: {
3852 EmitKeyedSuperPropertyStore(prop);
3853 if (expr->is_postfix()) {
3854 if (!context()->IsEffect()) {
3855 context()->PlugTOS();
3856 }
3857 } else {
3858 context()->Plug(v0);
3859 }
3860 break;
3861 }
3862 case KEYED_PROPERTY: {
3863 __ mov(StoreDescriptor::ValueRegister(), result_register());
Ben Murdoch097c5b22016-05-18 11:27:45 +01003864 PopOperands(StoreDescriptor::ReceiverRegister(),
3865 StoreDescriptor::NameRegister());
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00003866 Handle<Code> ic =
3867 CodeFactory::KeyedStoreIC(isolate(), language_mode()).code();
3868 EmitLoadStoreICSlot(expr->CountSlot());
3869 CallIC(ic);
3870 PrepareForBailoutForId(expr->AssignmentId(), TOS_REG);
3871 if (expr->is_postfix()) {
3872 if (!context()->IsEffect()) {
3873 context()->PlugTOS();
3874 }
3875 } else {
3876 context()->Plug(v0);
3877 }
3878 break;
3879 }
3880 }
3881}
3882
3883
3884void FullCodeGenerator::EmitLiteralCompareTypeof(Expression* expr,
3885 Expression* sub_expr,
3886 Handle<String> check) {
3887 Label materialize_true, materialize_false;
3888 Label* if_true = NULL;
3889 Label* if_false = NULL;
3890 Label* fall_through = NULL;
3891 context()->PrepareTest(&materialize_true, &materialize_false,
3892 &if_true, &if_false, &fall_through);
3893
3894 { AccumulatorValueContext context(this);
3895 VisitForTypeofValue(sub_expr);
3896 }
3897 PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
3898
3899 Factory* factory = isolate()->factory();
3900 if (String::Equals(check, factory->number_string())) {
3901 __ JumpIfSmi(v0, if_true);
3902 __ lw(v0, FieldMemOperand(v0, HeapObject::kMapOffset));
3903 __ LoadRoot(at, Heap::kHeapNumberMapRootIndex);
3904 Split(eq, v0, Operand(at), if_true, if_false, fall_through);
3905 } else if (String::Equals(check, factory->string_string())) {
3906 __ JumpIfSmi(v0, if_false);
3907 __ GetObjectType(v0, v0, a1);
3908 Split(lt, a1, Operand(FIRST_NONSTRING_TYPE), if_true, if_false,
3909 fall_through);
3910 } else if (String::Equals(check, factory->symbol_string())) {
3911 __ JumpIfSmi(v0, if_false);
3912 __ GetObjectType(v0, v0, a1);
3913 Split(eq, a1, Operand(SYMBOL_TYPE), if_true, if_false, fall_through);
3914 } else if (String::Equals(check, factory->boolean_string())) {
3915 __ LoadRoot(at, Heap::kTrueValueRootIndex);
3916 __ Branch(if_true, eq, v0, Operand(at));
3917 __ LoadRoot(at, Heap::kFalseValueRootIndex);
3918 Split(eq, v0, Operand(at), if_true, if_false, fall_through);
3919 } else if (String::Equals(check, factory->undefined_string())) {
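    // typeof null is "object", not "undefined", so the null value is rejected
    // before the undetectable-map check below.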
Ben Murdoch097c5b22016-05-18 11:27:45 +01003920 __ LoadRoot(at, Heap::kNullValueRootIndex);
3921 __ Branch(if_false, eq, v0, Operand(at));
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00003922 __ JumpIfSmi(v0, if_false);
3923 // Check for undetectable objects => true.
3924 __ lw(v0, FieldMemOperand(v0, HeapObject::kMapOffset));
3925 __ lbu(a1, FieldMemOperand(v0, Map::kBitFieldOffset));
3926 __ And(a1, a1, Operand(1 << Map::kIsUndetectable));
3927 Split(ne, a1, Operand(zero_reg), if_true, if_false, fall_through);
3928 } else if (String::Equals(check, factory->function_string())) {
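    // A value is 'function' only if its map is callable and not undetectable.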
3929 __ JumpIfSmi(v0, if_false);
3930 __ lw(v0, FieldMemOperand(v0, HeapObject::kMapOffset));
3931 __ lbu(a1, FieldMemOperand(v0, Map::kBitFieldOffset));
3932 __ And(a1, a1,
3933 Operand((1 << Map::kIsCallable) | (1 << Map::kIsUndetectable)));
3934 Split(eq, a1, Operand(1 << Map::kIsCallable), if_true, if_false,
3935 fall_through);
3936 } else if (String::Equals(check, factory->object_string())) {
3937 __ JumpIfSmi(v0, if_false);
3938 __ LoadRoot(at, Heap::kNullValueRootIndex);
3939 __ Branch(if_true, eq, v0, Operand(at));
3940 STATIC_ASSERT(LAST_JS_RECEIVER_TYPE == LAST_TYPE);
3941 __ GetObjectType(v0, v0, a1);
3942 __ Branch(if_false, lt, a1, Operand(FIRST_JS_RECEIVER_TYPE));
3943 // Check for callable or undetectable objects => false.
3944 __ lbu(a1, FieldMemOperand(v0, Map::kBitFieldOffset));
3945 __ And(a1, a1,
3946 Operand((1 << Map::kIsCallable) | (1 << Map::kIsUndetectable)));
3947 Split(eq, a1, Operand(zero_reg), if_true, if_false, fall_through);
3948// clang-format off
3949#define SIMD128_TYPE(TYPE, Type, type, lane_count, lane_type) \
3950 } else if (String::Equals(check, factory->type##_string())) { \
3951 __ JumpIfSmi(v0, if_false); \
3952 __ lw(v0, FieldMemOperand(v0, HeapObject::kMapOffset)); \
3953 __ LoadRoot(at, Heap::k##Type##MapRootIndex); \
3954 Split(eq, v0, Operand(at), if_true, if_false, fall_through);
3955 SIMD128_TYPES(SIMD128_TYPE)
3956#undef SIMD128_TYPE
3957 // clang-format on
3958 } else {
3959 if (if_false != fall_through) __ jmp(if_false);
3960 }
3961 context()->Plug(if_true, if_false);
3962}
3963
3964
3965void FullCodeGenerator::VisitCompareOperation(CompareOperation* expr) {
3966 Comment cmnt(masm_, "[ CompareOperation");
3967 SetExpressionPosition(expr);
3968
3969 // First we try a fast inlined version of the compare when one of
3970 // the operands is a literal.
3971 if (TryLiteralCompare(expr)) return;
3972
3973 // Always perform the comparison for its control flow. Pack the result
3974 // into the expression's context after the comparison is performed.
3975 Label materialize_true, materialize_false;
3976 Label* if_true = NULL;
3977 Label* if_false = NULL;
3978 Label* fall_through = NULL;
3979 context()->PrepareTest(&materialize_true, &materialize_false,
3980 &if_true, &if_false, &fall_through);
3981
3982 Token::Value op = expr->op();
3983 VisitForStackValue(expr->left());
3984 switch (op) {
3985 case Token::IN:
3986 VisitForStackValue(expr->right());
Ben Murdoch097c5b22016-05-18 11:27:45 +01003987 CallRuntimeWithOperands(Runtime::kHasProperty);
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00003988 PrepareForBailoutBeforeSplit(expr, false, NULL, NULL);
3989 __ LoadRoot(t0, Heap::kTrueValueRootIndex);
3990 Split(eq, v0, Operand(t0), if_true, if_false, fall_through);
3991 break;
3992
3993 case Token::INSTANCEOF: {
3994 VisitForAccumulatorValue(expr->right());
3995 __ mov(a0, result_register());
Ben Murdoch097c5b22016-05-18 11:27:45 +01003996 PopOperand(a1);
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00003997 InstanceOfStub stub(isolate());
3998 __ CallStub(&stub);
3999 PrepareForBailoutBeforeSplit(expr, false, NULL, NULL);
4000 __ LoadRoot(at, Heap::kTrueValueRootIndex);
4001 Split(eq, v0, Operand(at), if_true, if_false, fall_through);
4002 break;
4003 }
4004
4005 default: {
4006 VisitForAccumulatorValue(expr->right());
4007 Condition cc = CompareIC::ComputeCondition(op);
4008 __ mov(a0, result_register());
Ben Murdoch097c5b22016-05-18 11:27:45 +01004009 PopOperand(a1);
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00004010
4011 bool inline_smi_code = ShouldInlineSmiCase(op);
4012 JumpPatchSite patch_site(masm_);
4013 if (inline_smi_code) {
4014 Label slow_case;
4015 __ Or(a2, a0, Operand(a1));
4016 patch_site.EmitJumpIfNotSmi(a2, &slow_case);
4017 Split(cc, a1, Operand(a0), if_true, if_false, NULL);
4018 __ bind(&slow_case);
4019 }
4020
Ben Murdoch097c5b22016-05-18 11:27:45 +01004021 Handle<Code> ic = CodeFactory::CompareIC(isolate(), op).code();
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00004022 CallIC(ic, expr->CompareOperationFeedbackId());
4023 patch_site.EmitPatchInfo();
4024 PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
4025 Split(cc, v0, Operand(zero_reg), if_true, if_false, fall_through);
4026 }
4027 }
4028
4029 // Convert the result of the comparison into one expected for this
4030 // expression's context.
4031 context()->Plug(if_true, if_false);
4032}
4033
4034
4035void FullCodeGenerator::EmitLiteralCompareNil(CompareOperation* expr,
4036 Expression* sub_expr,
4037 NilValue nil) {
4038 Label materialize_true, materialize_false;
4039 Label* if_true = NULL;
4040 Label* if_false = NULL;
4041 Label* fall_through = NULL;
4042 context()->PrepareTest(&materialize_true, &materialize_false,
4043 &if_true, &if_false, &fall_through);
4044
4045 VisitForAccumulatorValue(sub_expr);
4046 PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
4047 __ mov(a0, result_register());
4048 if (expr->op() == Token::EQ_STRICT) {
4049 Heap::RootListIndex nil_value = nil == kNullValue ?
4050 Heap::kNullValueRootIndex :
4051 Heap::kUndefinedValueRootIndex;
4052 __ LoadRoot(a1, nil_value);
4053 Split(eq, a0, Operand(a1), if_true, if_false, fall_through);
4054 } else {
4055 Handle<Code> ic = CompareNilICStub::GetUninitialized(isolate(), nil);
4056 CallIC(ic, expr->CompareOperationFeedbackId());
4057 __ LoadRoot(a1, Heap::kTrueValueRootIndex);
4058 Split(eq, v0, Operand(a1), if_true, if_false, fall_through);
4059 }
4060 context()->Plug(if_true, if_false);
4061}
4062
4063
4064void FullCodeGenerator::VisitThisFunction(ThisFunction* expr) {
4065 __ lw(v0, MemOperand(fp, JavaScriptFrameConstants::kFunctionOffset));
4066 context()->Plug(v0);
4067}
4068
4069
4070Register FullCodeGenerator::result_register() {
4071 return v0;
4072}
4073
4074
4075Register FullCodeGenerator::context_register() {
4076 return cp;
4077}
4078
4079
4080void FullCodeGenerator::StoreToFrameField(int frame_offset, Register value) {
4081 DCHECK_EQ(POINTER_SIZE_ALIGN(frame_offset), frame_offset);
4082 __ sw(value, MemOperand(fp, frame_offset));
4083}
4084
4085
4086void FullCodeGenerator::LoadContextField(Register dst, int context_index) {
4087 __ lw(dst, ContextMemOperand(cp, context_index));
4088}
4089
4090
4091void FullCodeGenerator::PushFunctionArgumentForContextAllocation() {
4092 Scope* closure_scope = scope()->ClosureScope();
4093 if (closure_scope->is_script_scope() ||
4094 closure_scope->is_module_scope()) {
4095 // Contexts nested in the native context have a canonical empty function
4096 // as their closure, not the anonymous closure containing the global
4097 // code.
4098 __ LoadNativeContextSlot(Context::CLOSURE_INDEX, at);
4099 } else if (closure_scope->is_eval_scope()) {
4100 // Contexts created by a call to eval have the same closure as the
4101 // context calling eval, not the anonymous closure containing the eval
4102 // code. Fetch it from the context.
4103 __ lw(at, ContextMemOperand(cp, Context::CLOSURE_INDEX));
4104 } else {
4105 DCHECK(closure_scope->is_function_scope());
4106 __ lw(at, MemOperand(fp, JavaScriptFrameConstants::kFunctionOffset));
4107 }
Ben Murdoch097c5b22016-05-18 11:27:45 +01004108 PushOperand(at);
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00004109}
4110
4111
4112// ----------------------------------------------------------------------------
4113// Non-local control flow support.
4114
4115void FullCodeGenerator::EnterFinallyBlock() {
4116 DCHECK(!result_register().is(a1));
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00004117 // Store pending message while executing finally block.
4118 ExternalReference pending_message_obj =
4119 ExternalReference::address_of_pending_message_obj(isolate());
4120 __ li(at, Operand(pending_message_obj));
4121 __ lw(a1, MemOperand(at));
Ben Murdoch097c5b22016-05-18 11:27:45 +01004122 PushOperand(a1);
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00004123
4124 ClearPendingMessage();
4125}
4126
4127
4128void FullCodeGenerator::ExitFinallyBlock() {
4129 DCHECK(!result_register().is(a1));
4130 // Restore pending message from stack.
Ben Murdoch097c5b22016-05-18 11:27:45 +01004131 PopOperand(a1);
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00004132 ExternalReference pending_message_obj =
4133 ExternalReference::address_of_pending_message_obj(isolate());
4134 __ li(at, Operand(pending_message_obj));
4135 __ sw(a1, MemOperand(at));
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00004136}
4137
4138
4139void FullCodeGenerator::ClearPendingMessage() {
4140 DCHECK(!result_register().is(a1));
4141 ExternalReference pending_message_obj =
4142 ExternalReference::address_of_pending_message_obj(isolate());
4143 __ LoadRoot(a1, Heap::kTheHoleValueRootIndex);
4144 __ li(at, Operand(pending_message_obj));
4145 __ sw(a1, MemOperand(at));
4146}
4147
4148
4149void FullCodeGenerator::EmitLoadStoreICSlot(FeedbackVectorSlot slot) {
4150 DCHECK(!slot.IsInvalid());
4151 __ li(VectorStoreICTrampolineDescriptor::SlotRegister(),
4152 Operand(SmiFromSlot(slot)));
4153}
4154
Ben Murdoch097c5b22016-05-18 11:27:45 +01004155void FullCodeGenerator::DeferredCommands::EmitCommands() {
4156 DCHECK(!result_register().is(a1));
4157 __ Pop(result_register()); // Restore the accumulator.
4158 __ Pop(a1); // Get the token.
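  // Dispatch on the token: each deferred command runs only when the token
  // popped into a1 matches its own token; non-matching entries are skipped.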
4159 for (DeferredCommand cmd : commands_) {
4160 Label skip;
4161 __ li(at, Operand(Smi::FromInt(cmd.token)));
4162 __ Branch(&skip, ne, a1, Operand(at));
4163 switch (cmd.command) {
4164 case kReturn:
4165 codegen_->EmitUnwindAndReturn();
4166 break;
4167 case kThrow:
4168 __ Push(result_register());
4169 __ CallRuntime(Runtime::kReThrow);
4170 break;
4171 case kContinue:
4172 codegen_->EmitContinue(cmd.target);
4173 break;
4174 case kBreak:
4175 codegen_->EmitBreak(cmd.target);
4176 break;
4177 }
4178 __ bind(&skip);
4179 }
4180}
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00004181
4182#undef __
4183
4184
4185void BackEdgeTable::PatchAt(Code* unoptimized_code,
4186 Address pc,
4187 BackEdgeState target_state,
4188 Code* replacement_code) {
4189 static const int kInstrSize = Assembler::kInstrSize;
4190 Address branch_address = pc - 6 * kInstrSize;
4191 Isolate* isolate = unoptimized_code->GetIsolate();
4192 CodePatcher patcher(isolate, branch_address, 1);
4193
4194 switch (target_state) {
4195 case INTERRUPT:
4196 // slt at, a3, zero_reg (in case of count based interrupts)
4197 // beq at, zero_reg, ok
4198 // lui t9, <interrupt stub address> upper
4199 // ori t9, <interrupt stub address> lower
4200 // jalr t9
4201 // nop
4202 // ok-label ----- pc_after points here
4203 patcher.masm()->slt(at, a3, zero_reg);
4204 break;
4205 case ON_STACK_REPLACEMENT:
4206 case OSR_AFTER_STACK_CHECK:
4207 // addiu at, zero_reg, 1
4208 // beq at, zero_reg, ok ;; Not changed
4209 // lui t9, <on-stack replacement address> upper
4210 // ori t9, <on-stack replacement address> lower
4211 // jalr t9 ;; Not changed
4212 // nop ;; Not changed
4213 // ok-label ----- pc_after points here
4214 patcher.masm()->addiu(at, zero_reg, 1);
4215 break;
4216 }
4217 Address pc_immediate_load_address = pc - 4 * kInstrSize;
4218 // Replace the stack check address in the load-immediate (lui/ori pair)
4219 // with the entry address of the replacement code.
4220 Assembler::set_target_address_at(isolate, pc_immediate_load_address,
4221 replacement_code->entry());
4222
4223 unoptimized_code->GetHeap()->incremental_marking()->RecordCodeTargetPatch(
4224 unoptimized_code, pc_immediate_load_address, replacement_code);
4225}
4226
4227
4228BackEdgeTable::BackEdgeState BackEdgeTable::GetBackEdgeState(
4229 Isolate* isolate,
4230 Code* unoptimized_code,
4231 Address pc) {
4232 static const int kInstrSize = Assembler::kInstrSize;
4233 Address branch_address = pc - 6 * kInstrSize;
4234 Address pc_immediate_load_address = pc - 4 * kInstrSize;
4235
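  // PatchAt (above) rewrites the first instruction of the sequence: 'slt'
  // while the interrupt check is installed, 'addiu at, zero_reg, 1' after
  // patching; the lui/ori target then identifies which replacement is active.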
4236 DCHECK(Assembler::IsBeq(Assembler::instr_at(pc - 5 * kInstrSize)));
4237 if (!Assembler::IsAddImmediate(Assembler::instr_at(branch_address))) {
4238 DCHECK(reinterpret_cast<uint32_t>(
4239 Assembler::target_address_at(pc_immediate_load_address)) ==
4240 reinterpret_cast<uint32_t>(
4241 isolate->builtins()->InterruptCheck()->entry()));
4242 return INTERRUPT;
4243 }
4244
4245 DCHECK(Assembler::IsAddImmediate(Assembler::instr_at(branch_address)));
4246
4247 if (reinterpret_cast<uint32_t>(
4248 Assembler::target_address_at(pc_immediate_load_address)) ==
4249 reinterpret_cast<uint32_t>(
4250 isolate->builtins()->OnStackReplacement()->entry())) {
4251 return ON_STACK_REPLACEMENT;
4252 }
4253
4254 DCHECK(reinterpret_cast<uint32_t>(
4255 Assembler::target_address_at(pc_immediate_load_address)) ==
4256 reinterpret_cast<uint32_t>(
4257 isolate->builtins()->OsrAfterStackCheck()->entry()));
4258 return OSR_AFTER_STACK_CHECK;
4259}
4260
4261
4262} // namespace internal
4263} // namespace v8
4264
4265#endif // V8_TARGET_ARCH_MIPS