// Copyright 2012 the V8 project authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.

#if V8_TARGET_ARCH_MIPS64

// Note on Mips implementation:
//
// The result_register() for mips is the 'v0' register, which is defined
// by the ABI to contain function return values. However, the first
// parameter to a function is defined to be 'a0'. So there are many
// places where we have to move a previous result in v0 to a0 for the
// next call: mov(a0, v0). This is not needed on the other architectures.
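//
// For example, DoTest() below computes its value into v0 and must copy it
// into a0 before invoking the ToBoolean IC:
//   __ mov(a0, result_register());  // result_register() is v0 on mips64.
//   CallIC(ic, condition->test_id());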

#include "src/ast/scopes.h"
#include "src/code-factory.h"
#include "src/code-stubs.h"
#include "src/codegen.h"
#include "src/debug/debug.h"
#include "src/full-codegen/full-codegen.h"
#include "src/ic/ic.h"
#include "src/parsing/parser.h"

#include "src/mips64/code-stubs-mips64.h"
#include "src/mips64/macro-assembler-mips64.h"

namespace v8 {
namespace internal {

#define __ ACCESS_MASM(masm())

// A patch site is a location in the code which it is possible to patch. This
// class has a number of methods to emit the code which is patchable and the
// method EmitPatchInfo to record a marker back to the patchable code. This
// marker is an andi zero_reg, rx, #yyyy instruction, and rx * 0x0000ffff +
// yyyy (raw 16 bit immediate value is used) is the delta from the pc to the
// first instruction of the patchable code.
// The marker instruction is effectively a NOP (dest is zero_reg) and will
// never be emitted by normal code.
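// For example (illustrative decode, with kImm16Mask == 0xffff): a marker
// andi(zero_reg, <register with code 1>, 4465) emitted by EmitPatchInfo()
// encodes a delta of 1 * 0xffff + 4465 == 70000 instructions back to the
// patch site.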
class JumpPatchSite BASE_EMBEDDED {
 public:
  explicit JumpPatchSite(MacroAssembler* masm) : masm_(masm) {
#ifdef DEBUG
    info_emitted_ = false;
#endif
  }

  ~JumpPatchSite() {
    DCHECK(patch_site_.is_bound() == info_emitted_);
  }

  // When initially emitting this, ensure that a jump is always generated to
  // skip the inlined smi code.
  void EmitJumpIfNotSmi(Register reg, Label* target) {
    DCHECK(!patch_site_.is_bound() && !info_emitted_);
    Assembler::BlockTrampolinePoolScope block_trampoline_pool(masm_);
    __ bind(&patch_site_);
    __ andi(at, reg, 0);
    // Always taken before patched.
    __ BranchShort(target, eq, at, Operand(zero_reg));
  }

  // When initially emitting this, ensure that a jump is never generated to
  // skip the inlined smi code.
  void EmitJumpIfSmi(Register reg, Label* target) {
    Assembler::BlockTrampolinePoolScope block_trampoline_pool(masm_);
    DCHECK(!patch_site_.is_bound() && !info_emitted_);
    __ bind(&patch_site_);
    __ andi(at, reg, 0);
    // Never taken before patched.
    __ BranchShort(target, ne, at, Operand(zero_reg));
  }

  void EmitPatchInfo() {
    if (patch_site_.is_bound()) {
      int delta_to_patch_site = masm_->InstructionsGeneratedSince(&patch_site_);
      Register reg = Register::from_code(delta_to_patch_site / kImm16Mask);
      __ andi(zero_reg, reg, delta_to_patch_site % kImm16Mask);
#ifdef DEBUG
      info_emitted_ = true;
#endif
    } else {
      __ nop();  // Signals no inlined code.
    }
  }

 private:
  MacroAssembler* masm() { return masm_; }
  MacroAssembler* masm_;
  Label patch_site_;
#ifdef DEBUG
  bool info_emitted_;
#endif
};
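
// Typical usage (see VisitSwitchStatement below): bracket the inlined smi
// code with EmitJumpIfNotSmi(), call the comparison IC from the slow path,
// then record the marker with EmitPatchInfo() so the IC can later locate
// and patch the inlined code:
//   JumpPatchSite patch_site(masm_);
//   patch_site.EmitJumpIfNotSmi(a2, &slow_case);
//   ...                                  // inlined smi comparison
//   __ bind(&slow_case);
//   CallIC(ic, clause->CompareId());
//   patch_site.EmitPatchInfo();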


// Generate code for a JS function. On entry to the function the receiver
// and arguments have been pushed on the stack left to right. The actual
// argument count matches the formal parameter count expected by the
// function.
//
// The live registers are:
//   o a1: the JS function object being called (i.e. ourselves)
//   o a3: the new target value
//   o cp: our context
//   o fp: our caller's frame pointer
//   o sp: stack pointer
//   o ra: return address
//
// The function builds a JS frame. Please see JavaScriptFrameConstants in
// frames-mips64.h for its layout.
void FullCodeGenerator::Generate() {
  CompilationInfo* info = info_;
  profiling_counter_ = isolate()->factory()->NewCell(
      Handle<Smi>(Smi::FromInt(FLAG_interrupt_budget), isolate()));
  SetFunctionPosition(literal());
  Comment cmnt(masm_, "[ function compiled by full code generator");

  ProfileEntryHookStub::MaybeCallEntryHook(masm_);

  if (FLAG_debug_code && info->ExpectsJSReceiverAsReceiver()) {
    int receiver_offset = info->scope()->num_parameters() * kPointerSize;
    __ ld(a2, MemOperand(sp, receiver_offset));
    __ AssertNotSmi(a2);
    __ GetObjectType(a2, a2, a2);
    __ Check(ge, kSloppyFunctionExpectsJSReceiverReceiver, a2,
             Operand(FIRST_JS_RECEIVER_TYPE));
  }

  // Open a frame scope to indicate that there is a frame on the stack. The
  // MANUAL indicates that the scope shouldn't actually generate code to set up
  // the frame (that is done below).
  FrameScope frame_scope(masm_, StackFrame::MANUAL);
  info->set_prologue_offset(masm_->pc_offset());
  __ Prologue(info->GeneratePreagedPrologue());

  { Comment cmnt(masm_, "[ Allocate locals");
    int locals_count = info->scope()->num_stack_slots();
    // Generators allocate locals, if any, in context slots.
    DCHECK(!IsGeneratorFunction(info->literal()->kind()) || locals_count == 0);
    OperandStackDepthIncrement(locals_count);
    if (locals_count > 0) {
      if (locals_count >= 128) {
        Label ok;
        __ Dsubu(t1, sp, Operand(locals_count * kPointerSize));
        __ LoadRoot(a2, Heap::kRealStackLimitRootIndex);
        __ Branch(&ok, hs, t1, Operand(a2));
        __ CallRuntime(Runtime::kThrowStackOverflow);
        __ bind(&ok);
      }
      __ LoadRoot(t1, Heap::kUndefinedValueRootIndex);
      int kMaxPushes = FLAG_optimize_for_size ? 4 : 32;
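      // For example, with locals_count == 70 and kMaxPushes == 32, the loop
      // below runs twice (initializing 64 slots) and the remaining 6 slots
      // are filled by the straight-line pushes that follow it.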
      if (locals_count >= kMaxPushes) {
        int loop_iterations = locals_count / kMaxPushes;
        __ li(a2, Operand(loop_iterations));
        Label loop_header;
        __ bind(&loop_header);
        // Do pushes.
        __ Dsubu(sp, sp, Operand(kMaxPushes * kPointerSize));
        for (int i = 0; i < kMaxPushes; i++) {
          __ sd(t1, MemOperand(sp, i * kPointerSize));
        }
        // Continue loop if not done.
        __ Dsubu(a2, a2, Operand(1));
        __ Branch(&loop_header, ne, a2, Operand(zero_reg));
      }
      int remaining = locals_count % kMaxPushes;
      // Emit the remaining pushes.
      __ Dsubu(sp, sp, Operand(remaining * kPointerSize));
      for (int i = 0; i < remaining; i++) {
        __ sd(t1, MemOperand(sp, i * kPointerSize));
      }
    }
  }

  bool function_in_register_a1 = true;

  // Possibly allocate a local context.
  if (info->scope()->num_heap_slots() > 0) {
    Comment cmnt(masm_, "[ Allocate context");
    // Argument to NewContext is the function, which is still in a1.
    bool need_write_barrier = true;
    int slots = info->scope()->num_heap_slots() - Context::MIN_CONTEXT_SLOTS;
    if (info->scope()->is_script_scope()) {
      __ push(a1);
      __ Push(info->scope()->GetScopeInfo(info->isolate()));
      __ CallRuntime(Runtime::kNewScriptContext);
      PrepareForBailoutForId(BailoutId::ScriptContext(),
                             BailoutState::TOS_REGISTER);
      // The new target value is not used, clobbering is safe.
      DCHECK_NULL(info->scope()->new_target_var());
    } else {
      if (info->scope()->new_target_var() != nullptr) {
        __ push(a3);  // Preserve new target.
      }
      if (slots <= FastNewContextStub::kMaximumSlots) {
        FastNewContextStub stub(isolate(), slots);
        __ CallStub(&stub);
        // Result of FastNewContextStub is always in new space.
        need_write_barrier = false;
      } else {
        __ push(a1);
        __ CallRuntime(Runtime::kNewFunctionContext);
      }
      if (info->scope()->new_target_var() != nullptr) {
        __ pop(a3);  // Restore new target.
      }
    }
    function_in_register_a1 = false;
    // Context is returned in v0. It replaces the context passed to us.
    // It's saved on the stack and kept live in cp.
    __ mov(cp, v0);
    __ sd(v0, MemOperand(fp, StandardFrameConstants::kContextOffset));
    // Copy any necessary parameters into the context.
    int num_parameters = info->scope()->num_parameters();
    int first_parameter = info->scope()->has_this_declaration() ? -1 : 0;
    for (int i = first_parameter; i < num_parameters; i++) {
      Variable* var = (i == -1) ? scope()->receiver() : scope()->parameter(i);
      if (var->IsContextSlot()) {
        int parameter_offset = StandardFrameConstants::kCallerSPOffset +
                               (num_parameters - 1 - i) * kPointerSize;
        // Load parameter from stack.
        __ ld(a0, MemOperand(fp, parameter_offset));
        // Store it in the context.
        MemOperand target = ContextMemOperand(cp, var->index());
        __ sd(a0, target);

        // Update the write barrier.
        if (need_write_barrier) {
          __ RecordWriteContextSlot(cp, target.offset(), a0, a2,
                                    kRAHasBeenSaved, kDontSaveFPRegs);
        } else if (FLAG_debug_code) {
          Label done;
          __ JumpIfInNewSpace(cp, a0, &done);
          __ Abort(kExpectedNewSpaceObject);
          __ bind(&done);
        }
      }
    }
  }

  // The registers holding this function and the new target are both trashed
  // if we bail out here. But since that can happen only when new target is
  // not used and we allocate a context, the value of
  // |function_in_register_a1| is correct.
  PrepareForBailoutForId(BailoutId::FunctionContext(),
                         BailoutState::NO_REGISTERS);

  // Possibly set up a local binding to the this function which is used in
  // derived constructors with super calls.
  Variable* this_function_var = scope()->this_function_var();
  if (this_function_var != nullptr) {
    Comment cmnt(masm_, "[ This function");
    if (!function_in_register_a1) {
      __ ld(a1, MemOperand(fp, JavaScriptFrameConstants::kFunctionOffset));
      // The write barrier clobbers the register again, keep it marked as such.
    }
    SetVar(this_function_var, a1, a0, a2);
  }

  Variable* new_target_var = scope()->new_target_var();
  if (new_target_var != nullptr) {
    Comment cmnt(masm_, "[ new.target");
    SetVar(new_target_var, a3, a0, a2);
  }

  // Possibly allocate rest parameters.
  int rest_index;
  Variable* rest_param = scope()->rest_parameter(&rest_index);
  if (rest_param) {
    Comment cmnt(masm_, "[ Allocate rest parameter array");
    if (!function_in_register_a1) {
      __ ld(a1, MemOperand(fp, JavaScriptFrameConstants::kFunctionOffset));
    }
    FastNewRestParameterStub stub(isolate());
    __ CallStub(&stub);
    function_in_register_a1 = false;
    SetVar(rest_param, v0, a1, a2);
  }

  Variable* arguments = scope()->arguments();
  if (arguments != NULL) {
    // Function uses arguments object.
    Comment cmnt(masm_, "[ Allocate arguments object");
    if (!function_in_register_a1) {
      // Load this again, if it's used by the local context below.
      __ ld(a1, MemOperand(fp, JavaScriptFrameConstants::kFunctionOffset));
    }
    if (is_strict(language_mode()) || !has_simple_parameters()) {
      FastNewStrictArgumentsStub stub(isolate());
      __ CallStub(&stub);
    } else if (literal()->has_duplicate_parameters()) {
      __ Push(a1);
      __ CallRuntime(Runtime::kNewSloppyArguments_Generic);
    } else {
      FastNewSloppyArgumentsStub stub(isolate());
      __ CallStub(&stub);
    }

    SetVar(arguments, v0, a1, a2);
  }

  if (FLAG_trace) {
    __ CallRuntime(Runtime::kTraceEnter);
  }

  // Visit the declarations and body.
  PrepareForBailoutForId(BailoutId::FunctionEntry(),
                         BailoutState::NO_REGISTERS);
  {
    Comment cmnt(masm_, "[ Declarations");
    VisitDeclarations(scope()->declarations());
  }

  // Assert that the declarations do not use ICs. Otherwise the debugger
  // won't be able to redirect a PC at an IC to the correct IC in newly
  // recompiled code.
  DCHECK_EQ(0, ic_total_count_);

  {
    Comment cmnt(masm_, "[ Stack check");
    PrepareForBailoutForId(BailoutId::Declarations(),
                           BailoutState::NO_REGISTERS);
    Label ok;
    __ LoadRoot(at, Heap::kStackLimitRootIndex);
    __ Branch(&ok, hs, sp, Operand(at));
    Handle<Code> stack_check = isolate()->builtins()->StackCheck();
    PredictableCodeSizeScope predictable(
        masm_, masm_->CallSize(stack_check, RelocInfo::CODE_TARGET));
    __ Call(stack_check, RelocInfo::CODE_TARGET);
    __ bind(&ok);
  }

  {
    Comment cmnt(masm_, "[ Body");
    DCHECK(loop_depth() == 0);

    VisitStatements(literal()->body());

    DCHECK(loop_depth() == 0);
  }

  // Always emit a 'return undefined' in case control fell off the end of
  // the body.
  { Comment cmnt(masm_, "[ return <undefined>;");
    __ LoadRoot(v0, Heap::kUndefinedValueRootIndex);
  }
  EmitReturnSequence();
}


void FullCodeGenerator::ClearAccumulator() {
  DCHECK(Smi::FromInt(0) == 0);
  __ mov(v0, zero_reg);
}


void FullCodeGenerator::EmitProfilingCounterDecrement(int delta) {
  __ li(a2, Operand(profiling_counter_));
  __ ld(a3, FieldMemOperand(a2, Cell::kValueOffset));
  __ Dsubu(a3, a3, Operand(Smi::FromInt(delta)));
  __ sd(a3, FieldMemOperand(a2, Cell::kValueOffset));
}


void FullCodeGenerator::EmitProfilingCounterReset() {
  int reset_value = FLAG_interrupt_budget;
  if (info_->is_debug()) {
    // Detect debug break requests as soon as possible.
    reset_value = FLAG_interrupt_budget >> 4;
  }
  __ li(a2, Operand(profiling_counter_));
  __ li(a3, Operand(Smi::FromInt(reset_value)));
  __ sd(a3, FieldMemOperand(a2, Cell::kValueOffset));
}


void FullCodeGenerator::EmitBackEdgeBookkeeping(IterationStatement* stmt,
                                                Label* back_edge_target) {
  // The generated code is used in Deoptimizer::PatchStackCheckCodeAt, so we
  // need to make sure it is constant in size. Branch may emit a skip-or-jump
  // sequence instead of the normal Branch. It seems that the "skip" part of
  // that sequence is about as long as this Branch would be, so it is safe to
  // ignore that.
  Assembler::BlockTrampolinePoolScope block_trampoline_pool(masm_);
  Comment cmnt(masm_, "[ Back edge bookkeeping");
  Label ok;
  DCHECK(back_edge_target->is_bound());
  int distance = masm_->SizeOfCodeGeneratedSince(back_edge_target);
  int weight = Min(kMaxBackEdgeWeight,
                   Max(1, distance / kCodeSizeMultiplier));
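  // Larger loop bodies decrement the counter by a larger weight per back
  // edge, so the interrupt check fires after roughly the same amount of
  // executed code regardless of loop body size.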
  EmitProfilingCounterDecrement(weight);
  __ slt(at, a3, zero_reg);
  __ beq(at, zero_reg, &ok);
  // Call will emit a li t9 first, so it is safe to use the delay slot.
  __ Call(isolate()->builtins()->InterruptCheck(), RelocInfo::CODE_TARGET);
  // Record a mapping of this PC offset to the OSR id. This is used to find
  // the AST id from the unoptimized code in order to use it as a key into
  // the deoptimization input data found in the optimized code.
  RecordBackEdge(stmt->OsrEntryId());
  EmitProfilingCounterReset();

  __ bind(&ok);
  PrepareForBailoutForId(stmt->EntryId(), BailoutState::NO_REGISTERS);
  // Record a mapping of the OSR id to this PC. This is used if the OSR
  // entry becomes the target of a bailout. We don't expect it to be, but
  // we want it to work if it is.
  PrepareForBailoutForId(stmt->OsrEntryId(), BailoutState::NO_REGISTERS);
}

void FullCodeGenerator::EmitProfilingCounterHandlingForReturnSequence(
    bool is_tail_call) {
  // Pretend that the exit is a backwards jump to the entry.
  int weight = 1;
  if (info_->ShouldSelfOptimize()) {
    weight = FLAG_interrupt_budget / FLAG_self_opt_count;
  } else {
    int distance = masm_->pc_offset();
    weight = Min(kMaxBackEdgeWeight, Max(1, distance / kCodeSizeMultiplier));
  }
  EmitProfilingCounterDecrement(weight);
  Label ok;
  __ Branch(&ok, ge, a3, Operand(zero_reg));
  // Don't need to save result register if we are going to do a tail call.
  if (!is_tail_call) {
    __ push(v0);
  }
  __ Call(isolate()->builtins()->InterruptCheck(), RelocInfo::CODE_TARGET);
  if (!is_tail_call) {
    __ pop(v0);
  }
  EmitProfilingCounterReset();
  __ bind(&ok);
}

void FullCodeGenerator::EmitReturnSequence() {
  Comment cmnt(masm_, "[ Return sequence");
  if (return_label_.is_bound()) {
    __ Branch(&return_label_);
  } else {
    __ bind(&return_label_);
    if (FLAG_trace) {
      // Push the return value on the stack as the parameter.
      // Runtime::TraceExit returns its parameter in v0.
      __ push(v0);
      __ CallRuntime(Runtime::kTraceExit);
    }
    EmitProfilingCounterHandlingForReturnSequence(false);

    // Make sure that the trampoline pool is not emitted inside of the return
    // sequence.
    { Assembler::BlockTrampolinePoolScope block_trampoline_pool(masm_);
      // Here we use masm_-> instead of the __ macro to prevent the code
      // coverage tool from instrumenting, as we rely on the code size here.
      int32_t arg_count = info_->scope()->num_parameters() + 1;
      int32_t sp_delta = arg_count * kPointerSize;
      SetReturnPosition(literal());
      masm_->mov(sp, fp);
      masm_->MultiPop(static_cast<RegList>(fp.bit() | ra.bit()));
      masm_->Daddu(sp, sp, Operand(sp_delta));
      masm_->Jump(ra);
    }
  }
}

void FullCodeGenerator::RestoreContext() {
  __ ld(cp, MemOperand(fp, StandardFrameConstants::kContextOffset));
}

void FullCodeGenerator::StackValueContext::Plug(Variable* var) const {
  DCHECK(var->IsStackAllocated() || var->IsContextSlot());
  codegen()->GetVar(result_register(), var);
  codegen()->PushOperand(result_register());
}


void FullCodeGenerator::EffectContext::Plug(Heap::RootListIndex index) const {
}


void FullCodeGenerator::AccumulatorValueContext::Plug(
    Heap::RootListIndex index) const {
  __ LoadRoot(result_register(), index);
}


void FullCodeGenerator::StackValueContext::Plug(
    Heap::RootListIndex index) const {
  __ LoadRoot(result_register(), index);
  codegen()->PushOperand(result_register());
}


void FullCodeGenerator::TestContext::Plug(Heap::RootListIndex index) const {
  codegen()->PrepareForBailoutBeforeSplit(condition(),
                                          true,
                                          true_label_,
                                          false_label_);
  if (index == Heap::kUndefinedValueRootIndex ||
      index == Heap::kNullValueRootIndex ||
      index == Heap::kFalseValueRootIndex) {
    if (false_label_ != fall_through_) __ Branch(false_label_);
  } else if (index == Heap::kTrueValueRootIndex) {
    if (true_label_ != fall_through_) __ Branch(true_label_);
  } else {
    __ LoadRoot(result_register(), index);
    codegen()->DoTest(this);
  }
}


void FullCodeGenerator::EffectContext::Plug(Handle<Object> lit) const {
}


void FullCodeGenerator::AccumulatorValueContext::Plug(
    Handle<Object> lit) const {
  __ li(result_register(), Operand(lit));
}


void FullCodeGenerator::StackValueContext::Plug(Handle<Object> lit) const {
  // Immediates cannot be pushed directly.
  __ li(result_register(), Operand(lit));
  codegen()->PushOperand(result_register());
}


void FullCodeGenerator::TestContext::Plug(Handle<Object> lit) const {
  codegen()->PrepareForBailoutBeforeSplit(condition(),
                                          true,
                                          true_label_,
                                          false_label_);
  DCHECK(lit->IsNull() || lit->IsUndefined() || !lit->IsUndetectable());
  if (lit->IsUndefined() || lit->IsNull() || lit->IsFalse()) {
    if (false_label_ != fall_through_) __ Branch(false_label_);
  } else if (lit->IsTrue() || lit->IsJSObject()) {
    if (true_label_ != fall_through_) __ Branch(true_label_);
  } else if (lit->IsString()) {
    if (String::cast(*lit)->length() == 0) {
      if (false_label_ != fall_through_) __ Branch(false_label_);
    } else {
      if (true_label_ != fall_through_) __ Branch(true_label_);
    }
  } else if (lit->IsSmi()) {
    if (Smi::cast(*lit)->value() == 0) {
      if (false_label_ != fall_through_) __ Branch(false_label_);
    } else {
      if (true_label_ != fall_through_) __ Branch(true_label_);
    }
  } else {
    // For simplicity we always test the accumulator register.
    __ li(result_register(), Operand(lit));
    codegen()->DoTest(this);
  }
}


void FullCodeGenerator::StackValueContext::DropAndPlug(int count,
                                                       Register reg) const {
  DCHECK(count > 0);
  if (count > 1) codegen()->DropOperands(count - 1);
  __ sd(reg, MemOperand(sp, 0));
}


void FullCodeGenerator::EffectContext::Plug(Label* materialize_true,
                                            Label* materialize_false) const {
  DCHECK(materialize_true == materialize_false);
  __ bind(materialize_true);
}


void FullCodeGenerator::AccumulatorValueContext::Plug(
    Label* materialize_true,
    Label* materialize_false) const {
  Label done;
  __ bind(materialize_true);
  __ LoadRoot(result_register(), Heap::kTrueValueRootIndex);
  __ Branch(&done);
  __ bind(materialize_false);
  __ LoadRoot(result_register(), Heap::kFalseValueRootIndex);
  __ bind(&done);
}


void FullCodeGenerator::StackValueContext::Plug(
    Label* materialize_true,
    Label* materialize_false) const {
  codegen()->OperandStackDepthIncrement(1);
  Label done;
  __ bind(materialize_true);
  __ LoadRoot(at, Heap::kTrueValueRootIndex);
  // Push the value as the following branch can clobber at in long branch mode.
  __ push(at);
  __ Branch(&done);
  __ bind(materialize_false);
  __ LoadRoot(at, Heap::kFalseValueRootIndex);
  __ push(at);
  __ bind(&done);
}


void FullCodeGenerator::TestContext::Plug(Label* materialize_true,
                                          Label* materialize_false) const {
  DCHECK(materialize_true == true_label_);
  DCHECK(materialize_false == false_label_);
}


void FullCodeGenerator::AccumulatorValueContext::Plug(bool flag) const {
  Heap::RootListIndex value_root_index =
      flag ? Heap::kTrueValueRootIndex : Heap::kFalseValueRootIndex;
  __ LoadRoot(result_register(), value_root_index);
}


void FullCodeGenerator::StackValueContext::Plug(bool flag) const {
  Heap::RootListIndex value_root_index =
      flag ? Heap::kTrueValueRootIndex : Heap::kFalseValueRootIndex;
  __ LoadRoot(at, value_root_index);
  codegen()->PushOperand(at);
}


void FullCodeGenerator::TestContext::Plug(bool flag) const {
  codegen()->PrepareForBailoutBeforeSplit(condition(),
                                          true,
                                          true_label_,
                                          false_label_);
  if (flag) {
    if (true_label_ != fall_through_) __ Branch(true_label_);
  } else {
    if (false_label_ != fall_through_) __ Branch(false_label_);
  }
}


void FullCodeGenerator::DoTest(Expression* condition,
                               Label* if_true,
                               Label* if_false,
                               Label* fall_through) {
  __ mov(a0, result_register());
  Handle<Code> ic = ToBooleanICStub::GetUninitialized(isolate());
  CallIC(ic, condition->test_id());
  __ LoadRoot(at, Heap::kTrueValueRootIndex);
  Split(eq, result_register(), Operand(at), if_true, if_false, fall_through);
}


void FullCodeGenerator::Split(Condition cc,
                              Register lhs,
                              const Operand& rhs,
                              Label* if_true,
                              Label* if_false,
                              Label* fall_through) {
  if (if_false == fall_through) {
    __ Branch(if_true, cc, lhs, rhs);
  } else if (if_true == fall_through) {
    __ Branch(if_false, NegateCondition(cc), lhs, rhs);
  } else {
    __ Branch(if_true, cc, lhs, rhs);
    __ Branch(if_false);
  }
}


MemOperand FullCodeGenerator::StackOperand(Variable* var) {
  DCHECK(var->IsStackAllocated());
  // Offset is negative because higher indexes are at lower addresses.
  int offset = -var->index() * kPointerSize;
  // Adjust by a (parameter or local) base offset.
  if (var->IsParameter()) {
    offset += (info_->scope()->num_parameters() + 1) * kPointerSize;
  } else {
    offset += JavaScriptFrameConstants::kLocal0Offset;
  }
  return MemOperand(fp, offset);
}


MemOperand FullCodeGenerator::VarOperand(Variable* var, Register scratch) {
  DCHECK(var->IsContextSlot() || var->IsStackAllocated());
  if (var->IsContextSlot()) {
    int context_chain_length = scope()->ContextChainLength(var->scope());
    __ LoadContext(scratch, context_chain_length);
    return ContextMemOperand(scratch, var->index());
  } else {
    return StackOperand(var);
  }
}


void FullCodeGenerator::GetVar(Register dest, Variable* var) {
  // Use destination as scratch.
  MemOperand location = VarOperand(var, dest);
  __ ld(dest, location);
}


void FullCodeGenerator::SetVar(Variable* var,
                               Register src,
                               Register scratch0,
                               Register scratch1) {
  DCHECK(var->IsContextSlot() || var->IsStackAllocated());
  DCHECK(!scratch0.is(src));
  DCHECK(!scratch0.is(scratch1));
  DCHECK(!scratch1.is(src));
  MemOperand location = VarOperand(var, scratch0);
  __ sd(src, location);
  // Emit the write barrier code if the location is in the heap.
  if (var->IsContextSlot()) {
    __ RecordWriteContextSlot(scratch0,
                              location.offset(),
                              src,
                              scratch1,
                              kRAHasBeenSaved,
                              kDontSaveFPRegs);
  }
}


void FullCodeGenerator::PrepareForBailoutBeforeSplit(Expression* expr,
                                                     bool should_normalize,
                                                     Label* if_true,
                                                     Label* if_false) {
  // Only prepare for bailouts before splits if we're in a test
  // context. Otherwise, we let the Visit function deal with the
  // preparation to avoid preparing with the same AST id twice.
  if (!context()->IsTest()) return;

  Label skip;
  if (should_normalize) __ Branch(&skip);
  PrepareForBailout(expr, BailoutState::TOS_REGISTER);
  if (should_normalize) {
    __ LoadRoot(a4, Heap::kTrueValueRootIndex);
    Split(eq, a0, Operand(a4), if_true, if_false, NULL);
    __ bind(&skip);
  }
}


void FullCodeGenerator::EmitDebugCheckDeclarationContext(Variable* variable) {
  // The variable in the declaration always resides in the current function
  // context.
  DCHECK_EQ(0, scope()->ContextChainLength(variable->scope()));
  if (FLAG_debug_code) {
    // Check that we're not inside a with or catch context.
    __ ld(a1, FieldMemOperand(cp, HeapObject::kMapOffset));
    __ LoadRoot(a4, Heap::kWithContextMapRootIndex);
    __ Check(ne, kDeclarationInWithContext,
             a1, Operand(a4));
    __ LoadRoot(a4, Heap::kCatchContextMapRootIndex);
    __ Check(ne, kDeclarationInCatchContext,
             a1, Operand(a4));
  }
}


void FullCodeGenerator::VisitVariableDeclaration(
    VariableDeclaration* declaration) {
  // If it was not possible to allocate the variable at compile time, we
  // need to "declare" it at runtime to make sure it actually exists in the
  // local context.
  VariableProxy* proxy = declaration->proxy();
  VariableMode mode = declaration->mode();
  Variable* variable = proxy->var();
  bool hole_init = mode == LET || mode == CONST;
  switch (variable->location()) {
    case VariableLocation::GLOBAL:
    case VariableLocation::UNALLOCATED:
      DCHECK(!variable->binding_needs_init());
      globals_->Add(variable->name(), zone());
      globals_->Add(isolate()->factory()->undefined_value(), zone());
      break;

    case VariableLocation::PARAMETER:
    case VariableLocation::LOCAL:
      if (hole_init) {
        Comment cmnt(masm_, "[ VariableDeclaration");
        __ LoadRoot(a4, Heap::kTheHoleValueRootIndex);
        __ sd(a4, StackOperand(variable));
      }
      break;

    case VariableLocation::CONTEXT:
      if (hole_init) {
        Comment cmnt(masm_, "[ VariableDeclaration");
        EmitDebugCheckDeclarationContext(variable);
        __ LoadRoot(at, Heap::kTheHoleValueRootIndex);
        __ sd(at, ContextMemOperand(cp, variable->index()));
        // No write barrier since the_hole_value is in old space.
        PrepareForBailoutForId(proxy->id(), BailoutState::NO_REGISTERS);
      }
      break;

    case VariableLocation::LOOKUP: {
      Comment cmnt(masm_, "[ VariableDeclaration");
      __ li(a2, Operand(variable->name()));
      // Declaration nodes are always introduced in one of four modes.
      DCHECK(IsDeclaredVariableMode(mode));
      // Push initial value, if any.
      // Note: For variables we must not push an initial value (such as
      // 'undefined') because we may have a (legal) redeclaration and we
      // must not destroy the current value.
      if (hole_init) {
        __ LoadRoot(a0, Heap::kTheHoleValueRootIndex);
      } else {
        DCHECK(Smi::FromInt(0) == 0);
        __ mov(a0, zero_reg);  // Smi::FromInt(0) indicates no initial value.
      }
      __ Push(a2, a0);
      __ Push(Smi::FromInt(variable->DeclarationPropertyAttributes()));
      __ CallRuntime(Runtime::kDeclareLookupSlot);
      PrepareForBailoutForId(proxy->id(), BailoutState::NO_REGISTERS);
      break;
    }
  }
}


void FullCodeGenerator::VisitFunctionDeclaration(
    FunctionDeclaration* declaration) {
  VariableProxy* proxy = declaration->proxy();
  Variable* variable = proxy->var();
  switch (variable->location()) {
    case VariableLocation::GLOBAL:
    case VariableLocation::UNALLOCATED: {
      globals_->Add(variable->name(), zone());
      Handle<SharedFunctionInfo> function =
          Compiler::GetSharedFunctionInfo(declaration->fun(), script(), info_);
      // Check for stack-overflow exception.
      if (function.is_null()) return SetStackOverflow();
      globals_->Add(function, zone());
      break;
    }

    case VariableLocation::PARAMETER:
    case VariableLocation::LOCAL: {
      Comment cmnt(masm_, "[ FunctionDeclaration");
      VisitForAccumulatorValue(declaration->fun());
      __ sd(result_register(), StackOperand(variable));
      break;
    }

    case VariableLocation::CONTEXT: {
      Comment cmnt(masm_, "[ FunctionDeclaration");
      EmitDebugCheckDeclarationContext(variable);
      VisitForAccumulatorValue(declaration->fun());
      __ sd(result_register(), ContextMemOperand(cp, variable->index()));
      int offset = Context::SlotOffset(variable->index());
      // We know that we have written a function, which is not a smi.
      __ RecordWriteContextSlot(cp,
                                offset,
                                result_register(),
                                a2,
                                kRAHasBeenSaved,
                                kDontSaveFPRegs,
                                EMIT_REMEMBERED_SET,
                                OMIT_SMI_CHECK);
      PrepareForBailoutForId(proxy->id(), BailoutState::NO_REGISTERS);
      break;
    }

    case VariableLocation::LOOKUP: {
      Comment cmnt(masm_, "[ FunctionDeclaration");
      __ li(a2, Operand(variable->name()));
      PushOperand(a2);
      // Push initial value for function declaration.
      VisitForStackValue(declaration->fun());
      PushOperand(Smi::FromInt(variable->DeclarationPropertyAttributes()));
      CallRuntimeWithOperands(Runtime::kDeclareLookupSlot);
      PrepareForBailoutForId(proxy->id(), BailoutState::NO_REGISTERS);
      break;
    }
  }
}


void FullCodeGenerator::DeclareGlobals(Handle<FixedArray> pairs) {
  // Call the runtime to declare the globals.
  __ li(a1, Operand(pairs));
  __ li(a0, Operand(Smi::FromInt(DeclareGlobalsFlags())));
  __ Push(a1, a0);
  __ CallRuntime(Runtime::kDeclareGlobals);
  // Return value is ignored.
}


void FullCodeGenerator::DeclareModules(Handle<FixedArray> descriptions) {
  // Call the runtime to declare the modules.
  __ Push(descriptions);
  __ CallRuntime(Runtime::kDeclareModules);
  // Return value is ignored.
}


void FullCodeGenerator::VisitSwitchStatement(SwitchStatement* stmt) {
  Comment cmnt(masm_, "[ SwitchStatement");
  Breakable nested_statement(this, stmt);
  SetStatementPosition(stmt);

  // Keep the switch value on the stack until a case matches.
  VisitForStackValue(stmt->tag());
  PrepareForBailoutForId(stmt->EntryId(), BailoutState::NO_REGISTERS);

  ZoneList<CaseClause*>* clauses = stmt->cases();
  CaseClause* default_clause = NULL;  // Can occur anywhere in the list.

  Label next_test;  // Recycled for each test.
  // Compile all the tests with branches to their bodies.
  for (int i = 0; i < clauses->length(); i++) {
    CaseClause* clause = clauses->at(i);
    clause->body_target()->Unuse();

    // The default is not a test, but remember it as final fall through.
    if (clause->is_default()) {
      default_clause = clause;
      continue;
    }

    Comment cmnt(masm_, "[ Case comparison");
    __ bind(&next_test);
    next_test.Unuse();

    // Compile the label expression.
    VisitForAccumulatorValue(clause->label());
    __ mov(a0, result_register());  // CompareStub requires args in a0, a1.

    // Perform the comparison as if via '==='.
    __ ld(a1, MemOperand(sp, 0));  // Switch value.
    bool inline_smi_code = ShouldInlineSmiCase(Token::EQ_STRICT);
    JumpPatchSite patch_site(masm_);
    if (inline_smi_code) {
      Label slow_case;
      __ or_(a2, a1, a0);
      patch_site.EmitJumpIfNotSmi(a2, &slow_case);

      __ Branch(&next_test, ne, a1, Operand(a0));
      __ Drop(1);  // Switch value is no longer needed.
      __ Branch(clause->body_target());

      __ bind(&slow_case);
    }

    // Record position before stub call for type feedback.
    SetExpressionPosition(clause);
    Handle<Code> ic =
        CodeFactory::CompareIC(isolate(), Token::EQ_STRICT).code();
    CallIC(ic, clause->CompareId());
    patch_site.EmitPatchInfo();

    Label skip;
    __ Branch(&skip);
    PrepareForBailout(clause, BailoutState::TOS_REGISTER);
    __ LoadRoot(at, Heap::kTrueValueRootIndex);
    __ Branch(&next_test, ne, v0, Operand(at));
    __ Drop(1);
    __ Branch(clause->body_target());
    __ bind(&skip);

    __ Branch(&next_test, ne, v0, Operand(zero_reg));
    __ Drop(1);  // Switch value is no longer needed.
    __ Branch(clause->body_target());
  }

  // Discard the test value and jump to the default if present, otherwise to
  // the end of the statement.
  __ bind(&next_test);
  DropOperands(1);  // Switch value is no longer needed.
  if (default_clause == NULL) {
    __ Branch(nested_statement.break_label());
  } else {
    __ Branch(default_clause->body_target());
  }

  // Compile all the case bodies.
  for (int i = 0; i < clauses->length(); i++) {
    Comment cmnt(masm_, "[ Case body");
    CaseClause* clause = clauses->at(i);
    __ bind(clause->body_target());
    PrepareForBailoutForId(clause->EntryId(), BailoutState::NO_REGISTERS);
    VisitStatements(clause->statements());
  }

  __ bind(nested_statement.break_label());
  PrepareForBailoutForId(stmt->ExitId(), BailoutState::NO_REGISTERS);
}


void FullCodeGenerator::VisitForInStatement(ForInStatement* stmt) {
  Comment cmnt(masm_, "[ ForInStatement");
  SetStatementPosition(stmt, SKIP_BREAK);

  FeedbackVectorSlot slot = stmt->ForInFeedbackSlot();

  // Get the object to enumerate over. If the object is null or undefined, skip
  // over the loop. See ECMA-262 version 5, section 12.6.4.
  SetExpressionAsStatementPosition(stmt->enumerable());
  VisitForAccumulatorValue(stmt->enumerable());
  __ mov(a0, result_register());
  OperandStackDepthIncrement(5);

  Label loop, exit;
  Iteration loop_statement(this, stmt);
  increment_loop_depth();

  // If the object is null or undefined, skip over the loop, otherwise convert
  // it to a JS receiver. See ECMA-262 version 5, section 12.6.4.
  Label convert, done_convert;
  __ JumpIfSmi(a0, &convert);
  __ GetObjectType(a0, a1, a1);
  __ Branch(USE_DELAY_SLOT, &done_convert, ge, a1,
            Operand(FIRST_JS_RECEIVER_TYPE));
  __ LoadRoot(at, Heap::kNullValueRootIndex);  // In delay slot.
  __ Branch(USE_DELAY_SLOT, &exit, eq, a0, Operand(at));
  __ LoadRoot(at, Heap::kUndefinedValueRootIndex);  // In delay slot.
  __ Branch(&exit, eq, a0, Operand(at));
  __ bind(&convert);
  ToObjectStub stub(isolate());
  __ CallStub(&stub);
  __ mov(a0, v0);
  __ bind(&done_convert);
  PrepareForBailoutForId(stmt->ToObjectId(), BailoutState::TOS_REGISTER);
  __ push(a0);

  // Check cache validity in generated code. If we cannot guarantee cache
  // validity, call the runtime system to check cache validity or get the
  // property names in a fixed array. Note: Proxies never have an enum cache,
  // so will always take the slow path.
  Label call_runtime;
  __ CheckEnumCache(&call_runtime);

  // The enum cache is valid. Load the map of the object being
  // iterated over and use the cache for the iteration.
  Label use_cache;
  __ ld(v0, FieldMemOperand(a0, HeapObject::kMapOffset));
  __ Branch(&use_cache);

  // Get the set of properties to enumerate.
  __ bind(&call_runtime);
  __ push(a0);  // Duplicate the enumerable object on the stack.
  __ CallRuntime(Runtime::kForInEnumerate);
  PrepareForBailoutForId(stmt->EnumId(), BailoutState::TOS_REGISTER);

  // If we got a map from the runtime call, we can do a fast
  // modification check. Otherwise, we got a fixed array, and we have
  // to do a slow check.
  Label fixed_array;
  __ ld(a2, FieldMemOperand(v0, HeapObject::kMapOffset));
  __ LoadRoot(at, Heap::kMetaMapRootIndex);
  __ Branch(&fixed_array, ne, a2, Operand(at));

  // We got a map in register v0. Get the enumeration cache from it.
  Label no_descriptors;
  __ bind(&use_cache);

  __ EnumLength(a1, v0);
  __ Branch(&no_descriptors, eq, a1, Operand(Smi::FromInt(0)));

  __ LoadInstanceDescriptors(v0, a2);
  __ ld(a2, FieldMemOperand(a2, DescriptorArray::kEnumCacheOffset));
  __ ld(a2, FieldMemOperand(a2, DescriptorArray::kEnumCacheBridgeCacheOffset));

  // Set up the four remaining stack slots.
  __ li(a0, Operand(Smi::FromInt(0)));
  // Push map, enumeration cache, enumeration cache length (as smi) and zero.
  __ Push(v0, a2, a1, a0);
  __ jmp(&loop);

  __ bind(&no_descriptors);
  __ Drop(1);
  __ jmp(&exit);

  // We got a fixed array in register v0. Iterate through that.
  __ bind(&fixed_array);

  __ li(a1, Operand(Smi::FromInt(1)));  // Smi(1) indicates slow check
  __ Push(a1, v0);  // Smi and array
  __ ld(a1, FieldMemOperand(v0, FixedArray::kLengthOffset));
  __ Push(a1);  // Fixed array length (as smi).
  PrepareForBailoutForId(stmt->PrepareId(), BailoutState::NO_REGISTERS);
  __ li(a0, Operand(Smi::FromInt(0)));
  __ Push(a0);  // Initial index.
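  // The for-in state now occupies five stack slots (matching the
  // OperandStackDepthIncrement(5) above), read back by the loads below:
  //   sp[0 * kPointerSize]: current index (smi)
  //   sp[1 * kPointerSize]: array length (smi)
  //   sp[2 * kPointerSize]: fixed array of keys (or enum cache)
  //   sp[3 * kPointerSize]: expected map, or Smi(1) in the slow case
  //   sp[4 * kPointerSize]: the enumerable object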

  // Generate code for doing the condition check.
  __ bind(&loop);
  SetExpressionAsStatementPosition(stmt->each());

  // Load the current count to a0, load the length to a1.
  __ ld(a0, MemOperand(sp, 0 * kPointerSize));
  __ ld(a1, MemOperand(sp, 1 * kPointerSize));
  __ Branch(loop_statement.break_label(), hs, a0, Operand(a1));

  // Get the current entry of the array into register a3.
  __ ld(a2, MemOperand(sp, 2 * kPointerSize));
  __ Daddu(a2, a2, Operand(FixedArray::kHeaderSize - kHeapObjectTag));
  __ SmiScale(a4, a0, kPointerSizeLog2);
  __ daddu(a4, a2, a4);  // Array base + scaled (smi) index.
  __ ld(a3, MemOperand(a4));  // Current entry.

  // Get the expected map from the stack or, in the permanent slow case,
  // a smi, into register a2.
  __ ld(a2, MemOperand(sp, 3 * kPointerSize));

  // Check if the expected map still matches that of the enumerable.
  // If not, we may have to filter the key.
  Label update_each;
  __ ld(a1, MemOperand(sp, 4 * kPointerSize));
  __ ld(a4, FieldMemOperand(a1, HeapObject::kMapOffset));
  __ Branch(&update_each, eq, a4, Operand(a2));

  // We need to filter the key, record slow-path here.
  int const vector_index = SmiFromSlot(slot)->value();
  __ EmitLoadTypeFeedbackVector(a0);
  __ li(a2, Operand(TypeFeedbackVector::MegamorphicSentinel(isolate())));
  __ sd(a2, FieldMemOperand(a0, FixedArray::OffsetOfElementAt(vector_index)));

  // Convert the entry to a string or (smi) 0 if it isn't a property
  // any more. If the property has been removed while iterating, we
  // just skip it.
  __ Push(a1, a3);  // Enumerable and current entry.
  __ CallRuntime(Runtime::kForInFilter);
  PrepareForBailoutForId(stmt->FilterId(), BailoutState::TOS_REGISTER);
  __ mov(a3, result_register());
  __ LoadRoot(at, Heap::kUndefinedValueRootIndex);
  __ Branch(loop_statement.continue_label(), eq, a3, Operand(at));

  // Update the 'each' property or variable from the possibly filtered
  // entry in register a3.
  __ bind(&update_each);
  __ mov(result_register(), a3);
  // Perform the assignment as if via '='.
  { EffectContext context(this);
    EmitAssignment(stmt->each(), stmt->EachFeedbackSlot());
    PrepareForBailoutForId(stmt->AssignmentId(), BailoutState::NO_REGISTERS);
  }

  // Both Crankshaft and Turbofan expect BodyId to be right before stmt->body().
  PrepareForBailoutForId(stmt->BodyId(), BailoutState::NO_REGISTERS);
  // Generate code for the body of the loop.
  Visit(stmt->body());

  // Generate code for going to the next element by incrementing
  // the index (smi) stored on top of the stack.
  __ bind(loop_statement.continue_label());
  __ pop(a0);
  __ Daddu(a0, a0, Operand(Smi::FromInt(1)));
  __ push(a0);

  EmitBackEdgeBookkeeping(stmt, &loop);
  __ Branch(&loop);

  // Remove the pointers stored on the stack.
  __ bind(loop_statement.break_label());
  DropOperands(5);

  // Exit and decrement the loop depth.
  PrepareForBailoutForId(stmt->ExitId(), BailoutState::NO_REGISTERS);
  __ bind(&exit);
  decrement_loop_depth();
}


void FullCodeGenerator::EmitSetHomeObject(Expression* initializer, int offset,
                                          FeedbackVectorSlot slot) {
  DCHECK(NeedsHomeObject(initializer));
  __ ld(StoreDescriptor::ReceiverRegister(), MemOperand(sp));
  __ li(StoreDescriptor::NameRegister(),
        Operand(isolate()->factory()->home_object_symbol()));
  __ ld(StoreDescriptor::ValueRegister(),
        MemOperand(sp, offset * kPointerSize));
  EmitLoadStoreICSlot(slot);
  CallStoreIC();
}


void FullCodeGenerator::EmitSetHomeObjectAccumulator(Expression* initializer,
                                                     int offset,
                                                     FeedbackVectorSlot slot) {
  DCHECK(NeedsHomeObject(initializer));
  __ Move(StoreDescriptor::ReceiverRegister(), v0);
  __ li(StoreDescriptor::NameRegister(),
        Operand(isolate()->factory()->home_object_symbol()));
  __ ld(StoreDescriptor::ValueRegister(),
        MemOperand(sp, offset * kPointerSize));
  EmitLoadStoreICSlot(slot);
  CallStoreIC();
}


void FullCodeGenerator::EmitLoadGlobalCheckExtensions(VariableProxy* proxy,
                                                      TypeofMode typeof_mode,
                                                      Label* slow) {
  Register current = cp;
  Register next = a1;
  Register temp = a2;

  Scope* s = scope();
  while (s != NULL) {
    if (s->num_heap_slots() > 0) {
      if (s->calls_sloppy_eval()) {
        // Check that extension is "the hole".
        __ ld(temp, ContextMemOperand(current, Context::EXTENSION_INDEX));
        __ JumpIfNotRoot(temp, Heap::kTheHoleValueRootIndex, slow);
      }
      // Load next context in chain.
      __ ld(next, ContextMemOperand(current, Context::PREVIOUS_INDEX));
      // Walk the rest of the chain without clobbering cp.
      current = next;
    }
    // If no outer scope calls eval, we do not need to check more
    // context extensions.
    if (!s->outer_scope_calls_sloppy_eval() || s->is_eval_scope()) break;
    s = s->outer_scope();
  }

  if (s->is_eval_scope()) {
    Label loop, fast;
    if (!current.is(next)) {
      __ Move(next, current);
    }
    __ bind(&loop);
    // Terminate at native context.
    __ ld(temp, FieldMemOperand(next, HeapObject::kMapOffset));
    __ LoadRoot(a4, Heap::kNativeContextMapRootIndex);
    __ Branch(&fast, eq, temp, Operand(a4));
    // Check that extension is "the hole".
    __ ld(temp, ContextMemOperand(next, Context::EXTENSION_INDEX));
    __ JumpIfNotRoot(temp, Heap::kTheHoleValueRootIndex, slow);
    // Load next context in chain.
    __ ld(next, ContextMemOperand(next, Context::PREVIOUS_INDEX));
    __ Branch(&loop);
    __ bind(&fast);
  }

  // All extension objects were empty and it is safe to use a normal global
  // load machinery.
  EmitGlobalVariableLoad(proxy, typeof_mode);
}


MemOperand FullCodeGenerator::ContextSlotOperandCheckExtensions(Variable* var,
                                                                Label* slow) {
  DCHECK(var->IsContextSlot());
  Register context = cp;
  Register next = a3;
  Register temp = a4;

  for (Scope* s = scope(); s != var->scope(); s = s->outer_scope()) {
    if (s->num_heap_slots() > 0) {
      if (s->calls_sloppy_eval()) {
        // Check that extension is "the hole".
        __ ld(temp, ContextMemOperand(context, Context::EXTENSION_INDEX));
        __ JumpIfNotRoot(temp, Heap::kTheHoleValueRootIndex, slow);
      }
      __ ld(next, ContextMemOperand(context, Context::PREVIOUS_INDEX));
      // Walk the rest of the chain without clobbering cp.
      context = next;
    }
  }
  // Check that last extension is "the hole".
  __ ld(temp, ContextMemOperand(context, Context::EXTENSION_INDEX));
  __ JumpIfNotRoot(temp, Heap::kTheHoleValueRootIndex, slow);

  // This function is used only for loads, not stores, so it's safe to
  // return a cp-based operand (the write barrier cannot be allowed to
  // destroy the cp register).
  return ContextMemOperand(context, var->index());
}


void FullCodeGenerator::EmitDynamicLookupFastCase(VariableProxy* proxy,
                                                  TypeofMode typeof_mode,
                                                  Label* slow, Label* done) {
  // Generate fast-case code for variables that might be shadowed by
  // eval-introduced variables. Eval is used a lot without
  // introducing variables. In those cases, we do not want to
  // perform a runtime call for all variables in the scope
  // containing the eval.
  Variable* var = proxy->var();
  if (var->mode() == DYNAMIC_GLOBAL) {
    EmitLoadGlobalCheckExtensions(proxy, typeof_mode, slow);
    __ Branch(done);
  } else if (var->mode() == DYNAMIC_LOCAL) {
    Variable* local = var->local_if_not_shadowed();
    __ ld(v0, ContextSlotOperandCheckExtensions(local, slow));
    if (local->mode() == LET || local->mode() == CONST) {
      __ LoadRoot(at, Heap::kTheHoleValueRootIndex);
      __ dsubu(at, v0, at);  // Sub as compare: at == 0 on eq.
      __ Branch(done, ne, at, Operand(zero_reg));
      __ li(a0, Operand(var->name()));
      __ push(a0);
      __ CallRuntime(Runtime::kThrowReferenceError);
    }
    __ Branch(done);
  }
}


void FullCodeGenerator::EmitGlobalVariableLoad(VariableProxy* proxy,
                                               TypeofMode typeof_mode) {
  Variable* var = proxy->var();
  DCHECK(var->IsUnallocatedOrGlobalSlot() ||
         (var->IsLookupSlot() && var->mode() == DYNAMIC_GLOBAL));
  __ LoadGlobalObject(LoadDescriptor::ReceiverRegister());
  __ li(LoadDescriptor::NameRegister(), Operand(var->name()));
  __ li(LoadDescriptor::SlotRegister(),
        Operand(SmiFromSlot(proxy->VariableFeedbackSlot())));
  CallLoadIC(typeof_mode);
}


void FullCodeGenerator::EmitVariableLoad(VariableProxy* proxy,
                                         TypeofMode typeof_mode) {
  // Record position before possible IC call.
  SetExpressionPosition(proxy);
  PrepareForBailoutForId(proxy->BeforeId(), BailoutState::NO_REGISTERS);
  Variable* var = proxy->var();

  // Three cases: global variables, lookup variables, and all other types of
  // variables.
  switch (var->location()) {
    case VariableLocation::GLOBAL:
    case VariableLocation::UNALLOCATED: {
      Comment cmnt(masm_, "[ Global variable");
      EmitGlobalVariableLoad(proxy, typeof_mode);
      context()->Plug(v0);
      break;
    }

    case VariableLocation::PARAMETER:
    case VariableLocation::LOCAL:
    case VariableLocation::CONTEXT: {
      DCHECK_EQ(NOT_INSIDE_TYPEOF, typeof_mode);
      Comment cmnt(masm_, var->IsContextSlot() ? "[ Context variable"
                                               : "[ Stack variable");
      if (NeedsHoleCheckForLoad(proxy)) {
        // Let and const need a read barrier.
        GetVar(v0, var);
        __ LoadRoot(at, Heap::kTheHoleValueRootIndex);
        __ dsubu(at, v0, at);  // Sub as compare: at == 0 on eq.
        if (var->mode() == LET || var->mode() == CONST) {
          // Throw a reference error when using an uninitialized let/const
          // binding in harmony mode.
          Label done;
          __ Branch(&done, ne, at, Operand(zero_reg));
          __ li(a0, Operand(var->name()));
          __ push(a0);
          __ CallRuntime(Runtime::kThrowReferenceError);
          __ bind(&done);
        }
        context()->Plug(v0);
        break;
      }
      context()->Plug(var);
      break;
    }

    case VariableLocation::LOOKUP: {
      Comment cmnt(masm_, "[ Lookup variable");
      Label done, slow;
      // Generate code for loading from variables potentially shadowed
      // by eval-introduced variables.
      EmitDynamicLookupFastCase(proxy, typeof_mode, &slow, &done);
      __ bind(&slow);
      __ Push(var->name());
      Runtime::FunctionId function_id =
          typeof_mode == NOT_INSIDE_TYPEOF
              ? Runtime::kLoadLookupSlot
              : Runtime::kLoadLookupSlotInsideTypeof;
      __ CallRuntime(function_id);
      __ bind(&done);
      context()->Plug(v0);
    }
  }
}


void FullCodeGenerator::VisitRegExpLiteral(RegExpLiteral* expr) {
  Comment cmnt(masm_, "[ RegExpLiteral");
  __ ld(a3, MemOperand(fp, JavaScriptFrameConstants::kFunctionOffset));
  __ li(a2, Operand(Smi::FromInt(expr->literal_index())));
  __ li(a1, Operand(expr->pattern()));
  __ li(a0, Operand(Smi::FromInt(expr->flags())));
  FastCloneRegExpStub stub(isolate());
  __ CallStub(&stub);
  context()->Plug(v0);
}


void FullCodeGenerator::EmitAccessor(ObjectLiteralProperty* property) {
  Expression* expression = (property == NULL) ? NULL : property->value();
  if (expression == NULL) {
    __ LoadRoot(a1, Heap::kNullValueRootIndex);
    PushOperand(a1);
  } else {
    VisitForStackValue(expression);
    if (NeedsHomeObject(expression)) {
      DCHECK(property->kind() == ObjectLiteral::Property::GETTER ||
             property->kind() == ObjectLiteral::Property::SETTER);
      int offset = property->kind() == ObjectLiteral::Property::GETTER ? 2 : 3;
      EmitSetHomeObject(expression, offset, property->GetSlot());
    }
  }
}


void FullCodeGenerator::VisitObjectLiteral(ObjectLiteral* expr) {
  Comment cmnt(masm_, "[ ObjectLiteral");

  Handle<FixedArray> constant_properties = expr->constant_properties();
  __ ld(a3, MemOperand(fp, JavaScriptFrameConstants::kFunctionOffset));
  __ li(a2, Operand(Smi::FromInt(expr->literal_index())));
  __ li(a1, Operand(constant_properties));
  __ li(a0, Operand(Smi::FromInt(expr->ComputeFlags())));
  if (MustCreateObjectLiteralWithRuntime(expr)) {
    __ Push(a3, a2, a1, a0);
    __ CallRuntime(Runtime::kCreateObjectLiteral);
  } else {
    FastCloneShallowObjectStub stub(isolate(), expr->properties_count());
    __ CallStub(&stub);
    RestoreContext();
  }
  PrepareForBailoutForId(expr->CreateLiteralId(), BailoutState::TOS_REGISTER);

  // If result_saved is true the result is on top of the stack. If
  // result_saved is false the result is in v0.
  bool result_saved = false;

  AccessorTable accessor_table(zone());
  int property_index = 0;
  for (; property_index < expr->properties()->length(); property_index++) {
    ObjectLiteral::Property* property = expr->properties()->at(property_index);
    if (property->is_computed_name()) break;
    if (property->IsCompileTimeValue()) continue;

    Literal* key = property->key()->AsLiteral();
    Expression* value = property->value();
    if (!result_saved) {
      PushOperand(v0);  // Save result on stack.
      result_saved = true;
    }
    switch (property->kind()) {
      case ObjectLiteral::Property::CONSTANT:
        UNREACHABLE();
      case ObjectLiteral::Property::MATERIALIZED_LITERAL:
        DCHECK(!CompileTimeValue::IsCompileTimeValue(property->value()));
        // Fall through.
      case ObjectLiteral::Property::COMPUTED:
        // It is safe to use [[Put]] here because the boilerplate already
        // contains computed properties with an uninitialized value.
        if (key->value()->IsInternalizedString()) {
          if (property->emit_store()) {
            VisitForAccumulatorValue(value);
            __ mov(StoreDescriptor::ValueRegister(), result_register());
            DCHECK(StoreDescriptor::ValueRegister().is(a0));
            __ li(StoreDescriptor::NameRegister(), Operand(key->value()));
            __ ld(StoreDescriptor::ReceiverRegister(), MemOperand(sp));
            EmitLoadStoreICSlot(property->GetSlot(0));
            CallStoreIC();
            PrepareForBailoutForId(key->id(), BailoutState::NO_REGISTERS);

            if (NeedsHomeObject(value)) {
              EmitSetHomeObjectAccumulator(value, 0, property->GetSlot(1));
            }
          } else {
            VisitForEffect(value);
          }
          break;
        }
        // Duplicate receiver on stack.
        __ ld(a0, MemOperand(sp));
        PushOperand(a0);
        VisitForStackValue(key);
        VisitForStackValue(value);
        if (property->emit_store()) {
          if (NeedsHomeObject(value)) {
            EmitSetHomeObject(value, 2, property->GetSlot());
          }
          __ li(a0, Operand(Smi::FromInt(SLOPPY)));  // PropertyAttributes.
          PushOperand(a0);
          CallRuntimeWithOperands(Runtime::kSetProperty);
        } else {
          DropOperands(3);
        }
        break;
      case ObjectLiteral::Property::PROTOTYPE:
        // Duplicate receiver on stack.
        __ ld(a0, MemOperand(sp));
        PushOperand(a0);
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00001491 VisitForStackValue(value);
1492 DCHECK(property->emit_store());
Ben Murdoch097c5b22016-05-18 11:27:45 +01001493 CallRuntimeWithOperands(Runtime::kInternalSetPrototype);
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00001494 PrepareForBailoutForId(expr->GetIdForPropertySet(property_index),
Ben Murdochc5610432016-08-08 18:44:38 +01001495 BailoutState::NO_REGISTERS);
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00001496 break;
1497 case ObjectLiteral::Property::GETTER:
1498 if (property->emit_store()) {
1499 accessor_table.lookup(key)->second->getter = property;
1500 }
1501 break;
1502 case ObjectLiteral::Property::SETTER:
1503 if (property->emit_store()) {
1504 accessor_table.lookup(key)->second->setter = property;
1505 }
1506 break;
1507 }
1508 }
1509
1510 // Emit code to define accessors, using only a single call to the runtime for
1511 // each pair of corresponding getters and setters.
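  // For example (illustrative): {get x() {...}, set x(v) {...}} is grouped
  // under the key "x", so both accessors are installed by one call to
  // Runtime::kDefineAccessorPropertyUnchecked below.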
1512 for (AccessorTable::Iterator it = accessor_table.begin();
1513 it != accessor_table.end();
1514 ++it) {
1515 __ ld(a0, MemOperand(sp)); // Duplicate receiver.
Ben Murdoch097c5b22016-05-18 11:27:45 +01001516 PushOperand(a0);
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00001517 VisitForStackValue(it->first);
1518 EmitAccessor(it->second->getter);
1519 EmitAccessor(it->second->setter);
1520 __ li(a0, Operand(Smi::FromInt(NONE)));
Ben Murdoch097c5b22016-05-18 11:27:45 +01001521 PushOperand(a0);
1522 CallRuntimeWithOperands(Runtime::kDefineAccessorPropertyUnchecked);
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00001523 }
1524
1525 // Object literals have two parts. The "static" part on the left contains no
1526 // computed property names, and so we can compute its map ahead of time; see
1527 // runtime.cc::CreateObjectLiteralBoilerplate. The second "dynamic" part
1528 // starts with the first computed property name, and continues with all
1529 // properties to its right. All the code from above initializes the static
1530 // component of the object literal, and arranges for the map of the result to
1531 // reflect the static order in which the keys appear. For the dynamic
1532 // properties, we compile them into a series of "SetOwnProperty" runtime
1533 // calls. This will preserve insertion order.
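  // Illustrative example (a sketch): in
  //   var o = {a: 1, b: 2, [k()]: 3, c: 4};
  // 'a' and 'b' belong to the static part and are covered by the boilerplate
  // map, while [k()] and 'c' form the dynamic part handled by the loop below.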
1534 for (; property_index < expr->properties()->length(); property_index++) {
1535 ObjectLiteral::Property* property = expr->properties()->at(property_index);
1536
1537 Expression* value = property->value();
1538 if (!result_saved) {
Ben Murdoch097c5b22016-05-18 11:27:45 +01001539 PushOperand(v0); // Save result on the stack
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00001540 result_saved = true;
1541 }
1542
1543 __ ld(a0, MemOperand(sp)); // Duplicate receiver.
Ben Murdoch097c5b22016-05-18 11:27:45 +01001544 PushOperand(a0);
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00001545
1546 if (property->kind() == ObjectLiteral::Property::PROTOTYPE) {
1547 DCHECK(!property->is_computed_name());
1548 VisitForStackValue(value);
1549 DCHECK(property->emit_store());
Ben Murdoch097c5b22016-05-18 11:27:45 +01001550 CallRuntimeWithOperands(Runtime::kInternalSetPrototype);
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00001551 PrepareForBailoutForId(expr->GetIdForPropertySet(property_index),
Ben Murdochc5610432016-08-08 18:44:38 +01001552 BailoutState::NO_REGISTERS);
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00001553 } else {
1554 EmitPropertyKey(property, expr->GetIdForPropertyName(property_index));
1555 VisitForStackValue(value);
1556 if (NeedsHomeObject(value)) {
1557 EmitSetHomeObject(value, 2, property->GetSlot());
1558 }
1559
1560 switch (property->kind()) {
1561 case ObjectLiteral::Property::CONSTANT:
1562 case ObjectLiteral::Property::MATERIALIZED_LITERAL:
1563 case ObjectLiteral::Property::COMPUTED:
1564 if (property->emit_store()) {
Ben Murdoch097c5b22016-05-18 11:27:45 +01001565 PushOperand(Smi::FromInt(NONE));
1566 PushOperand(Smi::FromInt(property->NeedsSetFunctionName()));
1567 CallRuntimeWithOperands(Runtime::kDefineDataPropertyInLiteral);
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00001568 } else {
Ben Murdoch097c5b22016-05-18 11:27:45 +01001569 DropOperands(3);
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00001570 }
1571 break;
1572
1573 case ObjectLiteral::Property::PROTOTYPE:
1574 UNREACHABLE();
1575 break;
1576
1577 case ObjectLiteral::Property::GETTER:
Ben Murdoch097c5b22016-05-18 11:27:45 +01001578 PushOperand(Smi::FromInt(NONE));
1579 CallRuntimeWithOperands(Runtime::kDefineGetterPropertyUnchecked);
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00001580 break;
1581
1582 case ObjectLiteral::Property::SETTER:
Ben Murdoch097c5b22016-05-18 11:27:45 +01001583 PushOperand(Smi::FromInt(NONE));
1584 CallRuntimeWithOperands(Runtime::kDefineSetterPropertyUnchecked);
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00001585 break;
1586 }
1587 }
1588 }
1589
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00001590 if (result_saved) {
1591 context()->PlugTOS();
1592 } else {
1593 context()->Plug(v0);
1594 }
1595}
1596
1597
1598void FullCodeGenerator::VisitArrayLiteral(ArrayLiteral* expr) {
1599 Comment cmnt(masm_, "[ ArrayLiteral");
1600
1601 Handle<FixedArray> constant_elements = expr->constant_elements();
1602 bool has_fast_elements =
1603 IsFastObjectElementsKind(expr->constant_elements_kind());
1604
1605 AllocationSiteMode allocation_site_mode = TRACK_ALLOCATION_SITE;
1606 if (has_fast_elements && !FLAG_allocation_site_pretenuring) {
1607    // If transitioning is the only remaining customer of allocation sites,
1608    // we can turn tracking off when there is nowhere left to transition to.
1609 allocation_site_mode = DONT_TRACK_ALLOCATION_SITE;
1610 }
1611
1612 __ mov(a0, result_register());
1613 __ ld(a3, MemOperand(fp, JavaScriptFrameConstants::kFunctionOffset));
1614 __ li(a2, Operand(Smi::FromInt(expr->literal_index())));
1615 __ li(a1, Operand(constant_elements));
1616 if (MustCreateArrayLiteralWithRuntime(expr)) {
1617 __ li(a0, Operand(Smi::FromInt(expr->ComputeFlags())));
1618 __ Push(a3, a2, a1, a0);
1619 __ CallRuntime(Runtime::kCreateArrayLiteral);
1620 } else {
1621 FastCloneShallowArrayStub stub(isolate(), allocation_site_mode);
1622 __ CallStub(&stub);
1623 }
Ben Murdochc5610432016-08-08 18:44:38 +01001624 PrepareForBailoutForId(expr->CreateLiteralId(), BailoutState::TOS_REGISTER);
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00001625
1626 bool result_saved = false; // Is the result saved to the stack?
1627 ZoneList<Expression*>* subexprs = expr->values();
1628 int length = subexprs->length();
1629
1630 // Emit code to evaluate all the non-constant subexpressions and to store
1631 // them into the newly cloned array.
1632 int array_index = 0;
1633 for (; array_index < length; array_index++) {
1634 Expression* subexpr = subexprs->at(array_index);
Ben Murdoch097c5b22016-05-18 11:27:45 +01001635 DCHECK(!subexpr->IsSpread());
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00001636
1637 // If the subexpression is a literal or a simple materialized literal it
1638 // is already set in the cloned array.
1639 if (CompileTimeValue::IsCompileTimeValue(subexpr)) continue;
1640
1641 if (!result_saved) {
Ben Murdoch097c5b22016-05-18 11:27:45 +01001642 PushOperand(v0); // array literal
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00001643 result_saved = true;
1644 }
1645
1646 VisitForAccumulatorValue(subexpr);
1647
1648 __ li(StoreDescriptor::NameRegister(), Operand(Smi::FromInt(array_index)));
1649 __ ld(StoreDescriptor::ReceiverRegister(), MemOperand(sp, 0));
1650 __ mov(StoreDescriptor::ValueRegister(), result_register());
1651 EmitLoadStoreICSlot(expr->LiteralFeedbackSlot());
1652 Handle<Code> ic =
1653 CodeFactory::KeyedStoreIC(isolate(), language_mode()).code();
1654 CallIC(ic);
1655
Ben Murdochc5610432016-08-08 18:44:38 +01001656 PrepareForBailoutForId(expr->GetIdForElement(array_index),
1657 BailoutState::NO_REGISTERS);
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00001658 }
1659
1660  // In case the array literal contains spread expressions, it has two parts.
1661  // The first part is the "static" array, which has a literal index and is
1662  // handled above. The second part starts at the first spread expression
1663  // (inclusive), and these elements get appended to the array. Note that the
1664  // number of elements an iterable produces is unknown ahead of time.
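  // Illustrative sketch: should a spread such as [x, ...iter] reach this
  // point, 'x' would be stored by the keyed-store IC loop above, while each
  // value produced by 'iter' is appended via Runtime::kAppendElement below.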
1665 if (array_index < length && result_saved) {
Ben Murdoch097c5b22016-05-18 11:27:45 +01001666 PopOperand(v0);
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00001667 result_saved = false;
1668 }
1669 for (; array_index < length; array_index++) {
1670 Expression* subexpr = subexprs->at(array_index);
1671
Ben Murdoch097c5b22016-05-18 11:27:45 +01001672 PushOperand(v0);
1673 DCHECK(!subexpr->IsSpread());
1674 VisitForStackValue(subexpr);
1675 CallRuntimeWithOperands(Runtime::kAppendElement);
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00001676
Ben Murdochc5610432016-08-08 18:44:38 +01001677 PrepareForBailoutForId(expr->GetIdForElement(array_index),
1678 BailoutState::NO_REGISTERS);
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00001679 }
1680
1681 if (result_saved) {
1682 context()->PlugTOS();
1683 } else {
1684 context()->Plug(v0);
1685 }
1686}
1687
1688
1689void FullCodeGenerator::VisitAssignment(Assignment* expr) {
1690 DCHECK(expr->target()->IsValidReferenceExpressionOrThis());
1691
1692 Comment cmnt(masm_, "[ Assignment");
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00001693
1694 Property* property = expr->target()->AsProperty();
1695 LhsKind assign_type = Property::GetAssignType(property);
1696
1697 // Evaluate LHS expression.
1698 switch (assign_type) {
1699 case VARIABLE:
1700 // Nothing to do here.
1701 break;
1702 case NAMED_PROPERTY:
1703 if (expr->is_compound()) {
1704 // We need the receiver both on the stack and in the register.
1705 VisitForStackValue(property->obj());
1706 __ ld(LoadDescriptor::ReceiverRegister(), MemOperand(sp, 0));
1707 } else {
1708 VisitForStackValue(property->obj());
1709 }
1710 break;
1711 case NAMED_SUPER_PROPERTY:
1712 VisitForStackValue(
1713 property->obj()->AsSuperPropertyReference()->this_var());
1714 VisitForAccumulatorValue(
1715 property->obj()->AsSuperPropertyReference()->home_object());
Ben Murdoch097c5b22016-05-18 11:27:45 +01001716 PushOperand(result_register());
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00001717 if (expr->is_compound()) {
1718 const Register scratch = a1;
1719 __ ld(scratch, MemOperand(sp, kPointerSize));
Ben Murdoch097c5b22016-05-18 11:27:45 +01001720 PushOperands(scratch, result_register());
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00001721 }
1722 break;
1723 case KEYED_SUPER_PROPERTY: {
1724 const Register scratch = a1;
1725 VisitForStackValue(
1726 property->obj()->AsSuperPropertyReference()->this_var());
1727 VisitForAccumulatorValue(
1728 property->obj()->AsSuperPropertyReference()->home_object());
1729 __ Move(scratch, result_register());
1730 VisitForAccumulatorValue(property->key());
Ben Murdoch097c5b22016-05-18 11:27:45 +01001731 PushOperands(scratch, result_register());
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00001732 if (expr->is_compound()) {
1733 const Register scratch1 = a4;
1734 __ ld(scratch1, MemOperand(sp, 2 * kPointerSize));
Ben Murdoch097c5b22016-05-18 11:27:45 +01001735 PushOperands(scratch1, scratch, result_register());
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00001736 }
1737 break;
1738 }
1739 case KEYED_PROPERTY:
1740 // We need the key and receiver on both the stack and in v0 and a1.
1741 if (expr->is_compound()) {
1742 VisitForStackValue(property->obj());
1743 VisitForStackValue(property->key());
1744 __ ld(LoadDescriptor::ReceiverRegister(),
1745 MemOperand(sp, 1 * kPointerSize));
1746 __ ld(LoadDescriptor::NameRegister(), MemOperand(sp, 0));
1747 } else {
1748 VisitForStackValue(property->obj());
1749 VisitForStackValue(property->key());
1750 }
1751 break;
1752 }
1753
1754 // For compound assignments we need another deoptimization point after the
1755 // variable/property load.
1756 if (expr->is_compound()) {
1757 { AccumulatorValueContext context(this);
1758 switch (assign_type) {
1759 case VARIABLE:
1760 EmitVariableLoad(expr->target()->AsVariableProxy());
Ben Murdochc5610432016-08-08 18:44:38 +01001761 PrepareForBailout(expr->target(), BailoutState::TOS_REGISTER);
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00001762 break;
1763 case NAMED_PROPERTY:
1764 EmitNamedPropertyLoad(property);
Ben Murdochc5610432016-08-08 18:44:38 +01001765 PrepareForBailoutForId(property->LoadId(),
1766 BailoutState::TOS_REGISTER);
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00001767 break;
1768 case NAMED_SUPER_PROPERTY:
1769 EmitNamedSuperPropertyLoad(property);
Ben Murdochc5610432016-08-08 18:44:38 +01001770 PrepareForBailoutForId(property->LoadId(),
1771 BailoutState::TOS_REGISTER);
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00001772 break;
1773 case KEYED_SUPER_PROPERTY:
1774 EmitKeyedSuperPropertyLoad(property);
Ben Murdochc5610432016-08-08 18:44:38 +01001775 PrepareForBailoutForId(property->LoadId(),
1776 BailoutState::TOS_REGISTER);
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00001777 break;
1778 case KEYED_PROPERTY:
1779 EmitKeyedPropertyLoad(property);
Ben Murdochc5610432016-08-08 18:44:38 +01001780 PrepareForBailoutForId(property->LoadId(),
1781 BailoutState::TOS_REGISTER);
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00001782 break;
1783 }
1784 }
1785
1786 Token::Value op = expr->binary_op();
Ben Murdoch097c5b22016-05-18 11:27:45 +01001787 PushOperand(v0); // Left operand goes on the stack.
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00001788 VisitForAccumulatorValue(expr->value());
1789
1790 AccumulatorValueContext context(this);
1791 if (ShouldInlineSmiCase(op)) {
1792 EmitInlineSmiBinaryOp(expr->binary_operation(),
1793 op,
1794 expr->target(),
1795 expr->value());
1796 } else {
1797 EmitBinaryOp(expr->binary_operation(), op);
1798 }
1799
1800 // Deoptimization point in case the binary operation may have side effects.
Ben Murdochc5610432016-08-08 18:44:38 +01001801 PrepareForBailout(expr->binary_operation(), BailoutState::TOS_REGISTER);
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00001802 } else {
1803 VisitForAccumulatorValue(expr->value());
1804 }
1805
1806 SetExpressionPosition(expr);
1807
1808 // Store the value.
1809 switch (assign_type) {
1810 case VARIABLE:
1811 EmitVariableAssignment(expr->target()->AsVariableProxy()->var(),
1812 expr->op(), expr->AssignmentSlot());
Ben Murdochc5610432016-08-08 18:44:38 +01001813 PrepareForBailoutForId(expr->AssignmentId(), BailoutState::TOS_REGISTER);
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00001814 context()->Plug(v0);
1815 break;
1816 case NAMED_PROPERTY:
1817 EmitNamedPropertyAssignment(expr);
1818 break;
1819 case NAMED_SUPER_PROPERTY:
1820 EmitNamedSuperPropertyStore(property);
1821 context()->Plug(v0);
1822 break;
1823 case KEYED_SUPER_PROPERTY:
1824 EmitKeyedSuperPropertyStore(property);
1825 context()->Plug(v0);
1826 break;
1827 case KEYED_PROPERTY:
1828 EmitKeyedPropertyAssignment(expr);
1829 break;
1830 }
1831}
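// Illustrative walk-through (a sketch): for o.x += 1, the NAMED_PROPERTY case
// keeps the receiver both on the stack and in a register, loads o.x, pushes
// it as the left operand, evaluates 1 into the accumulator, combines the two
// through the inline smi path or the BinaryOpIC, and finally stores the
// result back with a named store IC.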
1832
1833
1834void FullCodeGenerator::VisitYield(Yield* expr) {
1835 Comment cmnt(masm_, "[ Yield");
1836 SetExpressionPosition(expr);
1837
1838 // Evaluate yielded value first; the initial iterator definition depends on
1839 // this. It stays on the stack while we update the iterator.
1840 VisitForStackValue(expr->expression());
1841
Ben Murdochc5610432016-08-08 18:44:38 +01001842 Label suspend, continuation, post_runtime, resume, exception;
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00001843
Ben Murdochda12d292016-06-02 14:46:10 +01001844 __ jmp(&suspend);
1845 __ bind(&continuation);
Ben Murdochc5610432016-08-08 18:44:38 +01001846 // When we arrive here, v0 holds the generator object.
Ben Murdochda12d292016-06-02 14:46:10 +01001847 __ RecordGeneratorContinuation();
Ben Murdochc5610432016-08-08 18:44:38 +01001848 __ ld(a1, FieldMemOperand(v0, JSGeneratorObject::kResumeModeOffset));
1849 __ ld(v0, FieldMemOperand(v0, JSGeneratorObject::kInputOffset));
1850 __ Branch(&resume, eq, a1, Operand(Smi::FromInt(JSGeneratorObject::kNext)));
1851 __ Push(result_register());
1852 __ Branch(&exception, eq, a1,
1853 Operand(Smi::FromInt(JSGeneratorObject::kThrow)));
Ben Murdochda12d292016-06-02 14:46:10 +01001854 EmitCreateIteratorResult(true);
1855 EmitUnwindAndReturn();
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00001856
Ben Murdochc5610432016-08-08 18:44:38 +01001857 __ bind(&exception);
1858 __ CallRuntime(Runtime::kThrow);
1859
Ben Murdochda12d292016-06-02 14:46:10 +01001860 __ bind(&suspend);
1861 OperandStackDepthIncrement(1); // Not popped on this path.
1862 VisitForAccumulatorValue(expr->generator_object());
1863 DCHECK(continuation.pos() > 0 && Smi::IsValid(continuation.pos()));
1864 __ li(a1, Operand(Smi::FromInt(continuation.pos())));
1865 __ sd(a1, FieldMemOperand(v0, JSGeneratorObject::kContinuationOffset));
1866 __ sd(cp, FieldMemOperand(v0, JSGeneratorObject::kContextOffset));
1867 __ mov(a1, cp);
1868 __ RecordWriteField(v0, JSGeneratorObject::kContextOffset, a1, a2,
1869 kRAHasBeenSaved, kDontSaveFPRegs);
1870 __ Daddu(a1, fp, Operand(StandardFrameConstants::kExpressionsOffset));
1871 __ Branch(&post_runtime, eq, sp, Operand(a1));
1872 __ push(v0); // generator object
1873 __ CallRuntime(Runtime::kSuspendJSGeneratorObject, 1);
Ben Murdochc5610432016-08-08 18:44:38 +01001874 RestoreContext();
Ben Murdochda12d292016-06-02 14:46:10 +01001875 __ bind(&post_runtime);
1876 PopOperand(result_register());
1877 EmitReturnSequence();
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00001878
Ben Murdochda12d292016-06-02 14:46:10 +01001879 __ bind(&resume);
1880 context()->Plug(result_register());
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00001881}
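// Rough sketch of the protocol above: on suspension, the continuation
// position and the current context are written into the generator object and
// control returns to the caller. On re-entry, v0 holds the generator and
// kResumeModeOffset selects between a normal .next() resume, a .throw()
// (the exception path), and a .return(), which wraps the input in a
// {value, done: true} iterator result.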
1882
Ben Murdoch097c5b22016-05-18 11:27:45 +01001883void FullCodeGenerator::PushOperands(Register reg1, Register reg2) {
1884 OperandStackDepthIncrement(2);
1885 __ Push(reg1, reg2);
1886}
1887
1888void FullCodeGenerator::PushOperands(Register reg1, Register reg2,
1889 Register reg3) {
1890 OperandStackDepthIncrement(3);
1891 __ Push(reg1, reg2, reg3);
1892}
1893
1894void FullCodeGenerator::PushOperands(Register reg1, Register reg2,
1895 Register reg3, Register reg4) {
1896 OperandStackDepthIncrement(4);
1897 __ Push(reg1, reg2, reg3, reg4);
1898}
1899
1900void FullCodeGenerator::PopOperands(Register reg1, Register reg2) {
1901 OperandStackDepthDecrement(2);
1902 __ Pop(reg1, reg2);
1903}
1904
1905void FullCodeGenerator::EmitOperandStackDepthCheck() {
1906 if (FLAG_debug_code) {
1907 int expected_diff = StandardFrameConstants::kFixedFrameSizeFromFp +
1908 operand_stack_depth_ * kPointerSize;
1909 __ Dsubu(v0, fp, sp);
1910 __ Assert(eq, kUnexpectedStackDepth, v0, Operand(expected_diff));
1911 }
1912}
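// Informally, the invariant checked above (only under --debug-code) is
//   fp - sp == kFixedFrameSizeFromFp + operand_stack_depth_ * kPointerSize
// e.g. with two tracked operands and 8-byte pointers, the operand portion
// accounts for 16 bytes. (The numbers are a worked example only.)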
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00001913
1914void FullCodeGenerator::EmitCreateIteratorResult(bool done) {
1915 Label allocate, done_allocate;
1916
Ben Murdochc5610432016-08-08 18:44:38 +01001917 __ Allocate(JSIteratorResult::kSize, v0, a2, a3, &allocate,
1918 NO_ALLOCATION_FLAGS);
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00001919 __ jmp(&done_allocate);
1920
1921 __ bind(&allocate);
1922 __ Push(Smi::FromInt(JSIteratorResult::kSize));
1923 __ CallRuntime(Runtime::kAllocateInNewSpace);
1924
1925 __ bind(&done_allocate);
1926 __ LoadNativeContextSlot(Context::ITERATOR_RESULT_MAP_INDEX, a1);
Ben Murdochda12d292016-06-02 14:46:10 +01001927 PopOperand(a2);
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00001928 __ LoadRoot(a3,
1929 done ? Heap::kTrueValueRootIndex : Heap::kFalseValueRootIndex);
1930 __ LoadRoot(a4, Heap::kEmptyFixedArrayRootIndex);
1931 __ sd(a1, FieldMemOperand(v0, HeapObject::kMapOffset));
1932 __ sd(a4, FieldMemOperand(v0, JSObject::kPropertiesOffset));
1933 __ sd(a4, FieldMemOperand(v0, JSObject::kElementsOffset));
1934 __ sd(a2, FieldMemOperand(v0, JSIteratorResult::kValueOffset));
1935 __ sd(a3, FieldMemOperand(v0, JSIteratorResult::kDoneOffset));
1936 STATIC_ASSERT(JSIteratorResult::kSize == 5 * kPointerSize);
1937}
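// Layout sketch (illustrative): the JSIteratorResult filled in above is
//   [map | properties | elements | value | done]
// i.e. five pointer-sized fields, matching the STATIC_ASSERT, and it plays
// the role of the JS object {value: ..., done: ...}.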
1938
1939
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00001940void FullCodeGenerator::EmitInlineSmiBinaryOp(BinaryOperation* expr,
1941 Token::Value op,
1942 Expression* left_expr,
1943 Expression* right_expr) {
1944 Label done, smi_case, stub_call;
1945
1946 Register scratch1 = a2;
1947 Register scratch2 = a3;
1948
1949 // Get the arguments.
1950 Register left = a1;
1951 Register right = a0;
Ben Murdoch097c5b22016-05-18 11:27:45 +01001952 PopOperand(left);
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00001953 __ mov(a0, result_register());
1954
1955 // Perform combined smi check on both operands.
1956 __ Or(scratch1, left, Operand(right));
1957 STATIC_ASSERT(kSmiTag == 0);
1958 JumpPatchSite patch_site(masm_);
1959 patch_site.EmitJumpIfSmi(scratch1, &smi_case);
1960
1961 __ bind(&stub_call);
Ben Murdoch097c5b22016-05-18 11:27:45 +01001962 Handle<Code> code = CodeFactory::BinaryOpIC(isolate(), op).code();
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00001963 CallIC(code, expr->BinaryOperationFeedbackId());
1964 patch_site.EmitPatchInfo();
1965 __ jmp(&done);
1966
1967 __ bind(&smi_case);
1968  // Smi case. This code works the same way as the smi-smi case in the type-
1969  // recording binary operation stub (BinaryOpIC), but is inlined here.
1970 switch (op) {
1971 case Token::SAR:
1972 __ GetLeastBitsFromSmi(scratch1, right, 5);
1973 __ dsrav(right, left, scratch1);
1974 __ And(v0, right, Operand(0xffffffff00000000L));
1975 break;
1976 case Token::SHL: {
1977 __ SmiUntag(scratch1, left);
1978 __ GetLeastBitsFromSmi(scratch2, right, 5);
1979 __ dsllv(scratch1, scratch1, scratch2);
1980 __ SmiTag(v0, scratch1);
1981 break;
1982 }
1983 case Token::SHR: {
1984 __ SmiUntag(scratch1, left);
1985 __ GetLeastBitsFromSmi(scratch2, right, 5);
1986 __ dsrlv(scratch1, scratch1, scratch2);
1987 __ And(scratch2, scratch1, 0x80000000);
1988 __ Branch(&stub_call, ne, scratch2, Operand(zero_reg));
1989 __ SmiTag(v0, scratch1);
1990 break;
1991 }
1992 case Token::ADD:
Ben Murdochda12d292016-06-02 14:46:10 +01001993 __ DaddBranchOvf(v0, left, Operand(right), &stub_call);
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00001994 break;
1995 case Token::SUB:
Ben Murdochda12d292016-06-02 14:46:10 +01001996 __ DsubBranchOvf(v0, left, Operand(right), &stub_call);
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00001997 break;
1998 case Token::MUL: {
1999 __ Dmulh(v0, left, right);
2000 __ dsra32(scratch2, v0, 0);
2001 __ sra(scratch1, v0, 31);
2002 __ Branch(USE_DELAY_SLOT, &stub_call, ne, scratch2, Operand(scratch1));
2003 __ SmiTag(v0);
2004 __ Branch(USE_DELAY_SLOT, &done, ne, v0, Operand(zero_reg));
2005 __ Daddu(scratch2, right, left);
2006 __ Branch(&stub_call, lt, scratch2, Operand(zero_reg));
2007 DCHECK(Smi::FromInt(0) == 0);
2008 __ mov(v0, zero_reg);
2009 break;
2010 }
2011 case Token::BIT_OR:
2012 __ Or(v0, left, Operand(right));
2013 break;
2014 case Token::BIT_AND:
2015 __ And(v0, left, Operand(right));
2016 break;
2017 case Token::BIT_XOR:
2018 __ Xor(v0, left, Operand(right));
2019 break;
2020 default:
2021 UNREACHABLE();
2022 }
2023
2024 __ bind(&done);
2025 context()->Plug(v0);
2026}
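// Background (a simplification): on mips64, a smi keeps its 32-bit payload in
// the upper word with a zero lower half, e.g. 8 is tagged as
// 0x0000000800000000. That is why Token::SAR above can shift the tagged value
// directly and re-tag by masking with 0xffffffff00000000, while Token::SHL
// and Token::SHR untag first via SmiUntag and re-tag the result.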
2027
2028
2029void FullCodeGenerator::EmitClassDefineProperties(ClassLiteral* lit) {
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00002030 for (int i = 0; i < lit->properties()->length(); i++) {
2031 ObjectLiteral::Property* property = lit->properties()->at(i);
2032 Expression* value = property->value();
2033
Ben Murdoch097c5b22016-05-18 11:27:45 +01002034 Register scratch = a1;
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00002035 if (property->is_static()) {
2036 __ ld(scratch, MemOperand(sp, kPointerSize)); // constructor
2037 } else {
2038 __ ld(scratch, MemOperand(sp, 0)); // prototype
2039 }
Ben Murdoch097c5b22016-05-18 11:27:45 +01002040 PushOperand(scratch);
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00002041 EmitPropertyKey(property, lit->GetIdForProperty(i));
2042
2043    // The static prototype property is read-only. We handle the non-computed
2044    // property name case in the parser. Since this is the only case where we
2045    // need to check for an own read-only property, we special-case it here so
2046    // that we do not need to perform the check for every property.
2047 if (property->is_static() && property->is_computed_name()) {
2048 __ CallRuntime(Runtime::kThrowIfStaticPrototype);
2049 __ push(v0);
2050 }
2051
2052 VisitForStackValue(value);
2053 if (NeedsHomeObject(value)) {
2054 EmitSetHomeObject(value, 2, property->GetSlot());
2055 }
2056
2057 switch (property->kind()) {
2058 case ObjectLiteral::Property::CONSTANT:
2059 case ObjectLiteral::Property::MATERIALIZED_LITERAL:
2060 case ObjectLiteral::Property::PROTOTYPE:
2061 UNREACHABLE();
2062 case ObjectLiteral::Property::COMPUTED:
Ben Murdoch097c5b22016-05-18 11:27:45 +01002063 PushOperand(Smi::FromInt(DONT_ENUM));
2064 PushOperand(Smi::FromInt(property->NeedsSetFunctionName()));
2065 CallRuntimeWithOperands(Runtime::kDefineDataPropertyInLiteral);
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00002066 break;
2067
2068 case ObjectLiteral::Property::GETTER:
Ben Murdoch097c5b22016-05-18 11:27:45 +01002069 PushOperand(Smi::FromInt(DONT_ENUM));
2070 CallRuntimeWithOperands(Runtime::kDefineGetterPropertyUnchecked);
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00002071 break;
2072
2073 case ObjectLiteral::Property::SETTER:
Ben Murdoch097c5b22016-05-18 11:27:45 +01002074 PushOperand(Smi::FromInt(DONT_ENUM));
2075 CallRuntimeWithOperands(Runtime::kDefineSetterPropertyUnchecked);
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00002076 break;
2077
2078 default:
2079 UNREACHABLE();
2080 }
2081 }
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00002082}
2083
2084
2085void FullCodeGenerator::EmitBinaryOp(BinaryOperation* expr, Token::Value op) {
2086 __ mov(a0, result_register());
Ben Murdoch097c5b22016-05-18 11:27:45 +01002087 PopOperand(a1);
2088 Handle<Code> code = CodeFactory::BinaryOpIC(isolate(), op).code();
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00002089 JumpPatchSite patch_site(masm_); // unbound, signals no inlined smi code.
2090 CallIC(code, expr->BinaryOperationFeedbackId());
2091 patch_site.EmitPatchInfo();
2092 context()->Plug(v0);
2093}
2094
2095
2096void FullCodeGenerator::EmitAssignment(Expression* expr,
2097 FeedbackVectorSlot slot) {
2098 DCHECK(expr->IsValidReferenceExpressionOrThis());
2099
2100 Property* prop = expr->AsProperty();
2101 LhsKind assign_type = Property::GetAssignType(prop);
2102
2103 switch (assign_type) {
2104 case VARIABLE: {
2105 Variable* var = expr->AsVariableProxy()->var();
2106 EffectContext context(this);
2107 EmitVariableAssignment(var, Token::ASSIGN, slot);
2108 break;
2109 }
2110 case NAMED_PROPERTY: {
Ben Murdoch097c5b22016-05-18 11:27:45 +01002111 PushOperand(result_register()); // Preserve value.
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00002112 VisitForAccumulatorValue(prop->obj());
2113 __ mov(StoreDescriptor::ReceiverRegister(), result_register());
Ben Murdoch097c5b22016-05-18 11:27:45 +01002114 PopOperand(StoreDescriptor::ValueRegister()); // Restore value.
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00002115 __ li(StoreDescriptor::NameRegister(),
2116 Operand(prop->key()->AsLiteral()->value()));
2117 EmitLoadStoreICSlot(slot);
2118 CallStoreIC();
2119 break;
2120 }
2121 case NAMED_SUPER_PROPERTY: {
Ben Murdoch097c5b22016-05-18 11:27:45 +01002122 PushOperand(v0);
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00002123 VisitForStackValue(prop->obj()->AsSuperPropertyReference()->this_var());
2124 VisitForAccumulatorValue(
2125 prop->obj()->AsSuperPropertyReference()->home_object());
2126 // stack: value, this; v0: home_object
2127 Register scratch = a2;
2128 Register scratch2 = a3;
2129 __ mov(scratch, result_register()); // home_object
2130 __ ld(v0, MemOperand(sp, kPointerSize)); // value
2131 __ ld(scratch2, MemOperand(sp, 0)); // this
2132 __ sd(scratch2, MemOperand(sp, kPointerSize)); // this
2133 __ sd(scratch, MemOperand(sp, 0)); // home_object
2134 // stack: this, home_object; v0: value
2135 EmitNamedSuperPropertyStore(prop);
2136 break;
2137 }
2138 case KEYED_SUPER_PROPERTY: {
Ben Murdoch097c5b22016-05-18 11:27:45 +01002139 PushOperand(v0);
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00002140 VisitForStackValue(prop->obj()->AsSuperPropertyReference()->this_var());
2141 VisitForStackValue(
2142 prop->obj()->AsSuperPropertyReference()->home_object());
2143 VisitForAccumulatorValue(prop->key());
2144 Register scratch = a2;
2145 Register scratch2 = a3;
2146 __ ld(scratch2, MemOperand(sp, 2 * kPointerSize)); // value
2147 // stack: value, this, home_object; v0: key, a3: value
2148 __ ld(scratch, MemOperand(sp, kPointerSize)); // this
2149 __ sd(scratch, MemOperand(sp, 2 * kPointerSize));
2150 __ ld(scratch, MemOperand(sp, 0)); // home_object
2151 __ sd(scratch, MemOperand(sp, kPointerSize));
2152 __ sd(v0, MemOperand(sp, 0));
2153 __ Move(v0, scratch2);
2154 // stack: this, home_object, key; v0: value.
2155 EmitKeyedSuperPropertyStore(prop);
2156 break;
2157 }
2158 case KEYED_PROPERTY: {
Ben Murdoch097c5b22016-05-18 11:27:45 +01002159 PushOperand(result_register()); // Preserve value.
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00002160 VisitForStackValue(prop->obj());
2161 VisitForAccumulatorValue(prop->key());
2162 __ Move(StoreDescriptor::NameRegister(), result_register());
Ben Murdoch097c5b22016-05-18 11:27:45 +01002163 PopOperands(StoreDescriptor::ValueRegister(),
2164 StoreDescriptor::ReceiverRegister());
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00002165 EmitLoadStoreICSlot(slot);
2166 Handle<Code> ic =
2167 CodeFactory::KeyedStoreIC(isolate(), language_mode()).code();
2168 CallIC(ic);
2169 break;
2170 }
2171 }
2172 context()->Plug(v0);
2173}
2174
2175
2176void FullCodeGenerator::EmitStoreToStackLocalOrContextSlot(
2177 Variable* var, MemOperand location) {
2178 __ sd(result_register(), location);
2179 if (var->IsContextSlot()) {
2180 // RecordWrite may destroy all its register arguments.
2181 __ Move(a3, result_register());
2182 int offset = Context::SlotOffset(var->index());
2183 __ RecordWriteContextSlot(
2184 a1, offset, a3, a2, kRAHasBeenSaved, kDontSaveFPRegs);
2185 }
2186}
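// Rough rationale: a store into a context slot may create a pointer that the
// incremental marker or store buffer has not yet seen, so
// RecordWriteContextSlot informs the GC. Stores into stack slots need no
// barrier, which is why only the IsContextSlot() case emits one.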
2187
2188
2189void FullCodeGenerator::EmitVariableAssignment(Variable* var, Token::Value op,
2190 FeedbackVectorSlot slot) {
2191 if (var->IsUnallocated()) {
2192 // Global var, const, or let.
2193 __ mov(StoreDescriptor::ValueRegister(), result_register());
2194 __ li(StoreDescriptor::NameRegister(), Operand(var->name()));
2195 __ LoadGlobalObject(StoreDescriptor::ReceiverRegister());
2196 EmitLoadStoreICSlot(slot);
2197 CallStoreIC();
2198
2199 } else if (var->mode() == LET && op != Token::INIT) {
2200 // Non-initializing assignment to let variable needs a write barrier.
2201 DCHECK(!var->IsLookupSlot());
2202 DCHECK(var->IsStackAllocated() || var->IsContextSlot());
2203 Label assign;
2204 MemOperand location = VarOperand(var, a1);
2205 __ ld(a3, location);
2206 __ LoadRoot(a4, Heap::kTheHoleValueRootIndex);
2207 __ Branch(&assign, ne, a3, Operand(a4));
2208 __ li(a3, Operand(var->name()));
2209 __ push(a3);
2210 __ CallRuntime(Runtime::kThrowReferenceError);
2211 // Perform the assignment.
2212 __ bind(&assign);
2213 EmitStoreToStackLocalOrContextSlot(var, location);
2214
2215 } else if (var->mode() == CONST && op != Token::INIT) {
2216 // Assignment to const variable needs a write barrier.
2217 DCHECK(!var->IsLookupSlot());
2218 DCHECK(var->IsStackAllocated() || var->IsContextSlot());
2219 Label const_error;
2220 MemOperand location = VarOperand(var, a1);
2221 __ ld(a3, location);
2222 __ LoadRoot(at, Heap::kTheHoleValueRootIndex);
2223 __ Branch(&const_error, ne, a3, Operand(at));
2224 __ li(a3, Operand(var->name()));
2225 __ push(a3);
2226 __ CallRuntime(Runtime::kThrowReferenceError);
2227 __ bind(&const_error);
2228 __ CallRuntime(Runtime::kThrowConstAssignError);
2229
2230 } else if (var->is_this() && var->mode() == CONST && op == Token::INIT) {
2231 // Initializing assignment to const {this} needs a write barrier.
2232 DCHECK(var->IsStackAllocated() || var->IsContextSlot());
2233 Label uninitialized_this;
2234 MemOperand location = VarOperand(var, a1);
2235 __ ld(a3, location);
2236 __ LoadRoot(at, Heap::kTheHoleValueRootIndex);
2237 __ Branch(&uninitialized_this, eq, a3, Operand(at));
2238 __ li(a0, Operand(var->name()));
2239 __ Push(a0);
2240 __ CallRuntime(Runtime::kThrowReferenceError);
2241 __ bind(&uninitialized_this);
2242 EmitStoreToStackLocalOrContextSlot(var, location);
2243
Ben Murdochc5610432016-08-08 18:44:38 +01002244 } else if (!var->is_const_mode() || op == Token::INIT) {
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00002245 if (var->IsLookupSlot()) {
Ben Murdoch097c5b22016-05-18 11:27:45 +01002246 __ Push(var->name());
2247 __ Push(v0);
2248 __ CallRuntime(is_strict(language_mode())
2249 ? Runtime::kStoreLookupSlot_Strict
2250 : Runtime::kStoreLookupSlot_Sloppy);
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00002251 } else {
2252 // Assignment to var or initializing assignment to let/const in harmony
2253 // mode.
2254 DCHECK((var->IsStackAllocated() || var->IsContextSlot()));
2255 MemOperand location = VarOperand(var, a1);
Ben Murdoch097c5b22016-05-18 11:27:45 +01002256 if (FLAG_debug_code && var->mode() == LET && op == Token::INIT) {
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00002257 // Check for an uninitialized let binding.
2258 __ ld(a2, location);
2259 __ LoadRoot(a4, Heap::kTheHoleValueRootIndex);
2260 __ Check(eq, kLetBindingReInitialization, a2, Operand(a4));
2261 }
2262 EmitStoreToStackLocalOrContextSlot(var, location);
2263 }
2264
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00002265 } else {
2266 DCHECK(var->mode() == CONST_LEGACY && op != Token::INIT);
2267 if (is_strict(language_mode())) {
2268 __ CallRuntime(Runtime::kThrowConstAssignError);
2269 }
2270 // Silently ignore store in sloppy mode.
2271 }
2272}
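// Illustrative consequences of the cases above:
//   const c = 1; c = 2;      // initialized -> kThrowConstAssignError
//   { c = 2; const c = 1; }  // still the hole (TDZ) -> kThrowReferenceError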
2273
2274
2275void FullCodeGenerator::EmitNamedPropertyAssignment(Assignment* expr) {
2276 // Assignment to a property, using a named store IC.
2277 Property* prop = expr->target()->AsProperty();
2278 DCHECK(prop != NULL);
2279 DCHECK(prop->key()->IsLiteral());
2280
2281 __ mov(StoreDescriptor::ValueRegister(), result_register());
2282 __ li(StoreDescriptor::NameRegister(),
2283 Operand(prop->key()->AsLiteral()->value()));
Ben Murdoch097c5b22016-05-18 11:27:45 +01002284 PopOperand(StoreDescriptor::ReceiverRegister());
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00002285 EmitLoadStoreICSlot(expr->AssignmentSlot());
2286 CallStoreIC();
2287
Ben Murdochc5610432016-08-08 18:44:38 +01002288 PrepareForBailoutForId(expr->AssignmentId(), BailoutState::TOS_REGISTER);
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00002289 context()->Plug(v0);
2290}
2291
2292
2293void FullCodeGenerator::EmitNamedSuperPropertyStore(Property* prop) {
2294 // Assignment to named property of super.
2295 // v0 : value
2296 // stack : receiver ('this'), home_object
2297 DCHECK(prop != NULL);
2298 Literal* key = prop->key()->AsLiteral();
2299 DCHECK(key != NULL);
2300
Ben Murdoch097c5b22016-05-18 11:27:45 +01002301 PushOperand(key->value());
2302 PushOperand(v0);
2303 CallRuntimeWithOperands(is_strict(language_mode())
2304 ? Runtime::kStoreToSuper_Strict
2305 : Runtime::kStoreToSuper_Sloppy);
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00002306}
2307
2308
2309void FullCodeGenerator::EmitKeyedSuperPropertyStore(Property* prop) {
2310 // Assignment to named property of super.
2311 // v0 : value
2312 // stack : receiver ('this'), home_object, key
2313 DCHECK(prop != NULL);
2314
Ben Murdoch097c5b22016-05-18 11:27:45 +01002315 PushOperand(v0);
2316 CallRuntimeWithOperands(is_strict(language_mode())
2317 ? Runtime::kStoreKeyedToSuper_Strict
2318 : Runtime::kStoreKeyedToSuper_Sloppy);
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00002319}
2320
2321
2322void FullCodeGenerator::EmitKeyedPropertyAssignment(Assignment* expr) {
2323 // Assignment to a property, using a keyed store IC.
2324 // Call keyed store IC.
2325 // The arguments are:
2326 // - a0 is the value,
2327 // - a1 is the key,
2328 // - a2 is the receiver.
2329 __ mov(StoreDescriptor::ValueRegister(), result_register());
Ben Murdoch097c5b22016-05-18 11:27:45 +01002330 PopOperands(StoreDescriptor::ReceiverRegister(),
2331 StoreDescriptor::NameRegister());
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00002332 DCHECK(StoreDescriptor::ValueRegister().is(a0));
2333
2334 Handle<Code> ic =
2335 CodeFactory::KeyedStoreIC(isolate(), language_mode()).code();
2336 EmitLoadStoreICSlot(expr->AssignmentSlot());
2337 CallIC(ic);
2338
Ben Murdochc5610432016-08-08 18:44:38 +01002339 PrepareForBailoutForId(expr->AssignmentId(), BailoutState::TOS_REGISTER);
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00002340 context()->Plug(v0);
2341}
2342
2343
2344void FullCodeGenerator::CallIC(Handle<Code> code,
2345 TypeFeedbackId id) {
2346 ic_total_count_++;
2347 __ Call(code, RelocInfo::CODE_TARGET, id);
2348}
2349
2350
2351// Code common for calls using the IC.
2352void FullCodeGenerator::EmitCallWithLoadIC(Call* expr) {
2353 Expression* callee = expr->expression();
2354
2355 // Get the target function.
2356 ConvertReceiverMode convert_mode;
2357 if (callee->IsVariableProxy()) {
2358 { StackValueContext context(this);
2359 EmitVariableLoad(callee->AsVariableProxy());
Ben Murdochc5610432016-08-08 18:44:38 +01002360 PrepareForBailout(callee, BailoutState::NO_REGISTERS);
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00002361 }
2362 // Push undefined as receiver. This is patched in the method prologue if it
2363 // is a sloppy mode method.
2364 __ LoadRoot(at, Heap::kUndefinedValueRootIndex);
Ben Murdoch097c5b22016-05-18 11:27:45 +01002365 PushOperand(at);
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00002366 convert_mode = ConvertReceiverMode::kNullOrUndefined;
2367 } else {
2368 // Load the function from the receiver.
2369 DCHECK(callee->IsProperty());
2370 DCHECK(!callee->AsProperty()->IsSuperAccess());
2371 __ ld(LoadDescriptor::ReceiverRegister(), MemOperand(sp, 0));
2372 EmitNamedPropertyLoad(callee->AsProperty());
Ben Murdochc5610432016-08-08 18:44:38 +01002373 PrepareForBailoutForId(callee->AsProperty()->LoadId(),
2374 BailoutState::TOS_REGISTER);
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00002375 // Push the target function under the receiver.
2376 __ ld(at, MemOperand(sp, 0));
Ben Murdoch097c5b22016-05-18 11:27:45 +01002377 PushOperand(at);
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00002378 __ sd(v0, MemOperand(sp, kPointerSize));
2379 convert_mode = ConvertReceiverMode::kNotNullOrUndefined;
2380 }
2381
2382 EmitCall(expr, convert_mode);
2383}
2384
2385
2386void FullCodeGenerator::EmitSuperCallWithLoadIC(Call* expr) {
2387 SetExpressionPosition(expr);
2388 Expression* callee = expr->expression();
2389 DCHECK(callee->IsProperty());
2390 Property* prop = callee->AsProperty();
2391 DCHECK(prop->IsSuperAccess());
2392
2393 Literal* key = prop->key()->AsLiteral();
2394 DCHECK(!key->value()->IsSmi());
2395 // Load the function from the receiver.
2396 const Register scratch = a1;
2397 SuperPropertyReference* super_ref = prop->obj()->AsSuperPropertyReference();
2398 VisitForAccumulatorValue(super_ref->home_object());
2399 __ mov(scratch, v0);
2400 VisitForAccumulatorValue(super_ref->this_var());
Ben Murdoch097c5b22016-05-18 11:27:45 +01002401 PushOperands(scratch, v0, v0, scratch);
2402 PushOperand(key->value());
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00002403
2404 // Stack here:
2405 // - home_object
2406 // - this (receiver)
2407 // - this (receiver) <-- LoadFromSuper will pop here and below.
2408 // - home_object
2409 // - key
Ben Murdoch097c5b22016-05-18 11:27:45 +01002410 CallRuntimeWithOperands(Runtime::kLoadFromSuper);
Ben Murdochc5610432016-08-08 18:44:38 +01002411 PrepareForBailoutForId(prop->LoadId(), BailoutState::TOS_REGISTER);
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00002412
2413 // Replace home_object with target function.
2414 __ sd(v0, MemOperand(sp, kPointerSize));
2415
2416 // Stack here:
2417 // - target function
2418 // - this (receiver)
2419 EmitCall(expr);
2420}
2421
2422
2423// Code common for calls using the IC.
2424void FullCodeGenerator::EmitKeyedCallWithLoadIC(Call* expr,
2425 Expression* key) {
2426 // Load the key.
2427 VisitForAccumulatorValue(key);
2428
2429 Expression* callee = expr->expression();
2430
2431 // Load the function from the receiver.
2432 DCHECK(callee->IsProperty());
2433 __ ld(LoadDescriptor::ReceiverRegister(), MemOperand(sp, 0));
2434 __ Move(LoadDescriptor::NameRegister(), v0);
2435 EmitKeyedPropertyLoad(callee->AsProperty());
Ben Murdochc5610432016-08-08 18:44:38 +01002436 PrepareForBailoutForId(callee->AsProperty()->LoadId(),
2437 BailoutState::TOS_REGISTER);
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00002438
2439 // Push the target function under the receiver.
2440 __ ld(at, MemOperand(sp, 0));
Ben Murdoch097c5b22016-05-18 11:27:45 +01002441 PushOperand(at);
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00002442 __ sd(v0, MemOperand(sp, kPointerSize));
2443
2444 EmitCall(expr, ConvertReceiverMode::kNotNullOrUndefined);
2445}
2446
2447
2448void FullCodeGenerator::EmitKeyedSuperCallWithLoadIC(Call* expr) {
2449 Expression* callee = expr->expression();
2450 DCHECK(callee->IsProperty());
2451 Property* prop = callee->AsProperty();
2452 DCHECK(prop->IsSuperAccess());
2453
2454 SetExpressionPosition(prop);
2455 // Load the function from the receiver.
2456 const Register scratch = a1;
2457 SuperPropertyReference* super_ref = prop->obj()->AsSuperPropertyReference();
2458 VisitForAccumulatorValue(super_ref->home_object());
2459 __ Move(scratch, v0);
2460 VisitForAccumulatorValue(super_ref->this_var());
Ben Murdoch097c5b22016-05-18 11:27:45 +01002461 PushOperands(scratch, v0, v0, scratch);
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00002462 VisitForStackValue(prop->key());
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00002463
2464 // Stack here:
2465 // - home_object
2466 // - this (receiver)
2467 // - this (receiver) <-- LoadKeyedFromSuper will pop here and below.
2468 // - home_object
2469 // - key
Ben Murdoch097c5b22016-05-18 11:27:45 +01002470 CallRuntimeWithOperands(Runtime::kLoadKeyedFromSuper);
Ben Murdochc5610432016-08-08 18:44:38 +01002471 PrepareForBailoutForId(prop->LoadId(), BailoutState::TOS_REGISTER);
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00002472
2473 // Replace home_object with target function.
2474 __ sd(v0, MemOperand(sp, kPointerSize));
2475
2476 // Stack here:
2477 // - target function
2478 // - this (receiver)
2479 EmitCall(expr);
2480}
2481
2482
2483void FullCodeGenerator::EmitCall(Call* expr, ConvertReceiverMode mode) {
2484 // Load the arguments.
2485 ZoneList<Expression*>* args = expr->arguments();
2486 int arg_count = args->length();
2487 for (int i = 0; i < arg_count; i++) {
2488 VisitForStackValue(args->at(i));
2489 }
2490
Ben Murdochc5610432016-08-08 18:44:38 +01002491 PrepareForBailoutForId(expr->CallId(), BailoutState::NO_REGISTERS);
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00002492 // Record source position of the IC call.
Ben Murdochda12d292016-06-02 14:46:10 +01002493 SetCallPosition(expr, expr->tail_call_mode());
Ben Murdoch097c5b22016-05-18 11:27:45 +01002494 if (expr->tail_call_mode() == TailCallMode::kAllow) {
2495 if (FLAG_trace) {
2496 __ CallRuntime(Runtime::kTraceTailCall);
2497 }
2498 // Update profiling counters before the tail call since we will
2499 // not return to this function.
2500 EmitProfilingCounterHandlingForReturnSequence(true);
2501 }
2502 Handle<Code> ic =
2503 CodeFactory::CallIC(isolate(), arg_count, mode, expr->tail_call_mode())
2504 .code();
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00002505 __ li(a3, Operand(SmiFromSlot(expr->CallFeedbackICSlot())));
2506 __ ld(a1, MemOperand(sp, (arg_count + 1) * kPointerSize));
2507 // Don't assign a type feedback id to the IC, since type feedback is provided
2508 // by the vector above.
2509 CallIC(ic);
Ben Murdoch097c5b22016-05-18 11:27:45 +01002510 OperandStackDepthDecrement(arg_count + 1);
2511
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00002512 RecordJSReturnSite(expr);
Ben Murdochc5610432016-08-08 18:44:38 +01002513 RestoreContext();
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00002514 context()->DropAndPlug(1, v0);
2515}
2516
Ben Murdochc5610432016-08-08 18:44:38 +01002517void FullCodeGenerator::EmitResolvePossiblyDirectEval(Call* expr) {
2518 int arg_count = expr->arguments()->length();
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00002519 // a6: copy of the first argument or undefined if it doesn't exist.
2520 if (arg_count > 0) {
2521 __ ld(a6, MemOperand(sp, arg_count * kPointerSize));
2522 } else {
2523 __ LoadRoot(a6, Heap::kUndefinedValueRootIndex);
2524 }
2525
2526 // a5: the receiver of the enclosing function.
2527 __ ld(a5, MemOperand(fp, JavaScriptFrameConstants::kFunctionOffset));
2528
2529 // a4: the language mode.
2530 __ li(a4, Operand(Smi::FromInt(language_mode())));
2531
2532  // a1: the start position of the scope the call resides in.
2533 __ li(a1, Operand(Smi::FromInt(scope()->start_position())));
2534
Ben Murdochc5610432016-08-08 18:44:38 +01002535 // a0: the source position of the eval call.
2536 __ li(a0, Operand(Smi::FromInt(expr->position())));
2537
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00002538 // Do the runtime call.
Ben Murdochc5610432016-08-08 18:44:38 +01002539 __ Push(a6, a5, a4, a1, a0);
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00002540 __ CallRuntime(Runtime::kResolvePossiblyDirectEval);
2541}
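// Sketch of the call above: for a direct eval such as eval(src), the runtime
// receives (first argument or undefined, the enclosing function, the language
// mode, the scope's start position, the source position of the eval call) and
// returns the resolved function to invoke in its place.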
2542
2543
2544// See http://www.ecma-international.org/ecma-262/6.0/#sec-function-calls.
2545void FullCodeGenerator::PushCalleeAndWithBaseObject(Call* expr) {
2546 VariableProxy* callee = expr->expression()->AsVariableProxy();
2547 if (callee->var()->IsLookupSlot()) {
2548 Label slow, done;
2549
2550 SetExpressionPosition(callee);
2551 // Generate code for loading from variables potentially shadowed by
2552 // eval-introduced variables.
2553 EmitDynamicLookupFastCase(callee, NOT_INSIDE_TYPEOF, &slow, &done);
2554
2555 __ bind(&slow);
2556 // Call the runtime to find the function to call (returned in v0)
2557 // and the object holding it (returned in v1).
Ben Murdoch097c5b22016-05-18 11:27:45 +01002558 __ Push(callee->name());
2559 __ CallRuntime(Runtime::kLoadLookupSlotForCall);
2560 PushOperands(v0, v1); // Function, receiver.
Ben Murdochc5610432016-08-08 18:44:38 +01002561 PrepareForBailoutForId(expr->LookupId(), BailoutState::NO_REGISTERS);
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00002562
2563 // If fast case code has been generated, emit code to push the
2564 // function and receiver and have the slow path jump around this
2565 // code.
2566 if (done.is_linked()) {
2567 Label call;
2568 __ Branch(&call);
2569 __ bind(&done);
2570 // Push function.
2571 __ push(v0);
2572      // The receiver is implicitly the global receiver. Indicate this
2573      // by passing undefined as the receiver.
2574 __ LoadRoot(a1, Heap::kUndefinedValueRootIndex);
2575 __ push(a1);
2576 __ bind(&call);
2577 }
2578 } else {
2579 VisitForStackValue(callee);
2580 // refEnv.WithBaseObject()
2581 __ LoadRoot(a2, Heap::kUndefinedValueRootIndex);
Ben Murdoch097c5b22016-05-18 11:27:45 +01002582 PushOperand(a2); // Reserved receiver slot.
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00002583 }
2584}
2585
2586
2587void FullCodeGenerator::EmitPossiblyEvalCall(Call* expr) {
Ben Murdochc5610432016-08-08 18:44:38 +01002588 // In a call to eval, we first call Runtime_ResolvePossiblyDirectEval
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00002589 // to resolve the function we need to call. Then we call the resolved
2590 // function using the given arguments.
2591 ZoneList<Expression*>* args = expr->arguments();
2592 int arg_count = args->length();
2593 PushCalleeAndWithBaseObject(expr);
2594
2595 // Push the arguments.
2596 for (int i = 0; i < arg_count; i++) {
2597 VisitForStackValue(args->at(i));
2598 }
2599
2600 // Push a copy of the function (found below the arguments) and
2601 // resolve eval.
2602 __ ld(a1, MemOperand(sp, (arg_count + 1) * kPointerSize));
2603 __ push(a1);
Ben Murdochc5610432016-08-08 18:44:38 +01002604 EmitResolvePossiblyDirectEval(expr);
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00002605
2606 // Touch up the stack with the resolved function.
2607 __ sd(v0, MemOperand(sp, (arg_count + 1) * kPointerSize));
2608
Ben Murdochc5610432016-08-08 18:44:38 +01002609 PrepareForBailoutForId(expr->EvalId(), BailoutState::NO_REGISTERS);
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00002610 // Record source position for debugger.
2611 SetCallPosition(expr);
2612 __ ld(a1, MemOperand(sp, (arg_count + 1) * kPointerSize));
2613 __ li(a0, Operand(arg_count));
Ben Murdoch097c5b22016-05-18 11:27:45 +01002614 __ Call(isolate()->builtins()->Call(ConvertReceiverMode::kAny,
2615 expr->tail_call_mode()),
2616 RelocInfo::CODE_TARGET);
2617 OperandStackDepthDecrement(arg_count + 1);
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00002618 RecordJSReturnSite(expr);
Ben Murdochc5610432016-08-08 18:44:38 +01002619 RestoreContext();
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00002620 context()->DropAndPlug(1, v0);
2621}
2622
2623
2624void FullCodeGenerator::VisitCallNew(CallNew* expr) {
2625 Comment cmnt(masm_, "[ CallNew");
2626 // According to ECMA-262, section 11.2.2, page 44, the function
2627 // expression in new calls must be evaluated before the
2628 // arguments.
2629
2630 // Push constructor on the stack. If it's not a function it's used as
2631 // receiver for CALL_NON_FUNCTION, otherwise the value on the stack is
2632 // ignored.
2633 DCHECK(!expr->expression()->IsSuperPropertyReference());
2634 VisitForStackValue(expr->expression());
2635
2636 // Push the arguments ("left-to-right") on the stack.
2637 ZoneList<Expression*>* args = expr->arguments();
2638 int arg_count = args->length();
2639 for (int i = 0; i < arg_count; i++) {
2640 VisitForStackValue(args->at(i));
2641 }
2642
2643 // Call the construct call builtin that handles allocation and
2644 // constructor invocation.
2645 SetConstructCallPosition(expr);
2646
2647 // Load function and argument count into a1 and a0.
2648 __ li(a0, Operand(arg_count));
2649 __ ld(a1, MemOperand(sp, arg_count * kPointerSize));
2650
2651 // Record call targets in unoptimized code.
2652 __ EmitLoadTypeFeedbackVector(a2);
2653 __ li(a3, Operand(SmiFromSlot(expr->CallNewFeedbackSlot())));
2654
2655 CallConstructStub stub(isolate());
2656 __ Call(stub.GetCode(), RelocInfo::CODE_TARGET);
Ben Murdoch097c5b22016-05-18 11:27:45 +01002657 OperandStackDepthDecrement(arg_count + 1);
Ben Murdochc5610432016-08-08 18:44:38 +01002658 PrepareForBailoutForId(expr->ReturnId(), BailoutState::TOS_REGISTER);
2659 RestoreContext();
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00002660 context()->Plug(v0);
2661}
2662
2663
2664void FullCodeGenerator::EmitSuperConstructorCall(Call* expr) {
2665 SuperCallReference* super_call_ref =
2666 expr->expression()->AsSuperCallReference();
2667 DCHECK_NOT_NULL(super_call_ref);
2668
2669 // Push the super constructor target on the stack (may be null,
2670 // but the Construct builtin can deal with that properly).
2671 VisitForAccumulatorValue(super_call_ref->this_function_var());
2672 __ AssertFunction(result_register());
2673 __ ld(result_register(),
2674 FieldMemOperand(result_register(), HeapObject::kMapOffset));
2675 __ ld(result_register(),
2676 FieldMemOperand(result_register(), Map::kPrototypeOffset));
Ben Murdoch097c5b22016-05-18 11:27:45 +01002677 PushOperand(result_register());
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00002678
2679 // Push the arguments ("left-to-right") on the stack.
2680 ZoneList<Expression*>* args = expr->arguments();
2681 int arg_count = args->length();
2682 for (int i = 0; i < arg_count; i++) {
2683 VisitForStackValue(args->at(i));
2684 }
2685
2686 // Call the construct call builtin that handles allocation and
2687 // constructor invocation.
2688 SetConstructCallPosition(expr);
2689
2690 // Load new target into a3.
2691 VisitForAccumulatorValue(super_call_ref->new_target_var());
2692 __ mov(a3, result_register());
2693
2694 // Load function and argument count into a1 and a0.
2695 __ li(a0, Operand(arg_count));
2696 __ ld(a1, MemOperand(sp, arg_count * kPointerSize));
2697
2698 __ Call(isolate()->builtins()->Construct(), RelocInfo::CODE_TARGET);
Ben Murdoch097c5b22016-05-18 11:27:45 +01002699 OperandStackDepthDecrement(arg_count + 1);
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00002700
2701 RecordJSReturnSite(expr);
Ben Murdochc5610432016-08-08 18:44:38 +01002702 RestoreContext();
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00002703 context()->Plug(v0);
2704}
2705
2706
2707void FullCodeGenerator::EmitIsSmi(CallRuntime* expr) {
2708 ZoneList<Expression*>* args = expr->arguments();
2709 DCHECK(args->length() == 1);
2710
2711 VisitForAccumulatorValue(args->at(0));
2712
2713 Label materialize_true, materialize_false;
2714 Label* if_true = NULL;
2715 Label* if_false = NULL;
2716 Label* fall_through = NULL;
2717 context()->PrepareTest(&materialize_true, &materialize_false,
2718 &if_true, &if_false, &fall_through);
2719
2720 PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
  __ SmiTst(v0, a4);
  Split(eq, a4, Operand(zero_reg), if_true, if_false, fall_through);

  context()->Plug(if_true, if_false);
}


void FullCodeGenerator::EmitIsJSReceiver(CallRuntime* expr) {
  ZoneList<Expression*>* args = expr->arguments();
  DCHECK(args->length() == 1);

  VisitForAccumulatorValue(args->at(0));

  Label materialize_true, materialize_false;
  Label* if_true = NULL;
  Label* if_false = NULL;
  Label* fall_through = NULL;
  context()->PrepareTest(&materialize_true, &materialize_false,
                         &if_true, &if_false, &fall_through);

  __ JumpIfSmi(v0, if_false);
  __ GetObjectType(v0, a1, a1);
  PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
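  // JS receiver instance types sit at the tail of the instance-type enum, so
  // a single >= check against FIRST_JS_RECEIVER_TYPE covers all receivers.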
  Split(ge, a1, Operand(FIRST_JS_RECEIVER_TYPE),
        if_true, if_false, fall_through);

  context()->Plug(if_true, if_false);
}


void FullCodeGenerator::EmitIsArray(CallRuntime* expr) {
  ZoneList<Expression*>* args = expr->arguments();
  DCHECK(args->length() == 1);

  VisitForAccumulatorValue(args->at(0));

  Label materialize_true, materialize_false;
  Label* if_true = NULL;
  Label* if_false = NULL;
  Label* fall_through = NULL;
  context()->PrepareTest(&materialize_true, &materialize_false,
                         &if_true, &if_false, &fall_through);

  __ JumpIfSmi(v0, if_false);
  __ GetObjectType(v0, a1, a1);
  PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
  Split(eq, a1, Operand(JS_ARRAY_TYPE),
        if_true, if_false, fall_through);

  context()->Plug(if_true, if_false);
}


void FullCodeGenerator::EmitIsTypedArray(CallRuntime* expr) {
  ZoneList<Expression*>* args = expr->arguments();
  DCHECK(args->length() == 1);

  VisitForAccumulatorValue(args->at(0));

  Label materialize_true, materialize_false;
  Label* if_true = NULL;
  Label* if_false = NULL;
  Label* fall_through = NULL;
  context()->PrepareTest(&materialize_true, &materialize_false, &if_true,
                         &if_false, &fall_through);

  __ JumpIfSmi(v0, if_false);
  __ GetObjectType(v0, a1, a1);
  PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
  Split(eq, a1, Operand(JS_TYPED_ARRAY_TYPE), if_true, if_false, fall_through);

  context()->Plug(if_true, if_false);
}


void FullCodeGenerator::EmitIsRegExp(CallRuntime* expr) {
  ZoneList<Expression*>* args = expr->arguments();
  DCHECK(args->length() == 1);

  VisitForAccumulatorValue(args->at(0));

  Label materialize_true, materialize_false;
  Label* if_true = NULL;
  Label* if_false = NULL;
  Label* fall_through = NULL;
  context()->PrepareTest(&materialize_true, &materialize_false,
                         &if_true, &if_false, &fall_through);

  __ JumpIfSmi(v0, if_false);
  __ GetObjectType(v0, a1, a1);
  PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
  Split(eq, a1, Operand(JS_REGEXP_TYPE), if_true, if_false, fall_through);

  context()->Plug(if_true, if_false);
}


void FullCodeGenerator::EmitIsJSProxy(CallRuntime* expr) {
  ZoneList<Expression*>* args = expr->arguments();
  DCHECK(args->length() == 1);

  VisitForAccumulatorValue(args->at(0));

  Label materialize_true, materialize_false;
  Label* if_true = NULL;
  Label* if_false = NULL;
  Label* fall_through = NULL;
  context()->PrepareTest(&materialize_true, &materialize_false, &if_true,
                         &if_false, &fall_through);

  __ JumpIfSmi(v0, if_false);
  __ GetObjectType(v0, a1, a1);
  PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
  Split(eq, a1, Operand(JS_PROXY_TYPE), if_true, if_false, fall_through);

  context()->Plug(if_true, if_false);
}


void FullCodeGenerator::EmitClassOf(CallRuntime* expr) {
  ZoneList<Expression*>* args = expr->arguments();
  DCHECK(args->length() == 1);
  Label done, null, function, non_function_constructor;

  VisitForAccumulatorValue(args->at(0));

  // If the object is not a JSReceiver, we return null.
  __ JumpIfSmi(v0, &null);
  STATIC_ASSERT(LAST_JS_RECEIVER_TYPE == LAST_TYPE);
  __ GetObjectType(v0, v0, a1);  // Map is now in v0.
  __ Branch(&null, lt, a1, Operand(FIRST_JS_RECEIVER_TYPE));

  // Return 'Function' for JSFunction and JSBoundFunction objects.
  STATIC_ASSERT(LAST_FUNCTION_TYPE == LAST_TYPE);
  __ Branch(&function, hs, a1, Operand(FIRST_FUNCTION_TYPE));

  // Check if the constructor in the map is a JS function.
  Register instance_type = a2;
  __ GetMapConstructor(v0, v0, a1, instance_type);
  __ Branch(&non_function_constructor, ne, instance_type,
            Operand(JS_FUNCTION_TYPE));

  // v0 now contains the constructor function. Grab the
  // instance class name from there.
  __ ld(v0, FieldMemOperand(v0, JSFunction::kSharedFunctionInfoOffset));
  __ ld(v0, FieldMemOperand(v0, SharedFunctionInfo::kInstanceClassNameOffset));
  __ Branch(&done);

  // Functions have class 'Function'.
  __ bind(&function);
  __ LoadRoot(v0, Heap::kFunction_stringRootIndex);
  __ jmp(&done);

  // Objects with a non-function constructor have class 'Object'.
  __ bind(&non_function_constructor);
  __ LoadRoot(v0, Heap::kObject_stringRootIndex);
  __ jmp(&done);

  // Non-JS objects have class null.
  __ bind(&null);
  __ LoadRoot(v0, Heap::kNullValueRootIndex);

  // All done.
  __ bind(&done);

  context()->Plug(v0);
}


void FullCodeGenerator::EmitValueOf(CallRuntime* expr) {
  ZoneList<Expression*>* args = expr->arguments();
  DCHECK(args->length() == 1);

  VisitForAccumulatorValue(args->at(0));  // Load the object.

  Label done;
  // If the object is a smi return the object.
  __ JumpIfSmi(v0, &done);
  // If the object is not a value type, return the object.
  __ GetObjectType(v0, a1, a1);
  __ Branch(&done, ne, a1, Operand(JS_VALUE_TYPE));

  __ ld(v0, FieldMemOperand(v0, JSValue::kValueOffset));

  __ bind(&done);
  context()->Plug(v0);
}


void FullCodeGenerator::EmitOneByteSeqStringSetChar(CallRuntime* expr) {
  ZoneList<Expression*>* args = expr->arguments();
  DCHECK_EQ(3, args->length());

  Register string = v0;
  Register index = a1;
  Register value = a2;

  VisitForStackValue(args->at(0));        // index
  VisitForStackValue(args->at(1));        // value
  VisitForAccumulatorValue(args->at(2));  // string
  PopOperands(index, value);

  if (FLAG_debug_code) {
    __ SmiTst(value, at);
    __ Check(eq, kNonSmiValue, at, Operand(zero_reg));
    __ SmiTst(index, at);
    __ Check(eq, kNonSmiIndex, at, Operand(zero_reg));
    __ SmiUntag(index, index);
    static const uint32_t one_byte_seq_type = kSeqStringTag | kOneByteStringTag;
    Register scratch = t1;
    __ EmitSeqStringSetCharCheck(
        string, index, value, scratch, one_byte_seq_type);
    __ SmiTag(index, index);
  }

  __ SmiUntag(value, value);
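  // Compute the character address: string + header size (minus the
  // heap-object tag) + untagged index.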
  __ Daddu(at,
           string,
           Operand(SeqOneByteString::kHeaderSize - kHeapObjectTag));
  __ SmiUntag(index);
  __ Daddu(at, at, index);
  __ sb(value, MemOperand(at));
  context()->Plug(string);
}


void FullCodeGenerator::EmitTwoByteSeqStringSetChar(CallRuntime* expr) {
  ZoneList<Expression*>* args = expr->arguments();
  DCHECK_EQ(3, args->length());

  Register string = v0;
  Register index = a1;
  Register value = a2;

  VisitForStackValue(args->at(0));        // index
  VisitForStackValue(args->at(1));        // value
  VisitForAccumulatorValue(args->at(2));  // string
  PopOperands(index, value);

  if (FLAG_debug_code) {
    __ SmiTst(value, at);
    __ Check(eq, kNonSmiValue, at, Operand(zero_reg));
    __ SmiTst(index, at);
    __ Check(eq, kNonSmiIndex, at, Operand(zero_reg));
    __ SmiUntag(index, index);
    static const uint32_t two_byte_seq_type = kSeqStringTag | kTwoByteStringTag;
    Register scratch = t1;
    __ EmitSeqStringSetCharCheck(
        string, index, value, scratch, two_byte_seq_type);
    __ SmiTag(index, index);
  }

  __ SmiUntag(value, value);
  __ Daddu(at,
           string,
           Operand(SeqTwoByteString::kHeaderSize - kHeapObjectTag));
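  // On MIPS64 a smi keeps its 32-bit payload in the upper word; shifting
  // right by 31 therefore yields index * 2, the byte offset of a two-byte
  // character.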
  __ dsra(index, index, 32 - 1);
  __ Daddu(at, at, index);
  STATIC_ASSERT(kSmiTagSize == 1 && kSmiTag == 0);
  __ sh(value, MemOperand(at));
  context()->Plug(string);
}


void FullCodeGenerator::EmitStringCharFromCode(CallRuntime* expr) {
  ZoneList<Expression*>* args = expr->arguments();
  DCHECK(args->length() == 1);

  VisitForAccumulatorValue(args->at(0));

  Label done;
  StringCharFromCodeGenerator generator(v0, a1);
  generator.GenerateFast(masm_);
  __ jmp(&done);

  NopRuntimeCallHelper call_helper;
  generator.GenerateSlow(masm_, call_helper);

  __ bind(&done);
  context()->Plug(a1);
}


void FullCodeGenerator::EmitStringCharCodeAt(CallRuntime* expr) {
  ZoneList<Expression*>* args = expr->arguments();
  DCHECK(args->length() == 2);

  VisitForStackValue(args->at(0));
  VisitForAccumulatorValue(args->at(1));
  __ mov(a0, result_register());

  Register object = a1;
  Register index = a0;
  Register result = v0;

  PopOperand(object);

  Label need_conversion;
  Label index_out_of_range;
  Label done;
  StringCharCodeAtGenerator generator(object,
                                      index,
                                      result,
                                      &need_conversion,
                                      &need_conversion,
                                      &index_out_of_range,
                                      STRING_INDEX_IS_NUMBER);
  generator.GenerateFast(masm_);
  __ jmp(&done);

  __ bind(&index_out_of_range);
  // When the index is out of range, the spec requires us to return
  // NaN.
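  // For example, "abc".charCodeAt(42) evaluates to NaN.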
  __ LoadRoot(result, Heap::kNanValueRootIndex);
  __ jmp(&done);

  __ bind(&need_conversion);
  // Load the undefined value into the result register, which will
  // trigger conversion.
  __ LoadRoot(result, Heap::kUndefinedValueRootIndex);
  __ jmp(&done);

  NopRuntimeCallHelper call_helper;
  generator.GenerateSlow(masm_, NOT_PART_OF_IC_HANDLER, call_helper);

  __ bind(&done);
  context()->Plug(result);
}


void FullCodeGenerator::EmitStringCharAt(CallRuntime* expr) {
  ZoneList<Expression*>* args = expr->arguments();
  DCHECK(args->length() == 2);

  VisitForStackValue(args->at(0));
  VisitForAccumulatorValue(args->at(1));
  __ mov(a0, result_register());

  Register object = a1;
  Register index = a0;
  Register scratch = a3;
  Register result = v0;

  PopOperand(object);

  Label need_conversion;
  Label index_out_of_range;
  Label done;
  StringCharAtGenerator generator(object,
                                  index,
                                  scratch,
                                  result,
                                  &need_conversion,
                                  &need_conversion,
                                  &index_out_of_range,
                                  STRING_INDEX_IS_NUMBER);
  generator.GenerateFast(masm_);
  __ jmp(&done);

  __ bind(&index_out_of_range);
  // When the index is out of range, the spec requires us to return
  // the empty string.
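  // For example, "abc".charAt(42) evaluates to "".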
  __ LoadRoot(result, Heap::kempty_stringRootIndex);
  __ jmp(&done);

  __ bind(&need_conversion);
  // Move smi zero into the result register, which will trigger
  // conversion.
  __ li(result, Operand(Smi::FromInt(0)));
  __ jmp(&done);

  NopRuntimeCallHelper call_helper;
  generator.GenerateSlow(masm_, NOT_PART_OF_IC_HANDLER, call_helper);

  __ bind(&done);
  context()->Plug(result);
}


void FullCodeGenerator::EmitCall(CallRuntime* expr) {
  ZoneList<Expression*>* args = expr->arguments();
  DCHECK_LE(2, args->length());
  // Push target, receiver and arguments onto the stack.
  for (Expression* const arg : *args) {
    VisitForStackValue(arg);
  }
  PrepareForBailoutForId(expr->CallId(), BailoutState::NO_REGISTERS);
  // Move target to a1.
  int const argc = args->length() - 2;
  __ ld(a1, MemOperand(sp, (argc + 1) * kPointerSize));
  // Call the target.
  __ li(a0, Operand(argc));
  __ Call(isolate()->builtins()->Call(), RelocInfo::CODE_TARGET);
  OperandStackDepthDecrement(argc + 1);
  RestoreContext();
  // Discard the function left on TOS.
  context()->DropAndPlug(1, v0);
}


void FullCodeGenerator::EmitHasCachedArrayIndex(CallRuntime* expr) {
  ZoneList<Expression*>* args = expr->arguments();
  VisitForAccumulatorValue(args->at(0));

  Label materialize_true, materialize_false;
  Label* if_true = NULL;
  Label* if_false = NULL;
  Label* fall_through = NULL;
  context()->PrepareTest(&materialize_true, &materialize_false,
                         &if_true, &if_false, &fall_through);

  __ lwu(a0, FieldMemOperand(v0, String::kHashFieldOffset));
  __ And(a0, a0, Operand(String::kContainsCachedArrayIndexMask));

  PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
  Split(eq, a0, Operand(zero_reg), if_true, if_false, fall_through);

  context()->Plug(if_true, if_false);
}


void FullCodeGenerator::EmitGetCachedArrayIndex(CallRuntime* expr) {
  ZoneList<Expression*>* args = expr->arguments();
  DCHECK(args->length() == 1);
  VisitForAccumulatorValue(args->at(0));

  __ AssertString(v0);

  __ lwu(v0, FieldMemOperand(v0, String::kHashFieldOffset));
  __ IndexFromHash(v0, v0);

  context()->Plug(v0);
}


void FullCodeGenerator::EmitGetSuperConstructor(CallRuntime* expr) {
  ZoneList<Expression*>* args = expr->arguments();
  DCHECK_EQ(1, args->length());
  VisitForAccumulatorValue(args->at(0));
  __ AssertFunction(v0);
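  // A function's [[Prototype]], read via its map, is its super constructor.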
  __ ld(v0, FieldMemOperand(v0, HeapObject::kMapOffset));
  __ ld(v0, FieldMemOperand(v0, Map::kPrototypeOffset));
  context()->Plug(v0);
}

void FullCodeGenerator::EmitDebugIsActive(CallRuntime* expr) {
  DCHECK(expr->arguments()->length() == 0);
  ExternalReference debug_is_active =
      ExternalReference::debug_is_active_address(isolate());
  __ li(at, Operand(debug_is_active));
  __ lbu(v0, MemOperand(at));
  __ SmiTag(v0);
  context()->Plug(v0);
}


void FullCodeGenerator::EmitCreateIterResultObject(CallRuntime* expr) {
  ZoneList<Expression*>* args = expr->arguments();
  DCHECK_EQ(2, args->length());
  VisitForStackValue(args->at(0));
  VisitForStackValue(args->at(1));

  Label runtime, done;

  __ Allocate(JSIteratorResult::kSize, v0, a2, a3, &runtime,
              NO_ALLOCATION_FLAGS);
  __ LoadNativeContextSlot(Context::ITERATOR_RESULT_MAP_INDEX, a1);
  __ Pop(a2, a3);
  __ LoadRoot(a4, Heap::kEmptyFixedArrayRootIndex);
  __ sd(a1, FieldMemOperand(v0, HeapObject::kMapOffset));
  __ sd(a4, FieldMemOperand(v0, JSObject::kPropertiesOffset));
  __ sd(a4, FieldMemOperand(v0, JSObject::kElementsOffset));
  __ sd(a2, FieldMemOperand(v0, JSIteratorResult::kValueOffset));
  __ sd(a3, FieldMemOperand(v0, JSIteratorResult::kDoneOffset));
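  // Map, properties, elements, value and done: five words in total.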
  STATIC_ASSERT(JSIteratorResult::kSize == 5 * kPointerSize);
  __ jmp(&done);

  __ bind(&runtime);
  CallRuntimeWithOperands(Runtime::kCreateIterResultObject);

  __ bind(&done);
  context()->Plug(v0);
}


void FullCodeGenerator::EmitLoadJSRuntimeFunction(CallRuntime* expr) {
  // Push function.
  __ LoadNativeContextSlot(expr->context_index(), v0);
  PushOperand(v0);

  // Push undefined as the receiver.
  __ LoadRoot(v0, Heap::kUndefinedValueRootIndex);
  PushOperand(v0);
}


void FullCodeGenerator::EmitCallJSRuntimeFunction(CallRuntime* expr) {
  ZoneList<Expression*>* args = expr->arguments();
  int arg_count = args->length();

  SetCallPosition(expr);
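  // The target function sits below the receiver and the arguments (pushed by
  // EmitLoadJSRuntimeFunction); fetch it back into a1.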
  __ ld(a1, MemOperand(sp, (arg_count + 1) * kPointerSize));
  __ li(a0, Operand(arg_count));
  __ Call(isolate()->builtins()->Call(ConvertReceiverMode::kNullOrUndefined),
          RelocInfo::CODE_TARGET);
  OperandStackDepthDecrement(arg_count + 1);
  RestoreContext();
}


void FullCodeGenerator::VisitUnaryOperation(UnaryOperation* expr) {
  switch (expr->op()) {
    case Token::DELETE: {
      Comment cmnt(masm_, "[ UnaryOperation (DELETE)");
      Property* property = expr->expression()->AsProperty();
      VariableProxy* proxy = expr->expression()->AsVariableProxy();

      if (property != NULL) {
        VisitForStackValue(property->obj());
        VisitForStackValue(property->key());
        CallRuntimeWithOperands(is_strict(language_mode())
                                    ? Runtime::kDeleteProperty_Strict
                                    : Runtime::kDeleteProperty_Sloppy);
        context()->Plug(v0);
      } else if (proxy != NULL) {
        Variable* var = proxy->var();
        // Delete of an unqualified identifier is disallowed in strict mode but
        // "delete this" is allowed.
        bool is_this = var->HasThisName(isolate());
        DCHECK(is_sloppy(language_mode()) || is_this);
        if (var->IsUnallocatedOrGlobalSlot()) {
          __ LoadGlobalObject(a2);
          __ li(a1, Operand(var->name()));
          __ Push(a2, a1);
          __ CallRuntime(Runtime::kDeleteProperty_Sloppy);
          context()->Plug(v0);
        } else if (var->IsStackAllocated() || var->IsContextSlot()) {
          // Result of deleting non-global, non-dynamic variables is false.
          // The subexpression does not have side effects.
          context()->Plug(is_this);
        } else {
          // Non-global variable. Call the runtime to try to delete from the
          // context where the variable was introduced.
          DCHECK(!context_register().is(a2));
          __ Push(var->name());
          __ CallRuntime(Runtime::kDeleteLookupSlot);
          context()->Plug(v0);
        }
      } else {
        // Result of deleting non-property, non-variable reference is true.
        // The subexpression may have side effects.
        VisitForEffect(expr->expression());
        context()->Plug(true);
      }
      break;
    }

    case Token::VOID: {
      Comment cmnt(masm_, "[ UnaryOperation (VOID)");
      VisitForEffect(expr->expression());
      context()->Plug(Heap::kUndefinedValueRootIndex);
      break;
    }

    case Token::NOT: {
      Comment cmnt(masm_, "[ UnaryOperation (NOT)");
      if (context()->IsEffect()) {
        // Unary NOT has no side effects so it's only necessary to visit the
        // subexpression. Match the optimizing compiler by not branching.
        VisitForEffect(expr->expression());
      } else if (context()->IsTest()) {
        const TestContext* test = TestContext::cast(context());
        // The labels are swapped for the recursive call.
        VisitForControl(expr->expression(),
                        test->false_label(),
                        test->true_label(),
                        test->fall_through());
        context()->Plug(test->true_label(), test->false_label());
      } else {
        // We handle value contexts explicitly rather than simply visiting
        // for control and plugging the control flow into the context,
        // because we need to prepare a pair of extra administrative AST ids
        // for the optimizing compiler.
        DCHECK(context()->IsAccumulatorValue() || context()->IsStackValue());
        Label materialize_true, materialize_false, done;
        VisitForControl(expr->expression(),
                        &materialize_false,
                        &materialize_true,
                        &materialize_true);
        if (!context()->IsAccumulatorValue()) OperandStackDepthIncrement(1);
        __ bind(&materialize_true);
        PrepareForBailoutForId(expr->MaterializeTrueId(),
                               BailoutState::NO_REGISTERS);
        __ LoadRoot(v0, Heap::kTrueValueRootIndex);
        if (context()->IsStackValue()) __ push(v0);
        __ jmp(&done);
        __ bind(&materialize_false);
        PrepareForBailoutForId(expr->MaterializeFalseId(),
                               BailoutState::NO_REGISTERS);
        __ LoadRoot(v0, Heap::kFalseValueRootIndex);
        if (context()->IsStackValue()) __ push(v0);
        __ bind(&done);
      }
      break;
    }

    case Token::TYPEOF: {
      Comment cmnt(masm_, "[ UnaryOperation (TYPEOF)");
      {
        AccumulatorValueContext context(this);
        VisitForTypeofValue(expr->expression());
      }
      __ mov(a3, v0);
      TypeofStub typeof_stub(isolate());
      __ CallStub(&typeof_stub);
      context()->Plug(v0);
      break;
    }

    default:
      UNREACHABLE();
  }
}


void FullCodeGenerator::VisitCountOperation(CountOperation* expr) {
  DCHECK(expr->expression()->IsValidReferenceExpressionOrThis());

  Comment cmnt(masm_, "[ CountOperation");

  Property* prop = expr->expression()->AsProperty();
  LhsKind assign_type = Property::GetAssignType(prop);

  // Evaluate expression and get value.
  if (assign_type == VARIABLE) {
    DCHECK(expr->expression()->AsVariableProxy()->var() != NULL);
    AccumulatorValueContext context(this);
    EmitVariableLoad(expr->expression()->AsVariableProxy());
  } else {
    // Reserve space for result of postfix operation.
    if (expr->is_postfix() && !context()->IsEffect()) {
      __ li(at, Operand(Smi::FromInt(0)));
      PushOperand(at);
    }
    switch (assign_type) {
      case NAMED_PROPERTY: {
        // Put the object both on the stack and in the register.
        VisitForStackValue(prop->obj());
        __ ld(LoadDescriptor::ReceiverRegister(), MemOperand(sp, 0));
        EmitNamedPropertyLoad(prop);
        break;
      }

      case NAMED_SUPER_PROPERTY: {
        VisitForStackValue(prop->obj()->AsSuperPropertyReference()->this_var());
        VisitForAccumulatorValue(
            prop->obj()->AsSuperPropertyReference()->home_object());
        PushOperand(result_register());
        const Register scratch = a1;
        __ ld(scratch, MemOperand(sp, kPointerSize));
        PushOperands(scratch, result_register());
        EmitNamedSuperPropertyLoad(prop);
        break;
      }

      case KEYED_SUPER_PROPERTY: {
        VisitForStackValue(prop->obj()->AsSuperPropertyReference()->this_var());
        VisitForAccumulatorValue(
            prop->obj()->AsSuperPropertyReference()->home_object());
        const Register scratch = a1;
        const Register scratch1 = a4;
        __ Move(scratch, result_register());
        VisitForAccumulatorValue(prop->key());
        PushOperands(scratch, result_register());
        __ ld(scratch1, MemOperand(sp, 2 * kPointerSize));
        PushOperands(scratch1, scratch, result_register());
        EmitKeyedSuperPropertyLoad(prop);
        break;
      }

      case KEYED_PROPERTY: {
        VisitForStackValue(prop->obj());
        VisitForStackValue(prop->key());
        __ ld(LoadDescriptor::ReceiverRegister(),
              MemOperand(sp, 1 * kPointerSize));
        __ ld(LoadDescriptor::NameRegister(), MemOperand(sp, 0));
        EmitKeyedPropertyLoad(prop);
        break;
      }

      case VARIABLE:
        UNREACHABLE();
    }
  }

  // We need a second deoptimization point after loading the value
  // in case evaluating the property load has a side effect.
  if (assign_type == VARIABLE) {
    PrepareForBailout(expr->expression(), BailoutState::TOS_REGISTER);
  } else {
    PrepareForBailoutForId(prop->LoadId(), BailoutState::TOS_REGISTER);
  }

  // Inline smi case if we are in a loop.
  Label stub_call, done;
  JumpPatchSite patch_site(masm_);

  int count_value = expr->op() == Token::INC ? 1 : -1;
  __ mov(a0, v0);
  if (ShouldInlineSmiCase(expr->op())) {
    Label slow;
    patch_site.EmitJumpIfNotSmi(v0, &slow);

    // Save result for postfix expressions.
    if (expr->is_postfix()) {
      if (!context()->IsEffect()) {
        // Save the result on the stack. If we have a named or keyed property
        // we store the result under the receiver that is currently on top
        // of the stack.
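        // The store depth matches the number of operands each property kind
        // keeps on the stack; it overwrites the slot reserved above.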
        switch (assign_type) {
          case VARIABLE:
            __ push(v0);
            break;
          case NAMED_PROPERTY:
            __ sd(v0, MemOperand(sp, kPointerSize));
            break;
          case NAMED_SUPER_PROPERTY:
            __ sd(v0, MemOperand(sp, 2 * kPointerSize));
            break;
          case KEYED_PROPERTY:
            __ sd(v0, MemOperand(sp, 2 * kPointerSize));
            break;
          case KEYED_SUPER_PROPERTY:
            __ sd(v0, MemOperand(sp, 3 * kPointerSize));
            break;
        }
      }
    }

    Register scratch1 = a1;
    __ li(scratch1, Operand(Smi::FromInt(count_value)));
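    // Add the smi delta in place and skip the stub on success; only on
    // overflow do we fall through to the stub call (undoing the add first).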
    __ DaddBranchNoOvf(v0, v0, Operand(scratch1), &done);
    // Call stub. Undo operation first.
    __ Move(v0, a0);
    __ jmp(&stub_call);
    __ bind(&slow);
  }

  // Convert old value into a number.
  ToNumberStub convert_stub(isolate());
  __ CallStub(&convert_stub);
  PrepareForBailoutForId(expr->ToNumberId(), BailoutState::TOS_REGISTER);

  // Save result for postfix expressions.
  if (expr->is_postfix()) {
    if (!context()->IsEffect()) {
      // Save the result on the stack. If we have a named or keyed property
      // we store the result under the receiver that is currently on top
      // of the stack.
      switch (assign_type) {
        case VARIABLE:
          PushOperand(v0);
          break;
        case NAMED_PROPERTY:
          __ sd(v0, MemOperand(sp, kPointerSize));
          break;
        case NAMED_SUPER_PROPERTY:
          __ sd(v0, MemOperand(sp, 2 * kPointerSize));
          break;
        case KEYED_PROPERTY:
          __ sd(v0, MemOperand(sp, 2 * kPointerSize));
          break;
        case KEYED_SUPER_PROPERTY:
          __ sd(v0, MemOperand(sp, 3 * kPointerSize));
          break;
      }
    }
  }

  __ bind(&stub_call);
  __ mov(a1, v0);
  __ li(a0, Operand(Smi::FromInt(count_value)));

  SetExpressionPosition(expr);

  Handle<Code> code = CodeFactory::BinaryOpIC(isolate(), Token::ADD).code();
  CallIC(code, expr->CountBinOpFeedbackId());
  patch_site.EmitPatchInfo();
  __ bind(&done);

  // Store the value returned in v0.
  switch (assign_type) {
    case VARIABLE:
      if (expr->is_postfix()) {
        { EffectContext context(this);
          EmitVariableAssignment(expr->expression()->AsVariableProxy()->var(),
                                 Token::ASSIGN, expr->CountSlot());
          PrepareForBailoutForId(expr->AssignmentId(),
                                 BailoutState::TOS_REGISTER);
          context.Plug(v0);
        }
        // For all contexts except EffectContext we have the result on
        // top of the stack.
        if (!context()->IsEffect()) {
          context()->PlugTOS();
        }
      } else {
        EmitVariableAssignment(expr->expression()->AsVariableProxy()->var(),
                               Token::ASSIGN, expr->CountSlot());
        PrepareForBailoutForId(expr->AssignmentId(),
                               BailoutState::TOS_REGISTER);
        context()->Plug(v0);
      }
      break;
    case NAMED_PROPERTY: {
      __ mov(StoreDescriptor::ValueRegister(), result_register());
      __ li(StoreDescriptor::NameRegister(),
            Operand(prop->key()->AsLiteral()->value()));
      PopOperand(StoreDescriptor::ReceiverRegister());
      EmitLoadStoreICSlot(expr->CountSlot());
      CallStoreIC();
      PrepareForBailoutForId(expr->AssignmentId(), BailoutState::TOS_REGISTER);
      if (expr->is_postfix()) {
        if (!context()->IsEffect()) {
          context()->PlugTOS();
        }
      } else {
        context()->Plug(v0);
      }
      break;
    }
    case NAMED_SUPER_PROPERTY: {
      EmitNamedSuperPropertyStore(prop);
      if (expr->is_postfix()) {
        if (!context()->IsEffect()) {
          context()->PlugTOS();
        }
      } else {
        context()->Plug(v0);
      }
      break;
    }
    case KEYED_SUPER_PROPERTY: {
      EmitKeyedSuperPropertyStore(prop);
      if (expr->is_postfix()) {
        if (!context()->IsEffect()) {
          context()->PlugTOS();
        }
      } else {
        context()->Plug(v0);
      }
      break;
    }
    case KEYED_PROPERTY: {
      __ mov(StoreDescriptor::ValueRegister(), result_register());
      PopOperands(StoreDescriptor::ReceiverRegister(),
                  StoreDescriptor::NameRegister());
      Handle<Code> ic =
          CodeFactory::KeyedStoreIC(isolate(), language_mode()).code();
      EmitLoadStoreICSlot(expr->CountSlot());
      CallIC(ic);
      PrepareForBailoutForId(expr->AssignmentId(), BailoutState::TOS_REGISTER);
      if (expr->is_postfix()) {
        if (!context()->IsEffect()) {
          context()->PlugTOS();
        }
      } else {
        context()->Plug(v0);
      }
      break;
    }
  }
}


void FullCodeGenerator::EmitLiteralCompareTypeof(Expression* expr,
                                                 Expression* sub_expr,
                                                 Handle<String> check) {
  Label materialize_true, materialize_false;
  Label* if_true = NULL;
  Label* if_false = NULL;
  Label* fall_through = NULL;
  context()->PrepareTest(&materialize_true, &materialize_false,
                         &if_true, &if_false, &fall_through);

  { AccumulatorValueContext context(this);
    VisitForTypeofValue(sub_expr);
  }
  PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);

  Factory* factory = isolate()->factory();
  if (String::Equals(check, factory->number_string())) {
    __ JumpIfSmi(v0, if_true);
    __ ld(v0, FieldMemOperand(v0, HeapObject::kMapOffset));
    __ LoadRoot(at, Heap::kHeapNumberMapRootIndex);
    Split(eq, v0, Operand(at), if_true, if_false, fall_through);
  } else if (String::Equals(check, factory->string_string())) {
    __ JumpIfSmi(v0, if_false);
    __ GetObjectType(v0, v0, a1);
    Split(lt, a1, Operand(FIRST_NONSTRING_TYPE), if_true, if_false,
          fall_through);
  } else if (String::Equals(check, factory->symbol_string())) {
    __ JumpIfSmi(v0, if_false);
    __ GetObjectType(v0, v0, a1);
    Split(eq, a1, Operand(SYMBOL_TYPE), if_true, if_false, fall_through);
  } else if (String::Equals(check, factory->boolean_string())) {
    __ LoadRoot(at, Heap::kTrueValueRootIndex);
    __ Branch(if_true, eq, v0, Operand(at));
    __ LoadRoot(at, Heap::kFalseValueRootIndex);
    Split(eq, v0, Operand(at), if_true, if_false, fall_through);
  } else if (String::Equals(check, factory->undefined_string())) {
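    // Rule out null first; undefined and undetectable objects then satisfy
    // the undetectable-bit test on the map below.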
    __ LoadRoot(at, Heap::kNullValueRootIndex);
    __ Branch(if_false, eq, v0, Operand(at));
    __ JumpIfSmi(v0, if_false);
    // Check for undetectable objects => true.
    __ ld(v0, FieldMemOperand(v0, HeapObject::kMapOffset));
    __ lbu(a1, FieldMemOperand(v0, Map::kBitFieldOffset));
    __ And(a1, a1, Operand(1 << Map::kIsUndetectable));
    Split(ne, a1, Operand(zero_reg), if_true, if_false, fall_through);
  } else if (String::Equals(check, factory->function_string())) {
    __ JumpIfSmi(v0, if_false);
    __ ld(v0, FieldMemOperand(v0, HeapObject::kMapOffset));
    __ lbu(a1, FieldMemOperand(v0, Map::kBitFieldOffset));
    __ And(a1, a1,
           Operand((1 << Map::kIsCallable) | (1 << Map::kIsUndetectable)));
    Split(eq, a1, Operand(1 << Map::kIsCallable), if_true, if_false,
          fall_through);
  } else if (String::Equals(check, factory->object_string())) {
    __ JumpIfSmi(v0, if_false);
    __ LoadRoot(at, Heap::kNullValueRootIndex);
    __ Branch(if_true, eq, v0, Operand(at));
    STATIC_ASSERT(LAST_JS_RECEIVER_TYPE == LAST_TYPE);
    __ GetObjectType(v0, v0, a1);
    __ Branch(if_false, lt, a1, Operand(FIRST_JS_RECEIVER_TYPE));
    // Check for callable or undetectable objects => false.
    __ lbu(a1, FieldMemOperand(v0, Map::kBitFieldOffset));
    __ And(a1, a1,
           Operand((1 << Map::kIsCallable) | (1 << Map::kIsUndetectable)));
    Split(eq, a1, Operand(zero_reg), if_true, if_false, fall_through);
// clang-format off
#define SIMD128_TYPE(TYPE, Type, type, lane_count, lane_type)   \
  } else if (String::Equals(check, factory->type##_string())) { \
    __ JumpIfSmi(v0, if_false);                                 \
    __ ld(v0, FieldMemOperand(v0, HeapObject::kMapOffset));     \
    __ LoadRoot(at, Heap::k##Type##MapRootIndex);               \
    Split(eq, v0, Operand(at), if_true, if_false, fall_through);
  SIMD128_TYPES(SIMD128_TYPE)
#undef SIMD128_TYPE
  // clang-format on
  } else {
    if (if_false != fall_through) __ jmp(if_false);
  }
  context()->Plug(if_true, if_false);
}


void FullCodeGenerator::VisitCompareOperation(CompareOperation* expr) {
  Comment cmnt(masm_, "[ CompareOperation");

  // First we try a fast inlined version of the compare when one of
  // the operands is a literal.
  if (TryLiteralCompare(expr)) return;

  // Always perform the comparison for its control flow. Pack the result
  // into the expression's context after the comparison is performed.
  Label materialize_true, materialize_false;
  Label* if_true = NULL;
  Label* if_false = NULL;
  Label* fall_through = NULL;
  context()->PrepareTest(&materialize_true, &materialize_false,
                         &if_true, &if_false, &fall_through);

  Token::Value op = expr->op();
  VisitForStackValue(expr->left());
  switch (op) {
    case Token::IN:
      VisitForStackValue(expr->right());
      SetExpressionPosition(expr);
      EmitHasProperty();
      PrepareForBailoutBeforeSplit(expr, false, NULL, NULL);
      __ LoadRoot(a4, Heap::kTrueValueRootIndex);
      Split(eq, v0, Operand(a4), if_true, if_false, fall_through);
      break;

    case Token::INSTANCEOF: {
      VisitForAccumulatorValue(expr->right());
      SetExpressionPosition(expr);
      __ mov(a0, result_register());
      PopOperand(a1);
      InstanceOfStub stub(isolate());
      __ CallStub(&stub);
      PrepareForBailoutBeforeSplit(expr, false, NULL, NULL);
      __ LoadRoot(a4, Heap::kTrueValueRootIndex);
      Split(eq, v0, Operand(a4), if_true, if_false, fall_through);
      break;
    }

    default: {
      VisitForAccumulatorValue(expr->right());
      SetExpressionPosition(expr);
      Condition cc = CompareIC::ComputeCondition(op);
      __ mov(a0, result_register());
      PopOperand(a1);

      bool inline_smi_code = ShouldInlineSmiCase(op);
      JumpPatchSite patch_site(masm_);
      if (inline_smi_code) {
        Label slow_case;
        __ Or(a2, a0, Operand(a1));
        patch_site.EmitJumpIfNotSmi(a2, &slow_case);
        Split(cc, a1, Operand(a0), if_true, if_false, NULL);
        __ bind(&slow_case);
      }

      Handle<Code> ic = CodeFactory::CompareIC(isolate(), op).code();
      CallIC(ic, expr->CompareOperationFeedbackId());
      patch_site.EmitPatchInfo();
      PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
      Split(cc, v0, Operand(zero_reg), if_true, if_false, fall_through);
    }
  }

  // Convert the result of the comparison into one expected for this
  // expression's context.
  context()->Plug(if_true, if_false);
}


void FullCodeGenerator::EmitLiteralCompareNil(CompareOperation* expr,
                                              Expression* sub_expr,
                                              NilValue nil) {
  Label materialize_true, materialize_false;
  Label* if_true = NULL;
  Label* if_false = NULL;
  Label* fall_through = NULL;
  context()->PrepareTest(&materialize_true, &materialize_false,
                         &if_true, &if_false, &fall_through);

  VisitForAccumulatorValue(sub_expr);
  PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
  if (expr->op() == Token::EQ_STRICT) {
    Heap::RootListIndex nil_value = nil == kNullValue ?
        Heap::kNullValueRootIndex :
        Heap::kUndefinedValueRootIndex;
    __ LoadRoot(a1, nil_value);
    Split(eq, v0, Operand(a1), if_true, if_false, fall_through);
  } else {
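    // A sloppy comparison against null/undefined is true for null, undefined
    // and undetectable objects, which this build marks via the map's
    // undetectable bit.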
    __ JumpIfSmi(v0, if_false);
    __ ld(v0, FieldMemOperand(v0, HeapObject::kMapOffset));
    __ lbu(a1, FieldMemOperand(v0, Map::kBitFieldOffset));
    __ And(a1, a1, Operand(1 << Map::kIsUndetectable));
    Split(ne, a1, Operand(zero_reg), if_true, if_false, fall_through);
  }
  context()->Plug(if_true, if_false);
}


Register FullCodeGenerator::result_register() {
  return v0;
}


Register FullCodeGenerator::context_register() {
  return cp;
}

void FullCodeGenerator::LoadFromFrameField(int frame_offset, Register value) {
  DCHECK(IsAligned(frame_offset, kPointerSize));
  __ ld(value, MemOperand(fp, frame_offset));
}

void FullCodeGenerator::StoreToFrameField(int frame_offset, Register value) {
  DCHECK(IsAligned(frame_offset, kPointerSize));
  __ sd(value, MemOperand(fp, frame_offset));
}


void FullCodeGenerator::LoadContextField(Register dst, int context_index) {
  __ ld(dst, ContextMemOperand(cp, context_index));
}


void FullCodeGenerator::PushFunctionArgumentForContextAllocation() {
  Scope* closure_scope = scope()->ClosureScope();
  if (closure_scope->is_script_scope() ||
      closure_scope->is_module_scope()) {
    // Contexts nested in the native context have a canonical empty function
    // as their closure, not the anonymous closure containing the global
    // code.
    __ LoadNativeContextSlot(Context::CLOSURE_INDEX, at);
  } else if (closure_scope->is_eval_scope()) {
    // Contexts created by a call to eval have the same closure as the
    // context calling eval, not the anonymous closure containing the eval
    // code. Fetch it from the context.
    __ ld(at, ContextMemOperand(cp, Context::CLOSURE_INDEX));
  } else {
    DCHECK(closure_scope->is_function_scope());
    __ ld(at, MemOperand(fp, JavaScriptFrameConstants::kFunctionOffset));
  }
  PushOperand(at);
}


// ----------------------------------------------------------------------------
// Non-local control flow support.

void FullCodeGenerator::EnterFinallyBlock() {
  DCHECK(!result_register().is(a1));
  // Store pending message while executing finally block.
  ExternalReference pending_message_obj =
      ExternalReference::address_of_pending_message_obj(isolate());
  __ li(at, Operand(pending_message_obj));
  __ ld(a1, MemOperand(at));
  PushOperand(a1);

  ClearPendingMessage();
}


void FullCodeGenerator::ExitFinallyBlock() {
  DCHECK(!result_register().is(a1));
  // Restore pending message from stack.
  PopOperand(a1);
  ExternalReference pending_message_obj =
      ExternalReference::address_of_pending_message_obj(isolate());
  __ li(at, Operand(pending_message_obj));
  __ sd(a1, MemOperand(at));
}


void FullCodeGenerator::ClearPendingMessage() {
  DCHECK(!result_register().is(a1));
  ExternalReference pending_message_obj =
      ExternalReference::address_of_pending_message_obj(isolate());
  __ LoadRoot(a1, Heap::kTheHoleValueRootIndex);
  __ li(at, Operand(pending_message_obj));
  __ sd(a1, MemOperand(at));
}


void FullCodeGenerator::DeferredCommands::EmitCommands() {
  __ Pop(result_register());  // Restore the accumulator.
  __ Pop(a1);                 // Get the token.
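  // Each deferred command (return, throw, break, continue) was recorded with
  // a distinct smi token; compare the popped token against each command and
  // dispatch to the matching handler.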
  for (DeferredCommand cmd : commands_) {
    Label skip;
    __ li(at, Operand(Smi::FromInt(cmd.token)));
    __ Branch(&skip, ne, a1, Operand(at));
    switch (cmd.command) {
      case kReturn:
        codegen_->EmitUnwindAndReturn();
        break;
      case kThrow:
        __ Push(result_register());
        __ CallRuntime(Runtime::kReThrow);
        break;
      case kContinue:
        codegen_->EmitContinue(cmd.target);
        break;
      case kBreak:
        codegen_->EmitBreak(cmd.target);
        break;
    }
    __ bind(&skip);
  }
}

#undef __


void BackEdgeTable::PatchAt(Code* unoptimized_code,
                            Address pc,
                            BackEdgeState target_state,
                            Code* replacement_code) {
  static const int kInstrSize = Assembler::kInstrSize;
  Address pc_immediate_load_address =
      Assembler::target_address_from_return_address(pc);
  Address branch_address = pc_immediate_load_address - 2 * kInstrSize;
  Isolate* isolate = unoptimized_code->GetIsolate();
  CodePatcher patcher(isolate, branch_address, 1);

  switch (target_state) {
    case INTERRUPT:
      // slt  at, a3, zero_reg (in case of count based interrupts)
      // beq  at, zero_reg, ok
      // lui  t9, <interrupt stub address> upper
      // ori  t9, <interrupt stub address> u-middle
      // dsll t9, t9, 16
      // ori  t9, <interrupt stub address> lower
      // jalr t9
      // nop
      // ok-label ----- pc_after points here
      patcher.masm()->slt(at, a3, zero_reg);
      break;
    case ON_STACK_REPLACEMENT:
      // addiu at, zero_reg, 1
      // beq   at, zero_reg, ok  ;; Not changed
      // lui   t9, <on-stack replacement address> upper
      // ori   t9, <on-stack replacement address> middle
      // dsll  t9, t9, 16
      // ori   t9, <on-stack replacement address> lower
      // jalr  t9  ;; Not changed
      // nop  ;; Not changed
      // ok-label ----- pc_after points here
      patcher.masm()->daddiu(at, zero_reg, 1);
      break;
  }
  // Replace the stack check address in the load-immediate (6-instr sequence)
  // with the entry address of the replacement code.
  Assembler::set_target_address_at(isolate, pc_immediate_load_address,
                                   replacement_code->entry());

  unoptimized_code->GetHeap()->incremental_marking()->RecordCodeTargetPatch(
      unoptimized_code, pc_immediate_load_address, replacement_code);
}


BackEdgeTable::BackEdgeState BackEdgeTable::GetBackEdgeState(
    Isolate* isolate,
    Code* unoptimized_code,
    Address pc) {
  static const int kInstrSize = Assembler::kInstrSize;
  Address pc_immediate_load_address =
      Assembler::target_address_from_return_address(pc);
  Address branch_address = pc_immediate_load_address - 2 * kInstrSize;

  DCHECK(Assembler::IsBeq(Assembler::instr_at(branch_address + kInstrSize)));
  if (!Assembler::IsAddImmediate(Assembler::instr_at(branch_address))) {
    DCHECK(reinterpret_cast<uint64_t>(
               Assembler::target_address_at(pc_immediate_load_address)) ==
           reinterpret_cast<uint64_t>(
               isolate->builtins()->InterruptCheck()->entry()));
    return INTERRUPT;
  }

  DCHECK(Assembler::IsAddImmediate(Assembler::instr_at(branch_address)));

  DCHECK(reinterpret_cast<uint64_t>(
             Assembler::target_address_at(pc_immediate_load_address)) ==
         reinterpret_cast<uint64_t>(
             isolate->builtins()->OnStackReplacement()->entry()));
  return ON_STACK_REPLACEMENT;
}


}  // namespace internal
}  // namespace v8

#endif  // V8_TARGET_ARCH_MIPS64