blob: c1a829148048addf289e22c3f97b4f3942154f4d [file] [log] [blame]
Ben Murdoch3ef787d2012-04-12 10:51:47 +01001// Copyright 2012 the V8 project authors. All rights reserved.
Ben Murdochb8a8cc12014-11-26 15:28:44 +00002// Use of this source code is governed by a BSD-style license that can be
3// found in the LICENSE file.
Andrei Popescu31002712010-02-23 13:46:05 +00004
Ben Murdochb8a8cc12014-11-26 15:28:44 +00005#include "src/v8.h"
Andrei Popescu31002712010-02-23 13:46:05 +00006
Ben Murdochb8a8cc12014-11-26 15:28:44 +00007#if V8_TARGET_ARCH_MIPS
Leon Clarkef7060e22010-06-03 12:02:55 +01008
Steve Block44f0eee2011-05-26 01:26:41 +01009// Note on Mips implementation:
10//
11// The result_register() for mips is the 'v0' register, which is defined
12// by the ABI to contain function return values. However, the first
13// parameter to a function is defined to be 'a0'. So there are many
14// places where we have to move a previous result in v0 to a0 for the
15// next call: mov(a0, v0). This is not needed on the other architectures.
16
Ben Murdochb8a8cc12014-11-26 15:28:44 +000017#include "src/code-factory.h"
18#include "src/code-stubs.h"
19#include "src/codegen.h"
20#include "src/compiler.h"
21#include "src/debug.h"
22#include "src/full-codegen.h"
23#include "src/ic/ic.h"
24#include "src/isolate-inl.h"
25#include "src/parser.h"
26#include "src/scopes.h"
Steve Block44f0eee2011-05-26 01:26:41 +010027
Ben Murdochb8a8cc12014-11-26 15:28:44 +000028#include "src/mips/code-stubs-mips.h"
29#include "src/mips/macro-assembler-mips.h"
Andrei Popescu31002712010-02-23 13:46:05 +000030
31namespace v8 {
32namespace internal {
33
34#define __ ACCESS_MASM(masm_)
35
Ben Murdoch257744e2011-11-30 15:57:28 +000036
// A patch site is a location in the code which it is possible to patch. This
// class has a number of methods to emit the code which is patchable and the
// method EmitPatchInfo to record a marker back to the patchable code. This
// marker is an andi zero_reg, rx, #yyyy instruction, and rx * 0x0000ffff + yyyy
// (raw 16 bit immediate value is used) is the delta from the pc to the first
// instruction of the patchable code.
// The marker instruction is effectively a NOP (dest is zero_reg) and will
// never be emitted by normal code.
// Records one patchable inlined-smi-check site and later emits the marker
// instruction that back-references it (see the comment above for the marker
// encoding). Exactly one of the EmitJumpIf* methods may be called per site.
class JumpPatchSite BASE_EMBEDDED {
 public:
  explicit JumpPatchSite(MacroAssembler* masm) : masm_(masm) {
#ifdef DEBUG
    info_emitted_ = false;
#endif
  }

  ~JumpPatchSite() {
    // A bound patch site must have had its patch info emitted, and vice versa.
    DCHECK(patch_site_.is_bound() == info_emitted_);
  }

  // When initially emitting this ensure that a jump is always generated to skip
  // the inlined smi code.
  void EmitJumpIfNotSmi(Register reg, Label* target) {
    DCHECK(!patch_site_.is_bound() && !info_emitted_);
    // The andi + branch pair must stay contiguous so the patcher can find it.
    Assembler::BlockTrampolinePoolScope block_trampoline_pool(masm_);
    __ bind(&patch_site_);
    __ andi(at, reg, 0);  // at == 0 until patched with a real smi-tag mask.
    // Always taken before patched.
    __ BranchShort(target, eq, at, Operand(zero_reg));
  }

  // When initially emitting this ensure that a jump is never generated to skip
  // the inlined smi code.
  void EmitJumpIfSmi(Register reg, Label* target) {
    Assembler::BlockTrampolinePoolScope block_trampoline_pool(masm_);
    DCHECK(!patch_site_.is_bound() && !info_emitted_);
    __ bind(&patch_site_);
    __ andi(at, reg, 0);  // at == 0 until patched with a real smi-tag mask.
    // Never taken before patched.
    __ BranchShort(target, ne, at, Operand(zero_reg));
  }

  // Emits the marker instruction encoding the distance back to the patch
  // site, or a nop if no patchable code was emitted.
  void EmitPatchInfo() {
    if (patch_site_.is_bound()) {
      int delta_to_patch_site = masm_->InstructionsGeneratedSince(&patch_site_);
      // Encode the delta in the register field and the 16-bit immediate.
      Register reg = Register::from_code(delta_to_patch_site / kImm16Mask);
      __ andi(zero_reg, reg, delta_to_patch_site % kImm16Mask);
#ifdef DEBUG
      info_emitted_ = true;
#endif
    } else {
      __ nop();  // Signals no inlined code.
    }
  }

 private:
  MacroAssembler* masm_;
  Label patch_site_;   // Bound at the first instruction of the inlined check.
#ifdef DEBUG
  bool info_emitted_;  // Debug-only: ensures EmitPatchInfo() was called.
#endif
};
99
100
// Generate code for a JS function. On entry to the function the receiver
// and arguments have been pushed on the stack left to right. The actual
// argument count matches the formal parameter count expected by the
// function.
//
// The live registers are:
//   o a1: the JS function object being called (i.e. ourselves)
//   o cp: our context
//   o fp: our caller's frame pointer
//   o sp: stack pointer
//   o ra: return address
//
// The function builds a JS frame. Please see JavaScriptFrameConstants in
// frames-mips.h for its layout.
void FullCodeGenerator::Generate() {
  CompilationInfo* info = info_;
  handler_table_ =
      isolate()->factory()->NewFixedArray(function()->handler_count(), TENURED);

  // Counter used to decide when to tier up / interrupt (see
  // EmitProfilingCounterDecrement/Reset below).
  profiling_counter_ = isolate()->factory()->NewCell(
      Handle<Smi>(Smi::FromInt(FLAG_interrupt_budget), isolate()));
  SetFunctionPosition(function());
  Comment cmnt(masm_, "[ function compiled by full code generator");

  ProfileEntryHookStub::MaybeCallEntryHook(masm_);

#ifdef DEBUG
  // Debug aid: break into the simulator/debugger at a named function.
  if (strlen(FLAG_stop_at) > 0 &&
      info->function()->name()->IsUtf8EqualTo(CStrVector(FLAG_stop_at))) {
    __ stop("stop-at");
  }
#endif

  // Sloppy mode functions and builtins need to replace the receiver with the
  // global proxy when called as functions (without an explicit receiver
  // object).
  if (info->strict_mode() == SLOPPY && !info->is_native()) {
    Label ok;
    int receiver_offset = info->scope()->num_parameters() * kPointerSize;
    __ lw(at, MemOperand(sp, receiver_offset));
    __ LoadRoot(a2, Heap::kUndefinedValueRootIndex);
    // Only an undefined receiver is replaced.
    __ Branch(&ok, ne, a2, Operand(at));

    __ lw(a2, GlobalObjectOperand());
    __ lw(a2, FieldMemOperand(a2, GlobalObject::kGlobalProxyOffset));

    __ sw(a2, MemOperand(sp, receiver_offset));

    __ bind(&ok);
  }

  // Open a frame scope to indicate that there is a frame on the stack.  The
  // MANUAL indicates that the scope shouldn't actually generate code to set up
  // the frame (that is done below).
  FrameScope frame_scope(masm_, StackFrame::MANUAL);

  info->set_prologue_offset(masm_->pc_offset());
  __ Prologue(info->IsCodePreAgingActive());
  info->AddNoFrameRange(0, masm_->pc_offset());

  { Comment cmnt(masm_, "[ Allocate locals");
    int locals_count = info->scope()->num_stack_slots();
    // Generators allocate locals, if any, in context slots.
    DCHECK(!info->function()->is_generator() || locals_count == 0);
    if (locals_count > 0) {
      if (locals_count >= 128) {
        // Many locals: check for stack overflow before reserving the space.
        Label ok;
        __ Subu(t5, sp, Operand(locals_count * kPointerSize));
        __ LoadRoot(a2, Heap::kRealStackLimitRootIndex);
        __ Branch(&ok, hs, t5, Operand(a2));
        __ InvokeBuiltin(Builtins::STACK_OVERFLOW, CALL_FUNCTION);
        __ bind(&ok);
      }
      // All locals are initialized to undefined.
      __ LoadRoot(t5, Heap::kUndefinedValueRootIndex);
      int kMaxPushes = FLAG_optimize_for_size ? 4 : 32;
      if (locals_count >= kMaxPushes) {
        // Emit a loop that pushes kMaxPushes slots per iteration.
        int loop_iterations = locals_count / kMaxPushes;
        __ li(a2, Operand(loop_iterations));
        Label loop_header;
        __ bind(&loop_header);
        // Do pushes.
        __ Subu(sp, sp, Operand(kMaxPushes * kPointerSize));
        for (int i = 0; i < kMaxPushes; i++) {
          __ sw(t5, MemOperand(sp, i * kPointerSize));
        }
        // Continue loop if not done.
        __ Subu(a2, a2, Operand(1));
        __ Branch(&loop_header, ne, a2, Operand(zero_reg));
      }
      int remaining = locals_count % kMaxPushes;
      // Emit the remaining pushes.
      __ Subu(sp, sp, Operand(remaining * kPointerSize));
      for (int i = 0; i < remaining; i++) {
        __ sw(t5, MemOperand(sp, i * kPointerSize));
      }
    }
  }

  // Tracks whether a1 still holds the function; the context-allocating
  // calls below clobber it.
  bool function_in_register = true;

  // Possibly allocate a local context.
  int heap_slots = info->scope()->num_heap_slots() - Context::MIN_CONTEXT_SLOTS;
  if (heap_slots > 0) {
    Comment cmnt(masm_, "[ Allocate context");
    // Argument to NewContext is the function, which is still in a1.
    bool need_write_barrier = true;
    if (FLAG_harmony_scoping && info->scope()->is_script_scope()) {
      __ push(a1);
      __ Push(info->scope()->GetScopeInfo());
      __ CallRuntime(Runtime::kNewScriptContext, 2);
    } else if (heap_slots <= FastNewContextStub::kMaximumSlots) {
      FastNewContextStub stub(isolate(), heap_slots);
      __ CallStub(&stub);
      // Result of FastNewContextStub is always in new space.
      need_write_barrier = false;
    } else {
      __ push(a1);
      __ CallRuntime(Runtime::kNewFunctionContext, 1);
    }
    function_in_register = false;
    // Context is returned in v0. It replaces the context passed to us.
    // It's saved in the stack and kept live in cp.
    __ mov(cp, v0);
    __ sw(v0, MemOperand(fp, StandardFrameConstants::kContextOffset));
    // Copy any necessary parameters into the context.
    int num_parameters = info->scope()->num_parameters();
    for (int i = 0; i < num_parameters; i++) {
      Variable* var = scope()->parameter(i);
      if (var->IsContextSlot()) {
        int parameter_offset = StandardFrameConstants::kCallerSPOffset +
            (num_parameters - 1 - i) * kPointerSize;
        // Load parameter from stack.
        __ lw(a0, MemOperand(fp, parameter_offset));
        // Store it in the context.
        MemOperand target = ContextOperand(cp, var->index());
        __ sw(a0, target);

        // Update the write barrier.
        if (need_write_barrier) {
          __ RecordWriteContextSlot(
              cp, target.offset(), a0, a3, kRAHasBeenSaved, kDontSaveFPRegs);
        } else if (FLAG_debug_code) {
          // Debug-only check that skipping the barrier was legitimate.
          Label done;
          __ JumpIfInNewSpace(cp, a0, &done);
          __ Abort(kExpectedNewSpaceObject);
          __ bind(&done);
        }
      }
    }
  }

  Variable* arguments = scope()->arguments();
  if (arguments != NULL) {
    // Function uses arguments object.
    Comment cmnt(masm_, "[ Allocate arguments object");
    if (!function_in_register) {
      // Load this again, if it's used by the local context below.
      __ lw(a3, MemOperand(fp, JavaScriptFrameConstants::kFunctionOffset));
    } else {
      __ mov(a3, a1);
    }
    // Receiver is just before the parameters on the caller's stack.
    int num_parameters = info->scope()->num_parameters();
    int offset = num_parameters * kPointerSize;
    __ Addu(a2, fp,
            Operand(StandardFrameConstants::kCallerSPOffset + offset));
    __ li(a1, Operand(Smi::FromInt(num_parameters)));
    __ Push(a3, a2, a1);

    // Arguments to ArgumentsAccessStub:
    //   function, receiver address, parameter count.
    // The stub will rewrite receiver and parameter count if the previous
    // stack frame was an arguments adapter frame.
    ArgumentsAccessStub::Type type;
    if (strict_mode() == STRICT) {
      type = ArgumentsAccessStub::NEW_STRICT;
    } else if (function()->has_duplicate_parameters()) {
      type = ArgumentsAccessStub::NEW_SLOPPY_SLOW;
    } else {
      type = ArgumentsAccessStub::NEW_SLOPPY_FAST;
    }
    ArgumentsAccessStub stub(isolate(), type);
    __ CallStub(&stub);

    SetVar(arguments, v0, a1, a2);
  }

  if (FLAG_trace) {
    __ CallRuntime(Runtime::kTraceEnter, 0);
  }

  // Visit the declarations and body unless there is an illegal
  // redeclaration.
  if (scope()->HasIllegalRedeclaration()) {
    Comment cmnt(masm_, "[ Declarations");
    scope()->VisitIllegalRedeclaration(this);

  } else {
    PrepareForBailoutForId(BailoutId::FunctionEntry(), NO_REGISTERS);
    { Comment cmnt(masm_, "[ Declarations");
      // For named function expressions, declare the function name as a
      // constant.
      if (scope()->is_function_scope() && scope()->function() != NULL) {
        VariableDeclaration* function = scope()->function();
        DCHECK(function->proxy()->var()->mode() == CONST ||
               function->proxy()->var()->mode() == CONST_LEGACY);
        DCHECK(function->proxy()->var()->location() != Variable::UNALLOCATED);
        VisitVariableDeclaration(function);
      }
      VisitDeclarations(scope()->declarations());
    }

    { Comment cmnt(masm_, "[ Stack check");
      PrepareForBailoutForId(BailoutId::Declarations(), NO_REGISTERS);
      Label ok;
      __ LoadRoot(at, Heap::kStackLimitRootIndex);
      __ Branch(&ok, hs, sp, Operand(at));
      Handle<Code> stack_check = isolate()->builtins()->StackCheck();
      // The call size must be predictable so the check sequence can be
      // patched (see back-edge patching).
      PredictableCodeSizeScope predictable(masm_,
          masm_->CallSize(stack_check, RelocInfo::CODE_TARGET));
      __ Call(stack_check, RelocInfo::CODE_TARGET);
      __ bind(&ok);
    }

    { Comment cmnt(masm_, "[ Body");
      DCHECK(loop_depth() == 0);
      VisitStatements(function()->body());
      DCHECK(loop_depth() == 0);
    }
  }

  // Always emit a 'return undefined' in case control fell off the end of
  // the body.
  { Comment cmnt(masm_, "[ return <undefined>;");
    __ LoadRoot(v0, Heap::kUndefinedValueRootIndex);
  }
  EmitReturnSequence();
}
339
340
// Clears the accumulator (v0) by storing the smi zero in it.
void FullCodeGenerator::ClearAccumulator() {
  // Smi zero has the same bit pattern as the integer 0, so a plain
  // register clear suffices.
  DCHECK(Smi::FromInt(0) == 0);
  __ mov(v0, zero_reg);
}
345
346
// Decrements the profiling counter cell by |delta| (a smi amount).
// Clobbers a2 (cell address) and leaves the new counter value in a3,
// which callers test to decide whether to call the interrupt builtin.
void FullCodeGenerator::EmitProfilingCounterDecrement(int delta) {
  __ li(a2, Operand(profiling_counter_));
  __ lw(a3, FieldMemOperand(a2, Cell::kValueOffset));
  __ Subu(a3, a3, Operand(Smi::FromInt(delta)));
  __ sw(a3, FieldMemOperand(a2, Cell::kValueOffset));
}
353
354
355void FullCodeGenerator::EmitProfilingCounterReset() {
356 int reset_value = FLAG_interrupt_budget;
Ben Murdochb8a8cc12014-11-26 15:28:44 +0000357 if (info_->is_debug()) {
Ben Murdoch3ef787d2012-04-12 10:51:47 +0100358 // Detect debug break requests as soon as possible.
Ben Murdochb8a8cc12014-11-26 15:28:44 +0000359 reset_value = FLAG_interrupt_budget >> 4;
Ben Murdoch3ef787d2012-04-12 10:51:47 +0100360 }
361 __ li(a2, Operand(profiling_counter_));
362 __ li(a3, Operand(Smi::FromInt(reset_value)));
Ben Murdochb8a8cc12014-11-26 15:28:44 +0000363 __ sw(a3, FieldMemOperand(a2, Cell::kValueOffset));
Ben Murdoch3ef787d2012-04-12 10:51:47 +0100364}
365
366
// Emits the back-edge interrupt check for a loop: decrements the profiling
// counter by a weight proportional to the loop body size and calls the
// InterruptCheck builtin when the counter goes negative.
void FullCodeGenerator::EmitBackEdgeBookkeeping(IterationStatement* stmt,
                                                Label* back_edge_target) {
  // The generated code is used in Deoptimizer::PatchStackCheckCodeAt so we need
  // to make sure it is constant. Branch may emit a skip-or-jump sequence
  // instead of the normal Branch. It seems that the "skip" part of that
  // sequence is about as long as this Branch would be so it is safe to ignore
  // that.
  Assembler::BlockTrampolinePoolScope block_trampoline_pool(masm_);
  Comment cmnt(masm_, "[ Back edge bookkeeping");
  Label ok;
  DCHECK(back_edge_target->is_bound());
  // Weight the decrement by the amount of code in the loop body.
  int distance = masm_->SizeOfCodeGeneratedSince(back_edge_target);
  int weight = Min(kMaxBackEdgeWeight,
                   Max(1, distance / kCodeSizeMultiplier));
  EmitProfilingCounterDecrement(weight);
  // Skip the interrupt call while the counter (in a3) is non-negative.
  __ slt(at, a3, zero_reg);
  __ beq(at, zero_reg, &ok);
  // Call will emit a li t9 first, so it is safe to use the delay slot.
  __ Call(isolate()->builtins()->InterruptCheck(), RelocInfo::CODE_TARGET);
  // Record a mapping of this PC offset to the OSR id.  This is used to find
  // the AST id from the unoptimized code in order to use it as a key into
  // the deoptimization input data found in the optimized code.
  RecordBackEdge(stmt->OsrEntryId());
  EmitProfilingCounterReset();

  __ bind(&ok);
  PrepareForBailoutForId(stmt->EntryId(), NO_REGISTERS);
  // Record a mapping of the OSR id to this PC.  This is used if the OSR
  // entry becomes the target of a bailout.  We don't expect it to be, but
  // we want it to work if it is.
  PrepareForBailoutForId(stmt->OsrEntryId(), NO_REGISTERS);
}
399
400
// Emits the function epilogue: the first call binds return_label_ and emits
// the full (fixed-size, debugger-patchable) return sequence; subsequent
// calls just branch to it. The return value is expected in v0.
void FullCodeGenerator::EmitReturnSequence() {
  Comment cmnt(masm_, "[ Return sequence");
  if (return_label_.is_bound()) {
    __ Branch(&return_label_);
  } else {
    __ bind(&return_label_);
    if (FLAG_trace) {
      // Push the return value on the stack as the parameter.
      // Runtime::TraceExit returns its parameter in v0.
      __ push(v0);
      __ CallRuntime(Runtime::kTraceExit, 1);
    }
    // Pretend that the exit is a backwards jump to the entry.
    int weight = 1;
    if (info_->ShouldSelfOptimize()) {
      weight = FLAG_interrupt_budget / FLAG_self_opt_count;
    } else {
      int distance = masm_->pc_offset();
      weight = Min(kMaxBackEdgeWeight,
                   Max(1, distance / kCodeSizeMultiplier));
    }
    EmitProfilingCounterDecrement(weight);
    Label ok;
    // Counter (a3) still non-negative: no interrupt needed.
    __ Branch(&ok, ge, a3, Operand(zero_reg));
    // Preserve the return value across the interrupt call.
    __ push(v0);
    __ Call(isolate()->builtins()->InterruptCheck(),
            RelocInfo::CODE_TARGET);
    __ pop(v0);
    EmitProfilingCounterReset();
    __ bind(&ok);

#ifdef DEBUG
    // Add a label for checking the size of the code used for returning.
    Label check_exit_codesize;
    masm_->bind(&check_exit_codesize);
#endif
    // Make sure that the constant pool is not emitted inside of the return
    // sequence.
    { Assembler::BlockTrampolinePoolScope block_trampoline_pool(masm_);
      // Here we use masm_-> instead of the __ macro to avoid the code coverage
      // tool from instrumenting as we rely on the code size here.
      int32_t sp_delta = (info_->scope()->num_parameters() + 1) * kPointerSize;
      CodeGenerator::RecordPositions(masm_, function()->end_position() - 1);
      __ RecordJSReturn();
      masm_->mov(sp, fp);
      int no_frame_start = masm_->pc_offset();
      masm_->MultiPop(static_cast<RegList>(fp.bit() | ra.bit()));
      masm_->Addu(sp, sp, Operand(sp_delta));
      masm_->Jump(ra);
      info_->AddNoFrameRange(no_frame_start, masm_->pc_offset());
    }

#ifdef DEBUG
    // Check that the size of the code used for returning is large enough
    // for the debugger's requirements.
    DCHECK(Assembler::kJSReturnSequenceInstructions <=
           masm_->InstructionsGeneratedSince(&check_exit_codesize));
#endif
  }
}
461
462
// Effect context: the variable's value is not needed, so emit nothing.
void FullCodeGenerator::EffectContext::Plug(Variable* var) const {
  DCHECK(var->IsStackAllocated() || var->IsContextSlot());
}
466
467
// Accumulator context: load the variable's value into the result
// register (v0).
void FullCodeGenerator::AccumulatorValueContext::Plug(Variable* var) const {
  DCHECK(var->IsStackAllocated() || var->IsContextSlot());
  codegen()->GetVar(result_register(), var);
}
472
473
// Stack context: load the variable's value and push it onto the stack.
void FullCodeGenerator::StackValueContext::Plug(Variable* var) const {
  DCHECK(var->IsStackAllocated() || var->IsContextSlot());
  codegen()->GetVar(result_register(), var);
  __ push(result_register());
}
479
480
// Test context: load the variable and branch on its boolean value.
void FullCodeGenerator::TestContext::Plug(Variable* var) const {
  // For simplicity we always test the accumulator register.
  codegen()->GetVar(result_register(), var);
  codegen()->PrepareForBailoutBeforeSplit(condition(), false, NULL, NULL);
  codegen()->DoTest(this);
}
487
488
// Effect context: a root constant has no side effects, so emit nothing.
void FullCodeGenerator::EffectContext::Plug(Heap::RootListIndex index) const {
}
491
492
// Accumulator context: load the root constant into the result register.
void FullCodeGenerator::AccumulatorValueContext::Plug(
    Heap::RootListIndex index) const {
  __ LoadRoot(result_register(), index);
}
497
498
// Stack context: load the root constant and push it onto the stack.
void FullCodeGenerator::StackValueContext::Plug(
    Heap::RootListIndex index) const {
  __ LoadRoot(result_register(), index);
  __ push(result_register());
}
504
505
// Test context: branch directly when the root constant's truthiness is
// statically known; otherwise fall back to a dynamic boolean test.
void FullCodeGenerator::TestContext::Plug(Heap::RootListIndex index) const {
  codegen()->PrepareForBailoutBeforeSplit(condition(),
                                          true,
                                          true_label_,
                                          false_label_);
  if (index == Heap::kUndefinedValueRootIndex ||
      index == Heap::kNullValueRootIndex ||
      index == Heap::kFalseValueRootIndex) {
    // Statically falsy roots.
    if (false_label_ != fall_through_) __ Branch(false_label_);
  } else if (index == Heap::kTrueValueRootIndex) {
    if (true_label_ != fall_through_) __ Branch(true_label_);
  } else {
    // Truthiness not known statically: materialize and test.
    __ LoadRoot(result_register(), index);
    codegen()->DoTest(this);
  }
}
522
523
// Effect context: a literal has no side effects, so emit nothing.
void FullCodeGenerator::EffectContext::Plug(Handle<Object> lit) const {
}
526
527
// Accumulator context: load the literal into the result register.
void FullCodeGenerator::AccumulatorValueContext::Plug(
    Handle<Object> lit) const {
  __ li(result_register(), Operand(lit));
}
532
533
// Stack context: materialize the literal and push it.
void FullCodeGenerator::StackValueContext::Plug(Handle<Object> lit) const {
  // Immediates cannot be pushed directly.
  __ li(result_register(), Operand(lit));
  __ push(result_register());
}
539
540
// Test context: branch directly when the literal's truthiness is statically
// known (ToBoolean semantics); otherwise fall back to a dynamic test.
void FullCodeGenerator::TestContext::Plug(Handle<Object> lit) const {
  codegen()->PrepareForBailoutBeforeSplit(condition(),
                                          true,
                                          true_label_,
                                          false_label_);
  DCHECK(!lit->IsUndetectableObject());  // There are no undetectable literals.
  if (lit->IsUndefined() || lit->IsNull() || lit->IsFalse()) {
    // Statically falsy literals.
    if (false_label_ != fall_through_) __ Branch(false_label_);
  } else if (lit->IsTrue() || lit->IsJSObject()) {
    // Statically truthy literals.
    if (true_label_ != fall_through_) __ Branch(true_label_);
  } else if (lit->IsString()) {
    // A string is falsy iff it is empty.
    if (String::cast(*lit)->length() == 0) {
      if (false_label_ != fall_through_) __ Branch(false_label_);
    } else {
      if (true_label_ != fall_through_) __ Branch(true_label_);
    }
  } else if (lit->IsSmi()) {
    // A smi is falsy iff it is zero.
    if (Smi::cast(*lit)->value() == 0) {
      if (false_label_ != fall_through_) __ Branch(false_label_);
    } else {
      if (true_label_ != fall_through_) __ Branch(true_label_);
    }
  } else {
    // For simplicity we always test the accumulator register.
    __ li(result_register(), Operand(lit));
    codegen()->DoTest(this);
  }
}
569
570
// Effect context: drop |count| stack slots; the value in |reg| is unused.
void FullCodeGenerator::EffectContext::DropAndPlug(int count,
                                                   Register reg) const {
  DCHECK(count > 0);
  __ Drop(count);
}
576
577
// Accumulator context: drop |count| stack slots and move the value in
// |reg| into the result register.
void FullCodeGenerator::AccumulatorValueContext::DropAndPlug(
    int count,
    Register reg) const {
  DCHECK(count > 0);
  __ Drop(count);
  __ Move(result_register(), reg);
}
585
586
// Stack context: replace the top |count| stack slots with the value in
// |reg| by dropping count-1 slots and overwriting the remaining top slot.
void FullCodeGenerator::StackValueContext::DropAndPlug(int count,
                                                       Register reg) const {
  DCHECK(count > 0);
  if (count > 1) __ Drop(count - 1);
  __ sw(reg, MemOperand(sp, 0));
}
593
594
// Test context: drop |count| stack slots, then branch on the boolean
// value of |reg|.
void FullCodeGenerator::TestContext::DropAndPlug(int count,
                                                 Register reg) const {
  DCHECK(count > 0);
  // For simplicity we always test the accumulator register.
  __ Drop(count);
  __ Move(result_register(), reg);
  codegen()->PrepareForBailoutBeforeSplit(condition(), false, NULL, NULL);
  codegen()->DoTest(this);
}
604
605
// Effect context: no value is materialized; just bind the (single) label.
void FullCodeGenerator::EffectContext::Plug(Label* materialize_true,
                                            Label* materialize_false) const {
  DCHECK(materialize_true == materialize_false);
  __ bind(materialize_true);
}
611
612
// Accumulator context: materialize true/false into the result register at
// the respective labels, then join.
void FullCodeGenerator::AccumulatorValueContext::Plug(
    Label* materialize_true,
    Label* materialize_false) const {
  Label done;
  __ bind(materialize_true);
  __ LoadRoot(result_register(), Heap::kTrueValueRootIndex);
  __ Branch(&done);
  __ bind(materialize_false);
  __ LoadRoot(result_register(), Heap::kFalseValueRootIndex);
  __ bind(&done);
}
624
625
// Stack context: push true/false at the respective labels, then join.
void FullCodeGenerator::StackValueContext::Plug(
    Label* materialize_true,
    Label* materialize_false) const {
  Label done;
  __ bind(materialize_true);
  __ LoadRoot(at, Heap::kTrueValueRootIndex);
  // Push the value as the following branch can clobber at in long branch mode.
  __ push(at);
  __ Branch(&done);
  __ bind(materialize_false);
  __ LoadRoot(at, Heap::kFalseValueRootIndex);
  __ push(at);
  __ bind(&done);
}
640
641
// Test context: nothing to materialize — control flow already targets the
// test's own labels; just verify they match.
void FullCodeGenerator::TestContext::Plug(Label* materialize_true,
                                          Label* materialize_false) const {
  DCHECK(materialize_true == true_label_);
  DCHECK(materialize_false == false_label_);
}
647
648
// Effect context: a boolean constant has no side effects, so emit nothing.
void FullCodeGenerator::EffectContext::Plug(bool flag) const {
}
651
652
653void FullCodeGenerator::AccumulatorValueContext::Plug(bool flag) const {
Ben Murdoch257744e2011-11-30 15:57:28 +0000654 Heap::RootListIndex value_root_index =
655 flag ? Heap::kTrueValueRootIndex : Heap::kFalseValueRootIndex;
656 __ LoadRoot(result_register(), value_root_index);
Steve Block44f0eee2011-05-26 01:26:41 +0100657}
658
659
660void FullCodeGenerator::StackValueContext::Plug(bool flag) const {
Ben Murdoch257744e2011-11-30 15:57:28 +0000661 Heap::RootListIndex value_root_index =
662 flag ? Heap::kTrueValueRootIndex : Heap::kFalseValueRootIndex;
663 __ LoadRoot(at, value_root_index);
664 __ push(at);
Steve Block44f0eee2011-05-26 01:26:41 +0100665}
666
667
// A boolean constant in a test context compiles to an unconditional jump
// to the corresponding branch target (or a fall-through, in which case no
// branch is emitted).
void FullCodeGenerator::TestContext::Plug(bool flag) const {
  codegen()->PrepareForBailoutBeforeSplit(condition(),
                                          true,
                                          true_label_,
                                          false_label_);
  if (flag) {
    // Only emit a branch if the target is not the fall-through block.
    if (true_label_ != fall_through_) __ Branch(true_label_);
  } else {
    if (false_label_ != fall_through_) __ Branch(false_label_);
  }
}
679
680
// Emit code that converts the value in the result register (v0) to a
// boolean via the ToBoolean IC and branches on the outcome. The IC
// returns a nonzero value in v0 when the input is truthy.
void FullCodeGenerator::DoTest(Expression* condition,
                               Label* if_true,
                               Label* if_false,
                               Label* fall_through) {
  __ mov(a0, result_register());  // ToBoolean stub takes its input in a0.
  Handle<Code> ic = ToBooleanStub::GetUninitialized(isolate());
  CallIC(ic, condition->test_id());
  __ mov(at, zero_reg);
  // Truthy iff the stub result is nonzero.
  Split(ne, v0, Operand(at), if_true, if_false, fall_through);
}
691
692
// Emit a conditional split on (lhs cc rhs) with at most the branches
// actually needed: when one of the targets is the fall-through block, a
// single (possibly negated) branch suffices; otherwise emit a conditional
// branch to if_true followed by an unconditional branch to if_false.
void FullCodeGenerator::Split(Condition cc,
                              Register lhs,
                              const Operand& rhs,
                              Label* if_true,
                              Label* if_false,
                              Label* fall_through) {
  if (if_false == fall_through) {
    __ Branch(if_true, cc, lhs, rhs);
  } else if (if_true == fall_through) {
    __ Branch(if_false, NegateCondition(cc), lhs, rhs);
  } else {
    __ Branch(if_true, cc, lhs, rhs);
    __ Branch(if_false);
  }
}
708
709
Ben Murdoch589d6972011-11-30 16:04:58 +0000710MemOperand FullCodeGenerator::StackOperand(Variable* var) {
Ben Murdochb8a8cc12014-11-26 15:28:44 +0000711 DCHECK(var->IsStackAllocated());
Ben Murdoch589d6972011-11-30 16:04:58 +0000712 // Offset is negative because higher indexes are at lower addresses.
713 int offset = -var->index() * kPointerSize;
714 // Adjust by a (parameter or local) base offset.
715 if (var->IsParameter()) {
716 offset += (info_->scope()->num_parameters() + 1) * kPointerSize;
717 } else {
718 offset += JavaScriptFrameConstants::kLocal0Offset;
Ben Murdoch257744e2011-11-30 15:57:28 +0000719 }
Ben Murdoch589d6972011-11-30 16:04:58 +0000720 return MemOperand(fp, offset);
Andrei Popescu31002712010-02-23 13:46:05 +0000721}
722
723
Ben Murdoch589d6972011-11-30 16:04:58 +0000724MemOperand FullCodeGenerator::VarOperand(Variable* var, Register scratch) {
Ben Murdochb8a8cc12014-11-26 15:28:44 +0000725 DCHECK(var->IsContextSlot() || var->IsStackAllocated());
Ben Murdoch589d6972011-11-30 16:04:58 +0000726 if (var->IsContextSlot()) {
727 int context_chain_length = scope()->ContextChainLength(var->scope());
728 __ LoadContext(scratch, context_chain_length);
729 return ContextOperand(scratch, var->index());
730 } else {
731 return StackOperand(var);
732 }
733}
734
735
736void FullCodeGenerator::GetVar(Register dest, Variable* var) {
Ben Murdoch257744e2011-11-30 15:57:28 +0000737 // Use destination as scratch.
Ben Murdoch589d6972011-11-30 16:04:58 +0000738 MemOperand location = VarOperand(var, dest);
739 __ lw(dest, location);
740}
741
742
// Store 'src' into the slot for 'var', emitting a write barrier when the
// slot lives in a heap-allocated context. scratch0/scratch1 must be
// distinct from src and from each other (scratch0 also serves as the
// context register for VarOperand).
void FullCodeGenerator::SetVar(Variable* var,
                               Register src,
                               Register scratch0,
                               Register scratch1) {
  DCHECK(var->IsContextSlot() || var->IsStackAllocated());
  DCHECK(!scratch0.is(src));
  DCHECK(!scratch0.is(scratch1));
  DCHECK(!scratch1.is(src));
  MemOperand location = VarOperand(var, scratch0);
  __ sw(src, location);
  // Emit the write barrier code if the location is in the heap.
  if (var->IsContextSlot()) {
    __ RecordWriteContextSlot(scratch0,
                              location.offset(),
                              src,
                              scratch1,
                              kRAHasBeenSaved,
                              kDontSaveFPRegs);
  }
}
763
764
// Record a bailout point for 'expr' just before a Split is emitted, so
// that deoptimized code resumes at the comparison. When should_normalize
// is set, the emitted normalization compares the top-of-stack value
// against the true root and re-splits; it is skipped on the normal path.
void FullCodeGenerator::PrepareForBailoutBeforeSplit(Expression* expr,
                                                     bool should_normalize,
                                                     Label* if_true,
                                                     Label* if_false) {
  // Only prepare for bailouts before splits if we're in a test
  // context. Otherwise, we let the Visit function deal with the
  // preparation to avoid preparing with the same AST id twice.
  if (!context()->IsTest() || !info_->IsOptimizable()) return;

  Label skip;
  if (should_normalize) __ Branch(&skip);  // Normal path jumps over it.
  PrepareForBailout(expr, TOS_REG);
  if (should_normalize) {
    __ LoadRoot(t0, Heap::kTrueValueRootIndex);
    Split(eq, a0, Operand(t0), if_true, if_false, NULL);
    __ bind(&skip);
  }
}
783
784
// In debug-code builds, assert that 'variable' is being declared in the
// current function context (cp) and not inside a with- or catch-context,
// by comparing cp's map against the corresponding context map roots.
void FullCodeGenerator::EmitDebugCheckDeclarationContext(Variable* variable) {
  // The variable in the declaration always resides in the current function
  // context.
  DCHECK_EQ(0, scope()->ContextChainLength(variable->scope()));
  if (generate_debug_code_) {
    // Check that we're not inside a with or catch context.
    __ lw(a1, FieldMemOperand(cp, HeapObject::kMapOffset));
    __ LoadRoot(t0, Heap::kWithContextMapRootIndex);
    __ Check(ne, kDeclarationInWithContext,
        a1, Operand(t0));
    __ LoadRoot(t0, Heap::kCatchContextMapRootIndex);
    __ Check(ne, kDeclarationInCatchContext,
        a1, Operand(t0));
  }
}
800
801
// Compile a variable declaration. Depending on where the variable was
// allocated this either records it in the globals_ list (declared later
// in one batch by DeclareGlobals), hole-initializes a stack or context
// slot, or calls the runtime for dynamically looked-up slots.
void FullCodeGenerator::VisitVariableDeclaration(
    VariableDeclaration* declaration) {
  // If it was not possible to allocate the variable at compile time, we
  // need to "declare" it at runtime to make sure it actually exists in the
  // local context.
  VariableProxy* proxy = declaration->proxy();
  VariableMode mode = declaration->mode();
  Variable* variable = proxy->var();
  // let/const bindings start out holding the hole to catch use-before-init.
  bool hole_init = mode == LET || mode == CONST || mode == CONST_LEGACY;
  switch (variable->location()) {
    case Variable::UNALLOCATED:
      // Globals are declared in bulk: record name and initial value.
      globals_->Add(variable->name(), zone());
      globals_->Add(variable->binding_needs_init()
                        ? isolate()->factory()->the_hole_value()
                        : isolate()->factory()->undefined_value(),
                    zone());
      break;

    case Variable::PARAMETER:
    case Variable::LOCAL:
      if (hole_init) {
        Comment cmnt(masm_, "[ VariableDeclaration");
        __ LoadRoot(t0, Heap::kTheHoleValueRootIndex);
        __ sw(t0, StackOperand(variable));
      }
      break;

    case Variable::CONTEXT:
      if (hole_init) {
        Comment cmnt(masm_, "[ VariableDeclaration");
        EmitDebugCheckDeclarationContext(variable);
        __ LoadRoot(at, Heap::kTheHoleValueRootIndex);
        __ sw(at, ContextOperand(cp, variable->index()));
        // No write barrier since the_hole_value is in old space.
        PrepareForBailoutForId(proxy->id(), NO_REGISTERS);
      }
      break;

    case Variable::LOOKUP: {
      Comment cmnt(masm_, "[ VariableDeclaration");
      __ li(a2, Operand(variable->name()));
      // Declaration nodes are always introduced in one of four modes.
      DCHECK(IsDeclaredVariableMode(mode));
      PropertyAttributes attr =
          IsImmutableVariableMode(mode) ? READ_ONLY : NONE;
      __ li(a1, Operand(Smi::FromInt(attr)));
      // Push initial value, if any.
      // Note: For variables we must not push an initial value (such as
      // 'undefined') because we may have a (legal) redeclaration and we
      // must not destroy the current value.
      if (hole_init) {
        __ LoadRoot(a0, Heap::kTheHoleValueRootIndex);
        __ Push(cp, a2, a1, a0);
      } else {
        DCHECK(Smi::FromInt(0) == 0);
        __ mov(a0, zero_reg);  // Smi::FromInt(0) indicates no initial value.
        __ Push(cp, a2, a1, a0);
      }
      __ CallRuntime(Runtime::kDeclareLookupSlot, 4);
      break;
    }
  }
}
865
866
// Compile a function declaration. Unallocated (global) functions are
// compiled eagerly and recorded in globals_; stack/context slots get the
// closure value stored directly (with a write barrier for context slots);
// LOOKUP slots go through the runtime.
void FullCodeGenerator::VisitFunctionDeclaration(
    FunctionDeclaration* declaration) {
  VariableProxy* proxy = declaration->proxy();
  Variable* variable = proxy->var();
  switch (variable->location()) {
    case Variable::UNALLOCATED: {
      globals_->Add(variable->name(), zone());
      Handle<SharedFunctionInfo> function =
          Compiler::BuildFunctionInfo(declaration->fun(), script(), info_);
      // Check for stack-overflow exception.
      if (function.is_null()) return SetStackOverflow();
      globals_->Add(function, zone());
      break;
    }

    case Variable::PARAMETER:
    case Variable::LOCAL: {
      Comment cmnt(masm_, "[ FunctionDeclaration");
      VisitForAccumulatorValue(declaration->fun());
      __ sw(result_register(), StackOperand(variable));
      break;
    }

    case Variable::CONTEXT: {
      Comment cmnt(masm_, "[ FunctionDeclaration");
      EmitDebugCheckDeclarationContext(variable);
      VisitForAccumulatorValue(declaration->fun());
      __ sw(result_register(), ContextOperand(cp, variable->index()));
      int offset = Context::SlotOffset(variable->index());
      // We know that we have written a function, which is not a smi.
      __ RecordWriteContextSlot(cp,
                                offset,
                                result_register(),
                                a2,
                                kRAHasBeenSaved,
                                kDontSaveFPRegs,
                                EMIT_REMEMBERED_SET,
                                OMIT_SMI_CHECK);
      PrepareForBailoutForId(proxy->id(), NO_REGISTERS);
      break;
    }

    case Variable::LOOKUP: {
      Comment cmnt(masm_, "[ FunctionDeclaration");
      __ li(a2, Operand(variable->name()));
      __ li(a1, Operand(Smi::FromInt(NONE)));
      __ Push(cp, a2, a1);
      // Push initial value for function declaration.
      VisitForStackValue(declaration->fun());
      __ CallRuntime(Runtime::kDeclareLookupSlot, 4);
      break;
    }
  }
}
921
922
// Compile a module declaration: load the module's instance object out of
// the script context via the module's interface index, store it into the
// declared context slot (with write barrier), then compile the module body.
void FullCodeGenerator::VisitModuleDeclaration(ModuleDeclaration* declaration) {
  Variable* variable = declaration->proxy()->var();
  DCHECK(variable->location() == Variable::CONTEXT);
  DCHECK(variable->interface()->IsFrozen());

  Comment cmnt(masm_, "[ ModuleDeclaration");
  EmitDebugCheckDeclarationContext(variable);

  // Load instance object.
  __ LoadContext(a1, scope_->ContextChainLength(scope_->ScriptScope()));
  __ lw(a1, ContextOperand(a1, variable->interface()->Index()));
  __ lw(a1, ContextOperand(a1, Context::EXTENSION_INDEX));

  // Assign it.
  __ sw(a1, ContextOperand(cp, variable->index()));
  // We know that we have written a module, which is not a smi.
  __ RecordWriteContextSlot(cp,
                            Context::SlotOffset(variable->index()),
                            a1,
                            a3,
                            kRAHasBeenSaved,
                            kDontSaveFPRegs,
                            EMIT_REMEMBERED_SET,
                            OMIT_SMI_CHECK);
  PrepareForBailoutForId(declaration->proxy()->id(), NO_REGISTERS);

  // Traverse into body.
  Visit(declaration->module());
}
952
953
// Compile an import declaration. Code generation for imports was not yet
// implemented at this point (see the TODOs); only the debug context check
// is emitted for context-allocated imports.
void FullCodeGenerator::VisitImportDeclaration(ImportDeclaration* declaration) {
  VariableProxy* proxy = declaration->proxy();
  Variable* variable = proxy->var();
  switch (variable->location()) {
    case Variable::UNALLOCATED:
      // TODO(rossberg)
      break;

    case Variable::CONTEXT: {
      Comment cmnt(masm_, "[ ImportDeclaration");
      EmitDebugCheckDeclarationContext(variable);
      // TODO(rossberg)
      break;
    }

    case Variable::PARAMETER:
    case Variable::LOCAL:
    case Variable::LOOKUP:
      // Imports are never allocated to these locations.
      UNREACHABLE();
  }
}
975
976
// Compile an export declaration. Not implemented at this point; emits
// nothing (see TODO).
void FullCodeGenerator::VisitExportDeclaration(ExportDeclaration* declaration) {
  // TODO(rossberg)
}
980
981
// Declare all accumulated global variables/functions in one runtime call.
// 'pairs' holds the (name, value) pairs collected by the Visit*Declaration
// functions above.
void FullCodeGenerator::DeclareGlobals(Handle<FixedArray> pairs) {
  // Call the runtime to declare the globals.
  // The context is the first argument.
  __ li(a1, Operand(pairs));
  __ li(a0, Operand(Smi::FromInt(DeclareGlobalsFlags())));
  __ Push(cp, a1, a0);
  __ CallRuntime(Runtime::kDeclareGlobals, 3);
  // Return value is ignored.
}
991
992
// Declare all modules described in 'descriptions' via a single runtime
// call.
void FullCodeGenerator::DeclareModules(Handle<FixedArray> descriptions) {
  // Call the runtime to declare the modules.
  __ Push(descriptions);
  __ CallRuntime(Runtime::kDeclareModules, 1);
  // Return value is ignored.
}
999
1000
// Compile a switch statement: first emit the chain of case-label
// comparisons (tag value kept on the stack, each test branching to its
// clause's body target on match), then the default/exit dispatch, then all
// the case bodies. Smi-to-smi comparisons may be inlined ahead of the
// CompareIC call via a patchable jump site.
void FullCodeGenerator::VisitSwitchStatement(SwitchStatement* stmt) {
  Comment cmnt(masm_, "[ SwitchStatement");
  Breakable nested_statement(this, stmt);
  SetStatementPosition(stmt);

  // Keep the switch value on the stack until a case matches.
  VisitForStackValue(stmt->tag());
  PrepareForBailoutForId(stmt->EntryId(), NO_REGISTERS);

  ZoneList<CaseClause*>* clauses = stmt->cases();
  CaseClause* default_clause = NULL;  // Can occur anywhere in the list.

  Label next_test;  // Recycled for each test.
  // Compile all the tests with branches to their bodies.
  for (int i = 0; i < clauses->length(); i++) {
    CaseClause* clause = clauses->at(i);
    clause->body_target()->Unuse();

    // The default is not a test, but remember it as final fall through.
    if (clause->is_default()) {
      default_clause = clause;
      continue;
    }

    Comment cmnt(masm_, "[ Case comparison");
    __ bind(&next_test);
    next_test.Unuse();

    // Compile the label expression.
    VisitForAccumulatorValue(clause->label());
    __ mov(a0, result_register());  // CompareStub requires args in a0, a1.

    // Perform the comparison as if via '==='.
    __ lw(a1, MemOperand(sp, 0));  // Switch value.
    bool inline_smi_code = ShouldInlineSmiCase(Token::EQ_STRICT);
    JumpPatchSite patch_site(masm_);
    if (inline_smi_code) {
      Label slow_case;
      // If either operand is not a smi, fall through to the IC call below.
      __ or_(a2, a1, a0);
      patch_site.EmitJumpIfNotSmi(a2, &slow_case);

      __ Branch(&next_test, ne, a1, Operand(a0));
      __ Drop(1);  // Switch value is no longer needed.
      __ Branch(clause->body_target());

      __ bind(&slow_case);
    }

    // Record position before stub call for type feedback.
    SetSourcePosition(clause->position());
    Handle<Code> ic =
        CodeFactory::CompareIC(isolate(), Token::EQ_STRICT).code();
    CallIC(ic, clause->CompareId());
    patch_site.EmitPatchInfo();

    // Bailout normalization block: skipped on the normal path, entered
    // only after a deopt-resume with the IC result (true/false) on top.
    Label skip;
    __ Branch(&skip);
    PrepareForBailout(clause, TOS_REG);
    __ LoadRoot(at, Heap::kTrueValueRootIndex);
    __ Branch(&next_test, ne, v0, Operand(at));
    __ Drop(1);
    __ Branch(clause->body_target());
    __ bind(&skip);

    // Normal path: CompareIC returns zero in v0 on equality.
    __ Branch(&next_test, ne, v0, Operand(zero_reg));
    __ Drop(1);  // Switch value is no longer needed.
    __ Branch(clause->body_target());
  }

  // Discard the test value and jump to the default if present, otherwise to
  // the end of the statement.
  __ bind(&next_test);
  __ Drop(1);  // Switch value is no longer needed.
  if (default_clause == NULL) {
    __ Branch(nested_statement.break_label());
  } else {
    __ Branch(default_clause->body_target());
  }

  // Compile all the case bodies.
  for (int i = 0; i < clauses->length(); i++) {
    Comment cmnt(masm_, "[ Case body");
    CaseClause* clause = clauses->at(i);
    __ bind(clause->body_target());
    PrepareForBailoutForId(clause->EntryId(), NO_REGISTERS);
    VisitStatements(clause->statements());
  }

  __ bind(nested_statement.break_label());
  PrepareForBailoutForId(stmt->ExitId(), NO_REGISTERS);
}
1092
1093
// Compile a for-in loop. Layout of the five stack slots maintained while
// the loop runs (top to bottom): current index (smi), array length (smi),
// the fixed array of keys, the expected map (or smi 1/0 marking the slow /
// proxy case), and the enumerable object itself.
void FullCodeGenerator::VisitForInStatement(ForInStatement* stmt) {
  Comment cmnt(masm_, "[ ForInStatement");
  FeedbackVectorSlot slot = stmt->ForInFeedbackSlot();
  SetStatementPosition(stmt);

  Label loop, exit;
  ForIn loop_statement(this, stmt);
  increment_loop_depth();

  // Get the object to enumerate over. If the object is null or undefined, skip
  // over the loop. See ECMA-262 version 5, section 12.6.4.
  SetExpressionPosition(stmt->enumerable());
  VisitForAccumulatorValue(stmt->enumerable());
  __ mov(a0, result_register());  // Result as param to InvokeBuiltin below.
  __ LoadRoot(at, Heap::kUndefinedValueRootIndex);
  __ Branch(&exit, eq, a0, Operand(at));
  Register null_value = t1;
  __ LoadRoot(null_value, Heap::kNullValueRootIndex);
  __ Branch(&exit, eq, a0, Operand(null_value));
  PrepareForBailoutForId(stmt->PrepareId(), TOS_REG);
  __ mov(a0, v0);
  // Convert the object to a JS object.
  Label convert, done_convert;
  __ JumpIfSmi(a0, &convert);
  __ GetObjectType(a0, a1, a1);
  __ Branch(&done_convert, ge, a1, Operand(FIRST_SPEC_OBJECT_TYPE));
  __ bind(&convert);
  __ push(a0);
  __ InvokeBuiltin(Builtins::TO_OBJECT, CALL_FUNCTION);
  __ mov(a0, v0);
  __ bind(&done_convert);
  PrepareForBailoutForId(stmt->ToObjectId(), TOS_REG);
  __ push(a0);

  // Check for proxies; they always take the runtime path.
  Label call_runtime;
  STATIC_ASSERT(FIRST_JS_PROXY_TYPE == FIRST_SPEC_OBJECT_TYPE);
  __ GetObjectType(a0, a1, a1);
  __ Branch(&call_runtime, le, a1, Operand(LAST_JS_PROXY_TYPE));

  // Check cache validity in generated code. This is a fast case for
  // the JSObject::IsSimpleEnum cache validity checks. If we cannot
  // guarantee cache validity, call the runtime system to check cache
  // validity or get the property names in a fixed array.
  __ CheckEnumCache(null_value, &call_runtime);

  // The enum cache is valid. Load the map of the object being
  // iterated over and use the cache for the iteration.
  Label use_cache;
  __ lw(v0, FieldMemOperand(a0, HeapObject::kMapOffset));
  __ Branch(&use_cache);

  // Get the set of properties to enumerate.
  __ bind(&call_runtime);
  __ push(a0);  // Duplicate the enumerable object on the stack.
  __ CallRuntime(Runtime::kGetPropertyNamesFast, 1);
  PrepareForBailoutForId(stmt->EnumId(), TOS_REG);

  // If we got a map from the runtime call, we can do a fast
  // modification check. Otherwise, we got a fixed array, and we have
  // to do a slow check.
  Label fixed_array;
  __ lw(a2, FieldMemOperand(v0, HeapObject::kMapOffset));
  __ LoadRoot(at, Heap::kMetaMapRootIndex);
  __ Branch(&fixed_array, ne, a2, Operand(at));

  // We got a map in register v0. Get the enumeration cache from it.
  Label no_descriptors;
  __ bind(&use_cache);

  __ EnumLength(a1, v0);
  __ Branch(&no_descriptors, eq, a1, Operand(Smi::FromInt(0)));

  __ LoadInstanceDescriptors(v0, a2);
  __ lw(a2, FieldMemOperand(a2, DescriptorArray::kEnumCacheOffset));
  __ lw(a2, FieldMemOperand(a2, DescriptorArray::kEnumCacheBridgeCacheOffset));

  // Set up the four remaining stack slots.
  __ li(a0, Operand(Smi::FromInt(0)));
  // Push map, enumeration cache, enumeration cache length (as smi) and zero.
  __ Push(v0, a2, a1, a0);
  __ jmp(&loop);

  // Empty enum cache: nothing to iterate, drop the enumerable and exit.
  __ bind(&no_descriptors);
  __ Drop(1);
  __ jmp(&exit);

  // We got a fixed array in register v0. Iterate through that.
  Label non_proxy;
  __ bind(&fixed_array);

  // Record megamorphic for-in feedback in the type feedback vector.
  __ li(a1, FeedbackVector());
  __ li(a2, Operand(TypeFeedbackVector::MegamorphicSentinel(isolate())));
  int vector_index = FeedbackVector()->GetIndex(slot);
  __ sw(a2, FieldMemOperand(a1, FixedArray::OffsetOfElementAt(vector_index)));

  __ li(a1, Operand(Smi::FromInt(1)));  // Smi indicates slow check
  __ lw(a2, MemOperand(sp, 0 * kPointerSize));  // Get enumerated object
  STATIC_ASSERT(FIRST_JS_PROXY_TYPE == FIRST_SPEC_OBJECT_TYPE);
  __ GetObjectType(a2, a3, a3);
  __ Branch(&non_proxy, gt, a3, Operand(LAST_JS_PROXY_TYPE));
  __ li(a1, Operand(Smi::FromInt(0)));  // Zero indicates proxy
  __ bind(&non_proxy);
  __ Push(a1, v0);  // Smi and array
  __ lw(a1, FieldMemOperand(v0, FixedArray::kLengthOffset));
  __ li(a0, Operand(Smi::FromInt(0)));
  __ Push(a1, a0);  // Fixed array length (as smi) and initial index.

  // Generate code for doing the condition check.
  PrepareForBailoutForId(stmt->BodyId(), NO_REGISTERS);
  __ bind(&loop);
  SetExpressionPosition(stmt->each());

  // Load the current count to a0, load the length to a1.
  __ lw(a0, MemOperand(sp, 0 * kPointerSize));
  __ lw(a1, MemOperand(sp, 1 * kPointerSize));
  // Unsigned compare: exit the loop once index >= length.
  __ Branch(loop_statement.break_label(), hs, a0, Operand(a1));

  // Get the current entry of the array into register a3.
  __ lw(a2, MemOperand(sp, 2 * kPointerSize));
  __ Addu(a2, a2, Operand(FixedArray::kHeaderSize - kHeapObjectTag));
  __ sll(t0, a0, kPointerSizeLog2 - kSmiTagSize);
  __ addu(t0, a2, t0);  // Array base + scaled (smi) index.
  __ lw(a3, MemOperand(t0));  // Current entry.

  // Get the expected map from the stack or a smi in the
  // permanent slow case into register a2.
  __ lw(a2, MemOperand(sp, 3 * kPointerSize));

  // Check if the expected map still matches that of the enumerable.
  // If not, we may have to filter the key.
  Label update_each;
  __ lw(a1, MemOperand(sp, 4 * kPointerSize));
  __ lw(t0, FieldMemOperand(a1, HeapObject::kMapOffset));
  __ Branch(&update_each, eq, t0, Operand(a2));

  // For proxies, no filtering is done.
  // TODO(rossberg): What if only a prototype is a proxy? Not specified yet.
  DCHECK_EQ(Smi::FromInt(0), 0);
  __ Branch(&update_each, eq, a2, Operand(zero_reg));

  // Convert the entry to a string or (smi) 0 if it isn't a property
  // any more. If the property has been removed while iterating, we
  // just skip it.
  __ Push(a1, a3);  // Enumerable and current entry.
  __ InvokeBuiltin(Builtins::FILTER_KEY, CALL_FUNCTION);
  __ mov(a3, result_register());
  __ Branch(loop_statement.continue_label(), eq, a3, Operand(zero_reg));

  // Update the 'each' property or variable from the possibly filtered
  // entry in register a3.
  __ bind(&update_each);
  __ mov(result_register(), a3);
  // Perform the assignment as if via '='.
  { EffectContext context(this);
    EmitAssignment(stmt->each());
  }

  // Generate code for the body of the loop.
  Visit(stmt->body());

  // Generate code for the going to the next element by incrementing
  // the index (smi) stored on top of the stack.
  __ bind(loop_statement.continue_label());
  __ pop(a0);
  __ Addu(a0, a0, Operand(Smi::FromInt(1)));
  __ push(a0);

  EmitBackEdgeBookkeeping(stmt, &loop);
  __ Branch(&loop);

  // Remove the pointers stored on the stack.
  __ bind(loop_statement.break_label());
  __ Drop(5);

  // Exit and decrement the loop depth.
  PrepareForBailoutForId(stmt->ExitId(), NO_REGISTERS);
  __ bind(&exit);
  decrement_loop_depth();
}
1274
1275
// Emit code that creates a closure for 'info' and leaves it in v0,
// plugging the result into the current expression context. Uses the
// FastNewClosureStub when allowed, otherwise the generic runtime call.
void FullCodeGenerator::EmitNewClosure(Handle<SharedFunctionInfo> info,
                                       bool pretenure) {
  // Use the fast case closure allocation code that allocates in new
  // space for nested functions that don't need literals cloning. If
  // we're running with the --always-opt or the --prepare-always-opt
  // flag, we need to use the runtime function so that the new function
  // we are creating here gets a chance to have its code optimized and
  // doesn't just get a copy of the existing unoptimized code.
  if (!FLAG_always_opt &&
      !FLAG_prepare_always_opt &&
      !pretenure &&
      scope()->is_function_scope() &&
      info->num_literals() == 0) {
    FastNewClosureStub stub(isolate(), info->strict_mode(), info->kind());
    __ li(a2, Operand(info));
    __ CallStub(&stub);
  } else {
    // Slow path: Runtime::kNewClosure(context, info, pretenure_flag).
    __ li(a0, Operand(info));
    __ LoadRoot(a1, pretenure ? Heap::kTrueValueRootIndex
                              : Heap::kFalseValueRootIndex);
    __ Push(cp, a0, a1);
    __ CallRuntime(Runtime::kNewClosure, 3);
  }
  context()->Plug(v0);
}
1301
1302
// Compile a variable reference by delegating to the shared variable-load
// emitter.
void FullCodeGenerator::VisitVariableProxy(VariableProxy* expr) {
  Comment cmnt(masm_, "[ VariableProxy");
  EmitVariableLoad(expr);
}
1307
1308
// Load the [[HomeObject]] of the current function (for 'super' access)
// into v0 via a load IC keyed by the home_object_symbol, throwing a
// NonMethodError when the result is undefined.
void FullCodeGenerator::EmitLoadHomeObject(SuperReference* expr) {
  Comment cnmt(masm_, "[ SuperReference ");

  // Receiver for the IC is the current closure from the frame.
  __ lw(LoadDescriptor::ReceiverRegister(),
        MemOperand(fp, JavaScriptFrameConstants::kFunctionOffset));

  Handle<Symbol> home_object_symbol(isolate()->heap()->home_object_symbol());
  __ li(LoadDescriptor::NameRegister(), home_object_symbol);

  if (FLAG_vector_ics) {
    __ li(VectorLoadICDescriptor::SlotRegister(),
          Operand(SmiFromSlot(expr->HomeObjectFeedbackSlot())));
    CallLoadIC(NOT_CONTEXTUAL);
  } else {
    CallLoadIC(NOT_CONTEXTUAL, expr->HomeObjectFeedbackId());
  }

  // A missing home object means 'super' was used outside a method.
  Label done;
  __ Branch(&done, ne, v0, Operand(isolate()->factory()->undefined_value()));
  __ CallRuntime(Runtime::kThrowNonMethodError, 0);
  __ bind(&done);
}
1331
1332
// If 'initializer' needs a [[HomeObject]] (method using 'super'), emit a
// store IC that writes the object currently at the top of the stack into
// the home_object_symbol property of the value at stack slot 'offset'.
void FullCodeGenerator::EmitSetHomeObjectIfNeeded(Expression* initializer,
                                                  int offset) {
  if (NeedsHomeObject(initializer)) {
    __ lw(StoreDescriptor::ReceiverRegister(), MemOperand(sp));
    __ li(StoreDescriptor::NameRegister(),
          Operand(isolate()->factory()->home_object_symbol()));
    __ lw(StoreDescriptor::ValueRegister(),
          MemOperand(sp, offset * kPointerSize));
    CallStoreIC();
  }
}
1344
1345
Ben Murdochb8a8cc12014-11-26 15:28:44 +00001346void FullCodeGenerator::EmitLoadGlobalCheckExtensions(VariableProxy* proxy,
Ben Murdoch589d6972011-11-30 16:04:58 +00001347 TypeofState typeof_state,
1348 Label* slow) {
Ben Murdoch257744e2011-11-30 15:57:28 +00001349 Register current = cp;
1350 Register next = a1;
1351 Register temp = a2;
1352
1353 Scope* s = scope();
1354 while (s != NULL) {
1355 if (s->num_heap_slots() > 0) {
Ben Murdochb8a8cc12014-11-26 15:28:44 +00001356 if (s->calls_sloppy_eval()) {
Ben Murdoch257744e2011-11-30 15:57:28 +00001357 // Check that extension is NULL.
1358 __ lw(temp, ContextOperand(current, Context::EXTENSION_INDEX));
1359 __ Branch(slow, ne, temp, Operand(zero_reg));
1360 }
1361 // Load next context in chain.
Ben Murdoch3fb3ca82011-12-02 17:19:32 +00001362 __ lw(next, ContextOperand(current, Context::PREVIOUS_INDEX));
Ben Murdoch257744e2011-11-30 15:57:28 +00001363 // Walk the rest of the chain without clobbering cp.
1364 current = next;
1365 }
1366 // If no outer scope calls eval, we do not need to check more
1367 // context extensions.
Ben Murdochb8a8cc12014-11-26 15:28:44 +00001368 if (!s->outer_scope_calls_sloppy_eval() || s->is_eval_scope()) break;
Ben Murdoch257744e2011-11-30 15:57:28 +00001369 s = s->outer_scope();
1370 }
1371
1372 if (s->is_eval_scope()) {
1373 Label loop, fast;
1374 if (!current.is(next)) {
1375 __ Move(next, current);
1376 }
1377 __ bind(&loop);
Ben Murdochb8a8cc12014-11-26 15:28:44 +00001378 // Terminate at native context.
Ben Murdoch257744e2011-11-30 15:57:28 +00001379 __ lw(temp, FieldMemOperand(next, HeapObject::kMapOffset));
Ben Murdochb8a8cc12014-11-26 15:28:44 +00001380 __ LoadRoot(t0, Heap::kNativeContextMapRootIndex);
Ben Murdoch257744e2011-11-30 15:57:28 +00001381 __ Branch(&fast, eq, temp, Operand(t0));
1382 // Check that extension is NULL.
1383 __ lw(temp, ContextOperand(next, Context::EXTENSION_INDEX));
1384 __ Branch(slow, ne, temp, Operand(zero_reg));
1385 // Load next context in chain.
Ben Murdoch3fb3ca82011-12-02 17:19:32 +00001386 __ lw(next, ContextOperand(next, Context::PREVIOUS_INDEX));
Ben Murdoch257744e2011-11-30 15:57:28 +00001387 __ Branch(&loop);
1388 __ bind(&fast);
1389 }
1390
Ben Murdochb8a8cc12014-11-26 15:28:44 +00001391 __ lw(LoadDescriptor::ReceiverRegister(), GlobalObjectOperand());
1392 __ li(LoadDescriptor::NameRegister(), Operand(proxy->var()->name()));
1393 if (FLAG_vector_ics) {
1394 __ li(VectorLoadICDescriptor::SlotRegister(),
Emily Bernierd0a1eb72015-03-24 16:35:39 -04001395 Operand(SmiFromSlot(proxy->VariableFeedbackSlot())));
Ben Murdochb8a8cc12014-11-26 15:28:44 +00001396 }
1397
1398 ContextualMode mode = (typeof_state == INSIDE_TYPEOF)
1399 ? NOT_CONTEXTUAL
1400 : CONTEXTUAL;
1401 CallLoadIC(mode);
Andrei Popescu31002712010-02-23 13:46:05 +00001402}
1403
1404
Ben Murdoch589d6972011-11-30 16:04:58 +00001405MemOperand FullCodeGenerator::ContextSlotOperandCheckExtensions(Variable* var,
1406 Label* slow) {
Ben Murdochb8a8cc12014-11-26 15:28:44 +00001407 DCHECK(var->IsContextSlot());
Ben Murdoch257744e2011-11-30 15:57:28 +00001408 Register context = cp;
1409 Register next = a3;
1410 Register temp = t0;
1411
Ben Murdoch589d6972011-11-30 16:04:58 +00001412 for (Scope* s = scope(); s != var->scope(); s = s->outer_scope()) {
Ben Murdoch257744e2011-11-30 15:57:28 +00001413 if (s->num_heap_slots() > 0) {
Ben Murdochb8a8cc12014-11-26 15:28:44 +00001414 if (s->calls_sloppy_eval()) {
Ben Murdoch257744e2011-11-30 15:57:28 +00001415 // Check that extension is NULL.
1416 __ lw(temp, ContextOperand(context, Context::EXTENSION_INDEX));
1417 __ Branch(slow, ne, temp, Operand(zero_reg));
1418 }
Ben Murdoch3fb3ca82011-12-02 17:19:32 +00001419 __ lw(next, ContextOperand(context, Context::PREVIOUS_INDEX));
Ben Murdoch257744e2011-11-30 15:57:28 +00001420 // Walk the rest of the chain without clobbering cp.
1421 context = next;
1422 }
1423 }
1424 // Check that last extension is NULL.
1425 __ lw(temp, ContextOperand(context, Context::EXTENSION_INDEX));
1426 __ Branch(slow, ne, temp, Operand(zero_reg));
1427
1428 // This function is used only for loads, not stores, so it's safe to
1429 // return an cp-based operand (the write barrier cannot be allowed to
1430 // destroy the cp register).
Ben Murdoch589d6972011-11-30 16:04:58 +00001431 return ContextOperand(context, var->index());
Steve Block44f0eee2011-05-26 01:26:41 +01001432}
1433
1434
Ben Murdochb8a8cc12014-11-26 15:28:44 +00001435void FullCodeGenerator::EmitDynamicLookupFastCase(VariableProxy* proxy,
Ben Murdoch589d6972011-11-30 16:04:58 +00001436 TypeofState typeof_state,
1437 Label* slow,
1438 Label* done) {
Ben Murdoch257744e2011-11-30 15:57:28 +00001439 // Generate fast-case code for variables that might be shadowed by
1440 // eval-introduced variables. Eval is used a lot without
1441 // introducing variables. In those cases, we do not want to
1442 // perform a runtime call for all variables in the scope
1443 // containing the eval.
Ben Murdochb8a8cc12014-11-26 15:28:44 +00001444 Variable* var = proxy->var();
Ben Murdoch3ef787d2012-04-12 10:51:47 +01001445 if (var->mode() == DYNAMIC_GLOBAL) {
Ben Murdochb8a8cc12014-11-26 15:28:44 +00001446 EmitLoadGlobalCheckExtensions(proxy, typeof_state, slow);
Ben Murdoch257744e2011-11-30 15:57:28 +00001447 __ Branch(done);
Ben Murdoch3ef787d2012-04-12 10:51:47 +01001448 } else if (var->mode() == DYNAMIC_LOCAL) {
Ben Murdoch589d6972011-11-30 16:04:58 +00001449 Variable* local = var->local_if_not_shadowed();
1450 __ lw(v0, ContextSlotOperandCheckExtensions(local, slow));
Ben Murdochb8a8cc12014-11-26 15:28:44 +00001451 if (local->mode() == LET || local->mode() == CONST ||
1452 local->mode() == CONST_LEGACY) {
Ben Murdoch589d6972011-11-30 16:04:58 +00001453 __ LoadRoot(at, Heap::kTheHoleValueRootIndex);
1454 __ subu(at, v0, at); // Sub as compare: at == 0 on eq.
Ben Murdochb8a8cc12014-11-26 15:28:44 +00001455 if (local->mode() == CONST_LEGACY) {
Ben Murdoch3ef787d2012-04-12 10:51:47 +01001456 __ LoadRoot(a0, Heap::kUndefinedValueRootIndex);
1457 __ Movz(v0, a0, at); // Conditional move: return Undefined if TheHole.
Ben Murdochb8a8cc12014-11-26 15:28:44 +00001458 } else { // LET || CONST
Ben Murdoch3ef787d2012-04-12 10:51:47 +01001459 __ Branch(done, ne, at, Operand(zero_reg));
1460 __ li(a0, Operand(var->name()));
1461 __ push(a0);
1462 __ CallRuntime(Runtime::kThrowReferenceError, 1);
1463 }
Ben Murdoch257744e2011-11-30 15:57:28 +00001464 }
Ben Murdoch589d6972011-11-30 16:04:58 +00001465 __ Branch(done);
Ben Murdoch257744e2011-11-30 15:57:28 +00001466 }
Steve Block44f0eee2011-05-26 01:26:41 +01001467}
1468
1469
Ben Murdoch3fb3ca82011-12-02 17:19:32 +00001470void FullCodeGenerator::EmitVariableLoad(VariableProxy* proxy) {
1471 // Record position before possible IC call.
1472 SetSourcePosition(proxy->position());
1473 Variable* var = proxy->var();
Ben Murdoch257744e2011-11-30 15:57:28 +00001474
Ben Murdoch589d6972011-11-30 16:04:58 +00001475 // Three cases: global variables, lookup variables, and all other types of
1476 // variables.
1477 switch (var->location()) {
1478 case Variable::UNALLOCATED: {
Ben Murdochb8a8cc12014-11-26 15:28:44 +00001479 Comment cmnt(masm_, "[ Global variable");
1480 __ lw(LoadDescriptor::ReceiverRegister(), GlobalObjectOperand());
1481 __ li(LoadDescriptor::NameRegister(), Operand(var->name()));
1482 if (FLAG_vector_ics) {
1483 __ li(VectorLoadICDescriptor::SlotRegister(),
Emily Bernierd0a1eb72015-03-24 16:35:39 -04001484 Operand(SmiFromSlot(proxy->VariableFeedbackSlot())));
Ben Murdochb8a8cc12014-11-26 15:28:44 +00001485 }
1486 CallLoadIC(CONTEXTUAL);
Ben Murdoch589d6972011-11-30 16:04:58 +00001487 context()->Plug(v0);
1488 break;
1489 }
Ben Murdoch3fb3ca82011-12-02 17:19:32 +00001490
Ben Murdoch589d6972011-11-30 16:04:58 +00001491 case Variable::PARAMETER:
1492 case Variable::LOCAL:
1493 case Variable::CONTEXT: {
Ben Murdochb8a8cc12014-11-26 15:28:44 +00001494 Comment cmnt(masm_, var->IsContextSlot() ? "[ Context variable"
1495 : "[ Stack variable");
Ben Murdoch3ef787d2012-04-12 10:51:47 +01001496 if (var->binding_needs_init()) {
1497 // var->scope() may be NULL when the proxy is located in eval code and
1498 // refers to a potential outside binding. Currently those bindings are
1499 // always looked up dynamically, i.e. in that case
1500 // var->location() == LOOKUP.
1501 // always holds.
Ben Murdochb8a8cc12014-11-26 15:28:44 +00001502 DCHECK(var->scope() != NULL);
Ben Murdoch3ef787d2012-04-12 10:51:47 +01001503
1504 // Check if the binding really needs an initialization check. The check
1505 // can be skipped in the following situation: we have a LET or CONST
1506 // binding in harmony mode, both the Variable and the VariableProxy have
1507 // the same declaration scope (i.e. they are both in global code, in the
1508 // same function or in the same eval code) and the VariableProxy is in
1509 // the source physically located after the initializer of the variable.
1510 //
1511 // We cannot skip any initialization checks for CONST in non-harmony
1512 // mode because const variables may be declared but never initialized:
1513 // if (false) { const x; }; var y = x;
1514 //
1515 // The condition on the declaration scopes is a conservative check for
1516 // nested functions that access a binding and are called before the
1517 // binding is initialized:
1518 // function() { f(); let x = 1; function f() { x = 2; } }
1519 //
1520 bool skip_init_check;
1521 if (var->scope()->DeclarationScope() != scope()->DeclarationScope()) {
1522 skip_init_check = false;
Ben Murdoch589d6972011-11-30 16:04:58 +00001523 } else {
Ben Murdoch3ef787d2012-04-12 10:51:47 +01001524 // Check that we always have valid source position.
Ben Murdochb8a8cc12014-11-26 15:28:44 +00001525 DCHECK(var->initializer_position() != RelocInfo::kNoPosition);
1526 DCHECK(proxy->position() != RelocInfo::kNoPosition);
1527 skip_init_check = var->mode() != CONST_LEGACY &&
Ben Murdoch3ef787d2012-04-12 10:51:47 +01001528 var->initializer_position() < proxy->position();
Ben Murdoch589d6972011-11-30 16:04:58 +00001529 }
Ben Murdoch3ef787d2012-04-12 10:51:47 +01001530
1531 if (!skip_init_check) {
1532 // Let and const need a read barrier.
1533 GetVar(v0, var);
1534 __ LoadRoot(at, Heap::kTheHoleValueRootIndex);
1535 __ subu(at, v0, at); // Sub as compare: at == 0 on eq.
Ben Murdochb8a8cc12014-11-26 15:28:44 +00001536 if (var->mode() == LET || var->mode() == CONST) {
Ben Murdoch3ef787d2012-04-12 10:51:47 +01001537 // Throw a reference error when using an uninitialized let/const
1538 // binding in harmony mode.
1539 Label done;
1540 __ Branch(&done, ne, at, Operand(zero_reg));
1541 __ li(a0, Operand(var->name()));
1542 __ push(a0);
1543 __ CallRuntime(Runtime::kThrowReferenceError, 1);
1544 __ bind(&done);
1545 } else {
1546 // Uninitalized const bindings outside of harmony mode are unholed.
Ben Murdochb8a8cc12014-11-26 15:28:44 +00001547 DCHECK(var->mode() == CONST_LEGACY);
Ben Murdoch3ef787d2012-04-12 10:51:47 +01001548 __ LoadRoot(a0, Heap::kUndefinedValueRootIndex);
1549 __ Movz(v0, a0, at); // Conditional move: Undefined if TheHole.
1550 }
1551 context()->Plug(v0);
1552 break;
1553 }
Ben Murdoch589d6972011-11-30 16:04:58 +00001554 }
Ben Murdoch3ef787d2012-04-12 10:51:47 +01001555 context()->Plug(var);
Ben Murdoch589d6972011-11-30 16:04:58 +00001556 break;
1557 }
Ben Murdoch257744e2011-11-30 15:57:28 +00001558
Ben Murdoch589d6972011-11-30 16:04:58 +00001559 case Variable::LOOKUP: {
Ben Murdochb8a8cc12014-11-26 15:28:44 +00001560 Comment cmnt(masm_, "[ Lookup variable");
Ben Murdoch589d6972011-11-30 16:04:58 +00001561 Label done, slow;
1562 // Generate code for loading from variables potentially shadowed
1563 // by eval-introduced variables.
Ben Murdochb8a8cc12014-11-26 15:28:44 +00001564 EmitDynamicLookupFastCase(proxy, NOT_INSIDE_TYPEOF, &slow, &done);
Ben Murdoch589d6972011-11-30 16:04:58 +00001565 __ bind(&slow);
Ben Murdoch589d6972011-11-30 16:04:58 +00001566 __ li(a1, Operand(var->name()));
1567 __ Push(cp, a1); // Context and name.
Ben Murdochb8a8cc12014-11-26 15:28:44 +00001568 __ CallRuntime(Runtime::kLoadLookupSlot, 2);
Ben Murdoch589d6972011-11-30 16:04:58 +00001569 __ bind(&done);
1570 context()->Plug(v0);
1571 }
Ben Murdoch257744e2011-11-30 15:57:28 +00001572 }
Andrei Popescu31002712010-02-23 13:46:05 +00001573}
1574
1575
// Emits code for a regexp literal: fetches (or lazily materializes via
// Runtime::kMaterializeRegExpLiteral) the boilerplate regexp from the
// function's literals array, then clones it — allocating inline when new
// space has room, otherwise through Runtime::kAllocateInNewSpace — leaving
// the clone in v0.
void FullCodeGenerator::VisitRegExpLiteral(RegExpLiteral* expr) {
  Comment cmnt(masm_, "[ RegExpLiteral");
  Label materialized;
  // Registers will be used as follows:
  // t1 = materialized value (RegExp literal)
  // t0 = JS function, literals array
  // a3 = literal index
  // a2 = RegExp pattern
  // a1 = RegExp flags
  // a0 = RegExp literal clone
  __ lw(a0, MemOperand(fp, JavaScriptFrameConstants::kFunctionOffset));
  __ lw(t0, FieldMemOperand(a0, JSFunction::kLiteralsOffset));
  int literal_offset =
      FixedArray::kHeaderSize + expr->literal_index() * kPointerSize;
  __ lw(t1, FieldMemOperand(t0, literal_offset));
  // An undefined literal slot means the boilerplate is not yet created.
  __ LoadRoot(at, Heap::kUndefinedValueRootIndex);
  __ Branch(&materialized, ne, t1, Operand(at));

  // Create regexp literal using runtime function.
  // Result will be in v0.
  __ li(a3, Operand(Smi::FromInt(expr->literal_index())));
  __ li(a2, Operand(expr->pattern()));
  __ li(a1, Operand(expr->flags()));
  __ Push(t0, a3, a2, a1);
  __ CallRuntime(Runtime::kMaterializeRegExpLiteral, 4);
  __ mov(t1, v0);

  __ bind(&materialized);
  int size = JSRegExp::kSize + JSRegExp::kInObjectFieldCount * kPointerSize;
  Label allocated, runtime_allocate;
  // Fast path: bump-allocate the clone in new space.
  __ Allocate(size, v0, a2, a3, &runtime_allocate, TAG_OBJECT);
  __ jmp(&allocated);

  __ bind(&runtime_allocate);
  // Slow path: preserve the materialized regexp (t1) across the call.
  __ li(a0, Operand(Smi::FromInt(size)));
  __ Push(t1, a0);
  __ CallRuntime(Runtime::kAllocateInNewSpace, 1);
  __ pop(t1);

  __ bind(&allocated);

  // After this, registers are used as follows:
  // v0: Newly allocated regexp.
  // t1: Materialized regexp.
  // a2: temp.
  __ CopyFields(v0, t1, a2.bit(), size / kPointerSize);
  context()->Plug(v0);
}
1624
1625
Ben Murdoch3ef787d2012-04-12 10:51:47 +01001626void FullCodeGenerator::EmitAccessor(Expression* expression) {
1627 if (expression == NULL) {
1628 __ LoadRoot(a1, Heap::kNullValueRootIndex);
1629 __ push(a1);
1630 } else {
1631 VisitForStackValue(expression);
1632 }
1633}
1634
1635
// Emits code for an object literal: clones the boilerplate object (via a
// fast stub or Runtime::kCreateObjectLiteral), then stores each
// non-compile-time property — named properties through a store IC,
// computed ones through Runtime::kSetProperty, __proto__ through
// Runtime::kInternalSetPrototype — and finally defines accessor pairs in
// a single runtime call each. The resulting object is plugged into the
// current context.
void FullCodeGenerator::VisitObjectLiteral(ObjectLiteral* expr) {
  Comment cmnt(masm_, "[ ObjectLiteral");

  expr->BuildConstantProperties(isolate());
  Handle<FixedArray> constant_properties = expr->constant_properties();
  // a3 = literals array, a2 = literal index, a1 = constant properties,
  // a0 = flags: the argument layout shared by the stub and the runtime call.
  __ lw(a3, MemOperand(fp, JavaScriptFrameConstants::kFunctionOffset));
  __ lw(a3, FieldMemOperand(a3, JSFunction::kLiteralsOffset));
  __ li(a2, Operand(Smi::FromInt(expr->literal_index())));
  __ li(a1, Operand(constant_properties));
  int flags = expr->fast_elements()
      ? ObjectLiteral::kFastElements
      : ObjectLiteral::kNoFlags;
  flags |= expr->has_function()
      ? ObjectLiteral::kHasFunction
      : ObjectLiteral::kNoFlags;
  __ li(a0, Operand(Smi::FromInt(flags)));
  int properties_count = constant_properties->length() / 2;
  // Use the runtime when the literal is too deep/large or otherwise not
  // eligible for the fast-clone stub.
  if (expr->may_store_doubles() || expr->depth() > 1 ||
      masm()->serializer_enabled() || flags != ObjectLiteral::kFastElements ||
      properties_count > FastCloneShallowObjectStub::kMaximumClonedProperties) {
    __ Push(a3, a2, a1, a0);
    __ CallRuntime(Runtime::kCreateObjectLiteral, 4);
  } else {
    FastCloneShallowObjectStub stub(isolate(), properties_count);
    __ CallStub(&stub);
  }
  PrepareForBailoutForId(expr->CreateLiteralId(), TOS_REG);

  // If result_saved is true the result is on top of the stack. If
  // result_saved is false the result is in v0.
  bool result_saved = false;

  // Mark all computed expressions that are bound to a key that
  // is shadowed by a later occurrence of the same key. For the
  // marked expressions, no store code is emitted.
  expr->CalculateEmitStore(zone());

  AccessorTable accessor_table(zone());
  for (int i = 0; i < expr->properties()->length(); i++) {
    ObjectLiteral::Property* property = expr->properties()->at(i);
    if (property->IsCompileTimeValue()) continue;

    Literal* key = property->key();
    Expression* value = property->value();
    if (!result_saved) {
      __ push(v0);  // Save result on stack.
      result_saved = true;
    }
    switch (property->kind()) {
      case ObjectLiteral::Property::CONSTANT:
        UNREACHABLE();
      case ObjectLiteral::Property::MATERIALIZED_LITERAL:
        DCHECK(!CompileTimeValue::IsCompileTimeValue(property->value()));
        // Fall through.
      case ObjectLiteral::Property::COMPUTED:
        // It is safe to use [[Put]] here because the boilerplate already
        // contains computed properties with an uninitialized value.
        if (key->value()->IsInternalizedString()) {
          if (property->emit_store()) {
            VisitForAccumulatorValue(value);
            __ mov(StoreDescriptor::ValueRegister(), result_register());
            DCHECK(StoreDescriptor::ValueRegister().is(a0));
            __ li(StoreDescriptor::NameRegister(), Operand(key->value()));
            __ lw(StoreDescriptor::ReceiverRegister(), MemOperand(sp));
            CallStoreIC(key->LiteralFeedbackId());
            PrepareForBailoutForId(key->id(), NO_REGISTERS);

            // Methods referencing 'super' need their [[home_object]] set to
            // the literal (still on top of the stack here).
            if (NeedsHomeObject(value)) {
              __ Move(StoreDescriptor::ReceiverRegister(), v0);
              __ li(StoreDescriptor::NameRegister(),
                    Operand(isolate()->factory()->home_object_symbol()));
              __ lw(StoreDescriptor::ValueRegister(), MemOperand(sp));
              CallStoreIC();
            }
          } else {
            VisitForEffect(value);
          }
          break;
        }
        // Non-internalized-string key: go through Runtime::kSetProperty.
        // Duplicate receiver on stack.
        __ lw(a0, MemOperand(sp));
        __ push(a0);
        VisitForStackValue(key);
        VisitForStackValue(value);
        if (property->emit_store()) {
          EmitSetHomeObjectIfNeeded(value, 2);
          __ li(a0, Operand(Smi::FromInt(SLOPPY)));  // PropertyAttributes.
          __ push(a0);
          __ CallRuntime(Runtime::kSetProperty, 4);
        } else {
          __ Drop(3);
        }
        break;
      case ObjectLiteral::Property::PROTOTYPE:
        // Duplicate receiver on stack.
        __ lw(a0, MemOperand(sp));
        __ push(a0);
        VisitForStackValue(value);
        if (property->emit_store()) {
          __ CallRuntime(Runtime::kInternalSetPrototype, 2);
        } else {
          __ Drop(2);
        }
        break;
      case ObjectLiteral::Property::GETTER:
        // Accessors are collected first so getter/setter pairs for the same
        // key can be defined with one runtime call below.
        accessor_table.lookup(key)->second->getter = value;
        break;
      case ObjectLiteral::Property::SETTER:
        accessor_table.lookup(key)->second->setter = value;
        break;
    }
  }

  // Emit code to define accessors, using only a single call to the runtime for
  // each pair of corresponding getters and setters.
  for (AccessorTable::Iterator it = accessor_table.begin();
       it != accessor_table.end();
       ++it) {
    __ lw(a0, MemOperand(sp));  // Duplicate receiver.
    __ push(a0);
    VisitForStackValue(it->first);
    EmitAccessor(it->second->getter);
    EmitSetHomeObjectIfNeeded(it->second->getter, 2);
    EmitAccessor(it->second->setter);
    EmitSetHomeObjectIfNeeded(it->second->setter, 3);
    __ li(a0, Operand(Smi::FromInt(NONE)));
    __ push(a0);
    __ CallRuntime(Runtime::kDefineAccessorPropertyUnchecked, 5);
  }

  if (expr->has_function()) {
    DCHECK(result_saved);
    __ lw(a0, MemOperand(sp));
    __ push(a0);
    __ CallRuntime(Runtime::kToFastProperties, 1);
  }

  if (result_saved) {
    context()->PlugTOS();
  } else {
    context()->Plug(v0);
  }
}
1779
1780
// Emits code for an array literal: clones the boilerplate elements (via
// FastCloneShallowArrayStub or Runtime::kCreateArrayLiteral for deep/large
// literals), then evaluates and stores each non-compile-time-constant
// subexpression — with a direct store plus write barrier for fast object
// element kinds, or StoreArrayLiteralElementStub otherwise — and plugs the
// resulting array into the current context.
void FullCodeGenerator::VisitArrayLiteral(ArrayLiteral* expr) {
  Comment cmnt(masm_, "[ ArrayLiteral");

  expr->BuildConstantElements(isolate());
  int flags = expr->depth() == 1
      ? ArrayLiteral::kShallowElements
      : ArrayLiteral::kNoFlags;

  ZoneList<Expression*>* subexprs = expr->values();
  int length = subexprs->length();

  // constant_elements is a pair: [0] = elements kind (Smi), [1] = values.
  Handle<FixedArray> constant_elements = expr->constant_elements();
  DCHECK_EQ(2, constant_elements->length());
  ElementsKind constant_elements_kind =
      static_cast<ElementsKind>(Smi::cast(constant_elements->get(0))->value());
  bool has_fast_elements =
      IsFastObjectElementsKind(constant_elements_kind);
  Handle<FixedArrayBase> constant_elements_values(
      FixedArrayBase::cast(constant_elements->get(1)));

  AllocationSiteMode allocation_site_mode = TRACK_ALLOCATION_SITE;
  if (has_fast_elements && !FLAG_allocation_site_pretenuring) {
    // If the only customer of allocation sites is transitioning, then
    // we can turn it off if we don't have anywhere else to transition to.
    allocation_site_mode = DONT_TRACK_ALLOCATION_SITE;
  }

  __ mov(a0, result_register());
  __ lw(a3, MemOperand(fp, JavaScriptFrameConstants::kFunctionOffset));
  __ lw(a3, FieldMemOperand(a3, JSFunction::kLiteralsOffset));
  __ li(a2, Operand(Smi::FromInt(expr->literal_index())));
  __ li(a1, Operand(constant_elements));
  if (expr->depth() > 1 || length > JSObject::kInitialMaxFastElementArray) {
    __ li(a0, Operand(Smi::FromInt(flags)));
    __ Push(a3, a2, a1, a0);
    __ CallRuntime(Runtime::kCreateArrayLiteral, 4);
  } else {
    FastCloneShallowArrayStub stub(isolate(), allocation_site_mode);
    __ CallStub(&stub);
  }

  bool result_saved = false;  // Is the result saved to the stack?

  // Emit code to evaluate all the non-constant subexpressions and to store
  // them into the newly cloned array.
  for (int i = 0; i < length; i++) {
    Expression* subexpr = subexprs->at(i);
    // If the subexpression is a literal or a simple materialized literal it
    // is already set in the cloned array.
    if (CompileTimeValue::IsCompileTimeValue(subexpr)) continue;

    if (!result_saved) {
      // Save the array plus its literal index; both are popped at the end.
      __ push(v0);  // array literal
      __ Push(Smi::FromInt(expr->literal_index()));
      result_saved = true;
    }

    VisitForAccumulatorValue(subexpr);

    if (IsFastObjectElementsKind(constant_elements_kind)) {
      int offset = FixedArray::kHeaderSize + (i * kPointerSize);
      __ lw(t2, MemOperand(sp, kPointerSize));  // Copy of array literal.
      __ lw(a1, FieldMemOperand(t2, JSObject::kElementsOffset));
      __ sw(result_register(), FieldMemOperand(a1, offset));
      // Update the write barrier for the array store.
      __ RecordWriteField(a1, offset, result_register(), a2,
                          kRAHasBeenSaved, kDontSaveFPRegs,
                          EMIT_REMEMBERED_SET, INLINE_SMI_CHECK);
    } else {
      // Non-fast elements: delegate the store (and any needed transition)
      // to the stub.
      __ li(a3, Operand(Smi::FromInt(i)));
      __ mov(a0, result_register());
      StoreArrayLiteralElementStub stub(isolate());
      __ CallStub(&stub);
    }

    PrepareForBailoutForId(expr->GetIdForElement(i), NO_REGISTERS);
  }
  if (result_saved) {
    __ Pop();  // literal index
    context()->PlugTOS();
  } else {
    context()->Plug(v0);
  }
}
1865
1866
// Emits code for an assignment expression. Three phases, each dispatching
// on the LHS kind (variable, named/keyed property, named/keyed super
// property): (1) evaluate the LHS subexpressions, duplicating operands on
// the stack when the assignment is compound; (2) for compound assignments,
// load the current value and apply the binary operation; (3) store the
// result and plug it into the current context.
void FullCodeGenerator::VisitAssignment(Assignment* expr) {
  DCHECK(expr->target()->IsValidReferenceExpression());

  Comment cmnt(masm_, "[ Assignment");

  Property* property = expr->target()->AsProperty();
  LhsKind assign_type = GetAssignType(property);

  // Evaluate LHS expression.
  switch (assign_type) {
    case VARIABLE:
      // Nothing to do here.
      break;
    case NAMED_PROPERTY:
      if (expr->is_compound()) {
        // We need the receiver both on the stack and in the register.
        VisitForStackValue(property->obj());
        __ lw(LoadDescriptor::ReceiverRegister(), MemOperand(sp, 0));
      } else {
        VisitForStackValue(property->obj());
      }
      break;
    case NAMED_SUPER_PROPERTY:
      // Stack: this, home_object [, this, home_object for the compound load].
      VisitForStackValue(property->obj()->AsSuperReference()->this_var());
      EmitLoadHomeObject(property->obj()->AsSuperReference());
      __ Push(result_register());
      if (expr->is_compound()) {
        const Register scratch = a1;
        __ lw(scratch, MemOperand(sp, kPointerSize));
        __ Push(scratch, result_register());
      }
      break;
    case KEYED_SUPER_PROPERTY: {
      // Stack: this, home_object, key [, duplicated triple when compound].
      const Register scratch = a1;
      VisitForStackValue(property->obj()->AsSuperReference()->this_var());
      EmitLoadHomeObject(property->obj()->AsSuperReference());
      __ Move(scratch, result_register());
      VisitForAccumulatorValue(property->key());
      __ Push(scratch, result_register());
      if (expr->is_compound()) {
        const Register scratch1 = t0;
        __ lw(scratch1, MemOperand(sp, 2 * kPointerSize));
        __ Push(scratch1, scratch, result_register());
      }
      break;
    }
    case KEYED_PROPERTY:
      // We need the key and receiver on both the stack and in v0 and a1.
      if (expr->is_compound()) {
        VisitForStackValue(property->obj());
        VisitForStackValue(property->key());
        __ lw(LoadDescriptor::ReceiverRegister(),
              MemOperand(sp, 1 * kPointerSize));
        __ lw(LoadDescriptor::NameRegister(), MemOperand(sp, 0));
      } else {
        VisitForStackValue(property->obj());
        VisitForStackValue(property->key());
      }
      break;
  }

  // For compound assignments we need another deoptimization point after the
  // variable/property load.
  if (expr->is_compound()) {
    { AccumulatorValueContext context(this);
      switch (assign_type) {
        case VARIABLE:
          EmitVariableLoad(expr->target()->AsVariableProxy());
          PrepareForBailout(expr->target(), TOS_REG);
          break;
        case NAMED_PROPERTY:
          EmitNamedPropertyLoad(property);
          PrepareForBailoutForId(property->LoadId(), TOS_REG);
          break;
        case NAMED_SUPER_PROPERTY:
          EmitNamedSuperPropertyLoad(property);
          PrepareForBailoutForId(property->LoadId(), TOS_REG);
          break;
        case KEYED_SUPER_PROPERTY:
          EmitKeyedSuperPropertyLoad(property);
          PrepareForBailoutForId(property->LoadId(), TOS_REG);
          break;
        case KEYED_PROPERTY:
          EmitKeyedPropertyLoad(property);
          PrepareForBailoutForId(property->LoadId(), TOS_REG);
          break;
      }
    }

    Token::Value op = expr->binary_op();
    __ push(v0);  // Left operand goes on the stack.
    VisitForAccumulatorValue(expr->value());

    OverwriteMode mode = expr->value()->ResultOverwriteAllowed()
        ? OVERWRITE_RIGHT
        : NO_OVERWRITE;
    SetSourcePosition(expr->position() + 1);
    AccumulatorValueContext context(this);
    if (ShouldInlineSmiCase(op)) {
      EmitInlineSmiBinaryOp(expr->binary_operation(),
                            op,
                            mode,
                            expr->target(),
                            expr->value());
    } else {
      EmitBinaryOp(expr->binary_operation(), op, mode);
    }

    // Deoptimization point in case the binary operation may have side effects.
    PrepareForBailout(expr->binary_operation(), TOS_REG);
  } else {
    VisitForAccumulatorValue(expr->value());
  }

  // Record source position before possible IC call.
  SetSourcePosition(expr->position());

  // Store the value.
  switch (assign_type) {
    case VARIABLE:
      EmitVariableAssignment(expr->target()->AsVariableProxy()->var(),
                             expr->op());
      PrepareForBailoutForId(expr->AssignmentId(), TOS_REG);
      context()->Plug(v0);
      break;
    case NAMED_PROPERTY:
      EmitNamedPropertyAssignment(expr);
      break;
    case NAMED_SUPER_PROPERTY:
      EmitNamedSuperPropertyStore(property);
      context()->Plug(v0);
      break;
    case KEYED_SUPER_PROPERTY:
      EmitKeyedSuperPropertyStore(property);
      context()->Plug(v0);
      break;
    case KEYED_PROPERTY:
      EmitKeyedPropertyAssignment(expr);
      break;
  }
}
2008
2009
// Emits code for a 'yield' expression inside a generator function.  The
// yielded value is evaluated onto the stack first; depending on the yield
// kind this either suspends the generator (kInitial/kSuspend), closes it
// and returns a done iterator result (kFinal), or emits the full delegating
// iteration loop for 'yield*' (kDelegating).
void FullCodeGenerator::VisitYield(Yield* expr) {
  Comment cmnt(masm_, "[ Yield");
  // Evaluate yielded value first; the initial iterator definition depends on
  // this. It stays on the stack while we update the iterator.
  VisitForStackValue(expr->expression());

  switch (expr->yield_kind()) {
    case Yield::kSuspend:
      // Pop value from top-of-stack slot; box result into result register.
      EmitCreateIteratorResult(false);
      __ push(result_register());
      // Fall through.
    case Yield::kInitial: {
      Label suspend, continuation, post_runtime, resume;

      // &continuation is the point the generator jumps back to when it is
      // resumed later; skip over that trampoline on first execution.
      __ jmp(&suspend);

      __ bind(&continuation);
      __ jmp(&resume);

      __ bind(&suspend);
      VisitForAccumulatorValue(expr->generator_object());
      DCHECK(continuation.pos() > 0 && Smi::IsValid(continuation.pos()));
      // Record the resume point (smi-encoded code offset) and the current
      // context in the generator object; the context store needs a write
      // barrier since the generator object lives on the heap.
      __ li(a1, Operand(Smi::FromInt(continuation.pos())));
      __ sw(a1, FieldMemOperand(v0, JSGeneratorObject::kContinuationOffset));
      __ sw(cp, FieldMemOperand(v0, JSGeneratorObject::kContextOffset));
      __ mov(a1, cp);
      __ RecordWriteField(v0, JSGeneratorObject::kContextOffset, a1, a2,
                          kRAHasBeenSaved, kDontSaveFPRegs);
      // If sp is already at the start of the frame's expression-stack area
      // there is no operand stack to save and the runtime call is skipped.
      __ Addu(a1, fp, Operand(StandardFrameConstants::kExpressionsOffset));
      __ Branch(&post_runtime, eq, sp, Operand(a1));
      __ push(v0);  // generator object
      __ CallRuntime(Runtime::kSuspendJSGeneratorObject, 1);
      __ lw(cp, MemOperand(fp, StandardFrameConstants::kContextOffset));
      __ bind(&post_runtime);
      __ pop(result_register());
      EmitReturnSequence();

      __ bind(&resume);
      context()->Plug(result_register());
      break;
    }

    case Yield::kFinal: {
      // Final yield: mark the generator as closed, then return an iterator
      // result object with done == true.
      VisitForAccumulatorValue(expr->generator_object());
      __ li(a1, Operand(Smi::FromInt(JSGeneratorObject::kGeneratorClosed)));
      __ sw(a1, FieldMemOperand(result_register(),
                                JSGeneratorObject::kContinuationOffset));
      // Pop value from top-of-stack slot, box result into result register.
      EmitCreateIteratorResult(true);
      EmitUnwindBeforeReturn();
      EmitReturnSequence();
      break;
    }

    case Yield::kDelegating: {
      VisitForStackValue(expr->generator_object());

      // Initial stack layout is as follows:
      // [sp + 1 * kPointerSize] iter
      // [sp + 0 * kPointerSize] g

      Label l_catch, l_try, l_suspend, l_continuation, l_resume;
      Label l_next, l_call;
      Register load_receiver = LoadDescriptor::ReceiverRegister();
      Register load_name = LoadDescriptor::NameRegister();

      // Initial send value is undefined.
      __ LoadRoot(a0, Heap::kUndefinedValueRootIndex);
      __ Branch(&l_next);

      // catch (e) { receiver = iter; f = 'throw'; arg = e; goto l_call; }
      // Exceptions thrown while suspended are forwarded to iter.throw().
      __ bind(&l_catch);
      __ mov(a0, v0);
      handler_table()->set(expr->index(), Smi::FromInt(l_catch.pos()));
      __ LoadRoot(load_name, Heap::kthrow_stringRootIndex);  // "throw"
      __ lw(a3, MemOperand(sp, 1 * kPointerSize));           // iter
      __ Push(load_name, a3, a0);                     // "throw", iter, except
      __ jmp(&l_call);

      // try { received = %yield result }
      // Shuffle the received result above a try handler and yield it without
      // re-boxing.
      __ bind(&l_try);
      __ pop(a0);                                        // result
      __ PushTryHandler(StackHandler::CATCH, expr->index());
      const int handler_size = StackHandlerConstants::kSize;
      __ push(a0);                                       // result
      __ jmp(&l_suspend);
      __ bind(&l_continuation);
      __ mov(a0, v0);
      __ jmp(&l_resume);
      __ bind(&l_suspend);
      // The generator object sits below the result and the try handler.
      const int generator_object_depth = kPointerSize + handler_size;
      __ lw(a0, MemOperand(sp, generator_object_depth));
      __ push(a0);                                       // g
      DCHECK(l_continuation.pos() > 0 && Smi::IsValid(l_continuation.pos()));
      // Store the resume offset and context, as in the kInitial case above.
      __ li(a1, Operand(Smi::FromInt(l_continuation.pos())));
      __ sw(a1, FieldMemOperand(a0, JSGeneratorObject::kContinuationOffset));
      __ sw(cp, FieldMemOperand(a0, JSGeneratorObject::kContextOffset));
      __ mov(a1, cp);
      __ RecordWriteField(a0, JSGeneratorObject::kContextOffset, a1, a2,
                          kRAHasBeenSaved, kDontSaveFPRegs);
      __ CallRuntime(Runtime::kSuspendJSGeneratorObject, 1);
      __ lw(cp, MemOperand(fp, StandardFrameConstants::kContextOffset));
      __ pop(v0);                                      // result
      EmitReturnSequence();
      __ mov(a0, v0);
      __ bind(&l_resume);                              // received in a0
      __ PopTryHandler();

      // receiver = iter; f = 'next'; arg = received;
      __ bind(&l_next);

      __ LoadRoot(load_name, Heap::knext_stringRootIndex);  // "next"
      __ lw(a3, MemOperand(sp, 1 * kPointerSize));          // iter
      __ Push(load_name, a3, a0);                      // "next", iter, received

      // result = receiver[f](arg);
      // Load the method via the keyed load IC, then call it as a method.
      __ bind(&l_call);
      __ lw(load_receiver, MemOperand(sp, kPointerSize));
      __ lw(load_name, MemOperand(sp, 2 * kPointerSize));
      if (FLAG_vector_ics) {
        __ li(VectorLoadICDescriptor::SlotRegister(),
              Operand(SmiFromSlot(expr->KeyedLoadFeedbackSlot())));
      }
      Handle<Code> ic = CodeFactory::KeyedLoadIC(isolate()).code();
      CallIC(ic, TypeFeedbackId::None());
      __ mov(a0, v0);
      __ mov(a1, a0);
      __ sw(a1, MemOperand(sp, 2 * kPointerSize));
      CallFunctionStub stub(isolate(), 1, CALL_AS_METHOD);
      __ CallStub(&stub);

      __ lw(cp, MemOperand(fp, StandardFrameConstants::kContextOffset));
      __ Drop(1);  // The function is still on the stack; drop it.

      // if (!result.done) goto l_try;
      __ Move(load_receiver, v0);

      __ push(load_receiver);                               // save result
      __ LoadRoot(load_name, Heap::kdone_stringRootIndex);  // "done"
      if (FLAG_vector_ics) {
        __ li(VectorLoadICDescriptor::SlotRegister(),
              Operand(SmiFromSlot(expr->DoneFeedbackSlot())));
      }
      CallLoadIC(NOT_CONTEXTUAL);                           // v0=result.done
      __ mov(a0, v0);
      Handle<Code> bool_ic = ToBooleanStub::GetUninitialized(isolate());
      CallIC(bool_ic);
      __ Branch(&l_try, eq, v0, Operand(zero_reg));

      // result.value
      __ pop(load_receiver);                                 // result
      __ LoadRoot(load_name, Heap::kvalue_stringRootIndex);  // "value"
      if (FLAG_vector_ics) {
        __ li(VectorLoadICDescriptor::SlotRegister(),
              Operand(SmiFromSlot(expr->ValueFeedbackSlot())));
      }
      CallLoadIC(NOT_CONTEXTUAL);                            // v0=result.value
      context()->DropAndPlug(2, v0);                         // drop iter and g
      break;
    }
  }
}
2175
2176
// Emits code to resume a suspended generator.  Builds a fresh stack frame
// for the generator function (receiver plus hole values for the formal
// parameters), then either jumps directly back to the recorded continuation
// offset (fast path, NEXT mode with an empty saved operand stack) or calls
// the runtime to reconstruct the operand stack and handlers.
void FullCodeGenerator::EmitGeneratorResume(Expression *generator,
    Expression *value,
    JSGeneratorObject::ResumeMode resume_mode) {
  // The value stays in a0, and is ultimately read by the resumed generator, as
  // if CallRuntime(Runtime::kSuspendJSGeneratorObject) returned it. Or it
  // is read to throw the value when the resumed generator is already closed.
  // a1 will hold the generator object until the activation has been resumed.
  VisitForStackValue(generator);
  VisitForAccumulatorValue(value);
  __ pop(a1);

  // Load suspended function and context.
  __ lw(cp, FieldMemOperand(a1, JSGeneratorObject::kContextOffset));
  __ lw(t0, FieldMemOperand(a1, JSGeneratorObject::kFunctionOffset));

  // Load receiver and store as the first argument.
  __ lw(a2, FieldMemOperand(a1, JSGeneratorObject::kReceiverOffset));
  __ push(a2);

  // Push holes for the rest of the arguments to the generator function.
  // a3 counts down the (smi) formal parameter count.
  __ lw(a3, FieldMemOperand(t0, JSFunction::kSharedFunctionInfoOffset));
  __ lw(a3,
        FieldMemOperand(a3, SharedFunctionInfo::kFormalParameterCountOffset));
  __ LoadRoot(a2, Heap::kTheHoleValueRootIndex);
  Label push_argument_holes, push_frame;
  __ bind(&push_argument_holes);
  __ Subu(a3, a3, Operand(Smi::FromInt(1)));
  __ Branch(&push_frame, lt, a3, Operand(zero_reg));
  __ push(a2);
  __ jmp(&push_argument_holes);

  // Enter a new JavaScript frame, and initialize its slots as they were when
  // the generator was suspended.
  Label resume_frame, done;
  __ bind(&push_frame);
  // Use Call (not a jump) so that ra holds a valid return address for the
  // new frame.
  __ Call(&resume_frame);
  __ jmp(&done);
  __ bind(&resume_frame);
  // ra = return address.
  // fp = caller's frame pointer.
  // cp = callee's context,
  // t0 = callee's JS function.
  __ Push(ra, fp, cp, t0);
  // Adjust FP to point to saved FP.
  __ Addu(fp, sp, 2 * kPointerSize);

  // Load the operand stack size.
  __ lw(a3, FieldMemOperand(a1, JSGeneratorObject::kOperandStackOffset));
  __ lw(a3, FieldMemOperand(a3, FixedArray::kLengthOffset));
  __ SmiUntag(a3);

  // If we are sending a value and there is no operand stack, we can jump back
  // in directly.
  if (resume_mode == JSGeneratorObject::NEXT) {
    Label slow_resume;
    __ Branch(&slow_resume, ne, a3, Operand(zero_reg));
    __ lw(a3, FieldMemOperand(t0, JSFunction::kCodeEntryOffset));
    __ lw(a2, FieldMemOperand(a1, JSGeneratorObject::kContinuationOffset));
    __ SmiUntag(a2);
    // Absolute resume address = code entry + stored continuation offset.
    __ Addu(a3, a3, Operand(a2));
    // Mark the generator as executing before jumping into its body.
    __ li(a2, Operand(Smi::FromInt(JSGeneratorObject::kGeneratorExecuting)));
    __ sw(a2, FieldMemOperand(a1, JSGeneratorObject::kContinuationOffset));
    __ Jump(a3);
    __ bind(&slow_resume);
  }

  // Otherwise, we push holes for the operand stack and call the runtime to fix
  // up the stack and the handlers.
  Label push_operand_holes, call_resume;
  __ bind(&push_operand_holes);
  __ Subu(a3, a3, Operand(1));
  __ Branch(&call_resume, lt, a3, Operand(zero_reg));
  __ push(a2);
  __ Branch(&push_operand_holes);
  __ bind(&call_resume);
  DCHECK(!result_register().is(a1));
  __ Push(a1, result_register());
  __ Push(Smi::FromInt(resume_mode));
  __ CallRuntime(Runtime::kResumeJSGeneratorObject, 3);
  // Not reached: the runtime call returns elsewhere.
  __ stop("not-reached");

  __ bind(&done);
  context()->Plug(result_register());
}
2262
2263
// Allocates and initializes a JSObject with the native context's iterator
// result map: the 'value' field is popped from the stack, 'done' is the
// given compile-time constant.  The new object is left in v0.
void FullCodeGenerator::EmitCreateIteratorResult(bool done) {
  Label gc_required;
  Label allocated;

  const int instance_size = 5 * kPointerSize;
  DCHECK_EQ(isolate()->native_context()->iterator_result_map()->instance_size(),
            instance_size);

  __ Allocate(instance_size, v0, a2, a3, &gc_required, TAG_OBJECT);
  __ jmp(&allocated);

  // Slow path: inline allocation failed, go through the runtime and then
  // restore the context register it may have clobbered.
  __ bind(&gc_required);
  __ Push(Smi::FromInt(instance_size));
  __ CallRuntime(Runtime::kAllocateInNewSpace, 1);
  __ lw(context_register(),
        MemOperand(fp, StandardFrameConstants::kContextOffset));

  __ bind(&allocated);
  // Fetch the iterator result map from the native context.
  __ lw(a1, ContextOperand(cp, Context::GLOBAL_OBJECT_INDEX));
  __ lw(a1, FieldMemOperand(a1, GlobalObject::kNativeContextOffset));
  __ lw(a1, ContextOperand(a1, Context::ITERATOR_RESULT_MAP_INDEX));
  __ pop(a2);  // the boxed value
  __ li(a3, Operand(isolate()->factory()->ToBoolean(done)));
  __ li(t0, Operand(isolate()->factory()->empty_fixed_array()));
  __ sw(a1, FieldMemOperand(v0, HeapObject::kMapOffset));
  __ sw(t0, FieldMemOperand(v0, JSObject::kPropertiesOffset));
  __ sw(t0, FieldMemOperand(v0, JSObject::kElementsOffset));
  __ sw(a2,
        FieldMemOperand(v0, JSGeneratorObject::kResultValuePropertyOffset));
  __ sw(a3,
        FieldMemOperand(v0, JSGeneratorObject::kResultDonePropertyOffset));

  // Only the value field needs a write barrier, as the other values are in the
  // root set.
  __ RecordWriteField(v0, JSGeneratorObject::kResultValuePropertyOffset,
                      a2, a3, kRAHasBeenSaved, kDontSaveFPRegs);
}
2301
2302
// Emits a load of a named (non-super) property through the LoadIC.  The
// receiver is expected in LoadDescriptor::ReceiverRegister() (set up by the
// caller, see VisitProperty); the property name is loaded here.
void FullCodeGenerator::EmitNamedPropertyLoad(Property* prop) {
  SetSourcePosition(prop->position());
  Literal* key = prop->key()->AsLiteral();
  DCHECK(!prop->IsSuperAccess());

  __ li(LoadDescriptor::NameRegister(), Operand(key->value()));
  if (FLAG_vector_ics) {
    // Vector ICs take the feedback slot in a register instead of attaching
    // a feedback id to the call site.
    __ li(VectorLoadICDescriptor::SlotRegister(),
          Operand(SmiFromSlot(prop->PropertyFeedbackSlot())));
    CallLoadIC(NOT_CONTEXTUAL);
  } else {
    CallLoadIC(NOT_CONTEXTUAL, prop->PropertyFeedbackId());
  }
}
2317
2318
// Emits a load of a named property from a 'super' reference by calling
// %LoadFromSuper.  The receiver and home object are already on the stack;
// pushing the key here completes the runtime call's three arguments.
void FullCodeGenerator::EmitNamedSuperPropertyLoad(Property* prop) {
  // Stack: receiver, home_object.
  SetSourcePosition(prop->position());
  Literal* key = prop->key()->AsLiteral();
  DCHECK(!key->value()->IsSmi());
  DCHECK(prop->IsSuperAccess());

  __ Push(key->value());
  __ CallRuntime(Runtime::kLoadFromSuper, 3);
}
2329
2330
// Emits a keyed (non-super) property load through the KeyedLoadIC.  The
// receiver and key are expected in the LoadDescriptor registers (set up by
// the caller, see VisitProperty).
void FullCodeGenerator::EmitKeyedPropertyLoad(Property* prop) {
  SetSourcePosition(prop->position());
  Handle<Code> ic = CodeFactory::KeyedLoadIC(isolate()).code();
  if (FLAG_vector_ics) {
    // Vector ICs take the feedback slot in a register instead of attaching
    // a feedback id to the call site.
    __ li(VectorLoadICDescriptor::SlotRegister(),
          Operand(SmiFromSlot(prop->PropertyFeedbackSlot())));
    CallIC(ic);
  } else {
    CallIC(ic, prop->PropertyFeedbackId());
  }
}
2342
2343
// Emits a keyed property load from a 'super' reference by calling
// %LoadKeyedFromSuper; its three arguments are already on the stack.
void FullCodeGenerator::EmitKeyedSuperPropertyLoad(Property* prop) {
  // Stack: receiver, home_object, key.
  SetSourcePosition(prop->position());

  __ CallRuntime(Runtime::kLoadKeyedFromSuper, 3);
}
2350
2351
// Emits the fast inlined code for a binary operation whose operands may
// both be smis.  The left operand is popped from the stack, the right one
// is in the accumulator.  A combined smi check dispatches either to the
// hand-written smi arithmetic below or, through the patchable jump site,
// to the generic BinaryOpIC stub.
void FullCodeGenerator::EmitInlineSmiBinaryOp(BinaryOperation* expr,
                                              Token::Value op,
                                              OverwriteMode mode,
                                              Expression* left_expr,
                                              Expression* right_expr) {
  Label done, smi_case, stub_call;

  Register scratch1 = a2;
  Register scratch2 = a3;

  // Get the arguments.
  Register left = a1;
  Register right = a0;
  __ pop(left);
  __ mov(a0, result_register());

  // Perform combined smi check on both operands.  Or-ing the operands
  // leaves the tag bit set iff at least one operand is not a smi.
  __ Or(scratch1, left, Operand(right));
  STATIC_ASSERT(kSmiTag == 0);
  JumpPatchSite patch_site(masm_);
  patch_site.EmitJumpIfSmi(scratch1, &smi_case);

  __ bind(&stub_call);
  Handle<Code> code = CodeFactory::BinaryOpIC(isolate(), op, mode).code();
  CallIC(code, expr->BinaryOperationFeedbackId());
  patch_site.EmitPatchInfo();
  __ jmp(&done);

  __ bind(&smi_case);
  // Smi case. This code works the same way as the smi-smi case in the
  // type-recording binary operation stub (BinaryOpIC).
  switch (op) {
    case Token::SAR:
      // Arithmetic shift right; afterwards mask off the bits shifted into
      // the smi tag position.
      __ GetLeastBitsFromSmi(scratch1, right, 5);
      __ srav(right, left, scratch1);
      __ And(v0, right, Operand(~kSmiTagMask));
      break;
    case Token::SHL: {
      __ SmiUntag(scratch1, left);
      __ GetLeastBitsFromSmi(scratch2, right, 5);
      __ sllv(scratch1, scratch1, scratch2);
      // Bail out to the stub if the shifted value does not fit in a smi.
      __ Addu(scratch2, scratch1, Operand(0x40000000));
      __ Branch(&stub_call, lt, scratch2, Operand(zero_reg));
      __ SmiTag(v0, scratch1);
      break;
    }
    case Token::SHR: {
      __ SmiUntag(scratch1, left);
      __ GetLeastBitsFromSmi(scratch2, right, 5);
      __ srlv(scratch1, scratch1, scratch2);
      // Unsigned shift must yield a non-negative smi: the top two bits of
      // the result have to be clear.
      __ And(scratch2, scratch1, 0xc0000000);
      __ Branch(&stub_call, ne, scratch2, Operand(zero_reg));
      __ SmiTag(v0, scratch1);
      break;
    }
    case Token::ADD:
      __ AdduAndCheckForOverflow(v0, left, right, scratch1);
      __ BranchOnOverflow(&stub_call, scratch1);
      break;
    case Token::SUB:
      __ SubuAndCheckForOverflow(v0, left, right, scratch1);
      __ BranchOnOverflow(&stub_call, scratch1);
      break;
    case Token::MUL: {
      __ SmiUntag(scratch1, right);
      __ Mul(scratch2, v0, left, scratch1);
      // Overflow check: the high word must equal the sign extension of the
      // low word, otherwise fall back to the stub.
      __ sra(scratch1, v0, 31);
      __ Branch(&stub_call, ne, scratch1, Operand(scratch2));
      __ Branch(&done, ne, v0, Operand(zero_reg));
      // The product is zero.  If one operand was negative the result should
      // be -0, which is not a smi, so go to the stub in that case.
      __ Addu(scratch2, right, left);
      __ Branch(&stub_call, lt, scratch2, Operand(zero_reg));
      DCHECK(Smi::FromInt(0) == 0);
      __ mov(v0, zero_reg);
      break;
    }
    case Token::BIT_OR:
      __ Or(v0, left, Operand(right));
      break;
    case Token::BIT_AND:
      __ And(v0, left, Operand(right));
      break;
    case Token::BIT_XOR:
      __ Xor(v0, left, Operand(right));
      break;
    default:
      UNREACHABLE();
  }

  __ bind(&done);
  context()->Plug(v0);
}
2443
2444
// Emits code that installs the properties of a class literal: each method
// or accessor is defined via a runtime call on the constructor (static
// members) or on its prototype (instance members).  Afterwards both the
// prototype and the constructor are passed through %ToFastProperties.
void FullCodeGenerator::EmitClassDefineProperties(ClassLiteral* lit) {
  // Constructor is in v0.
  DCHECK(lit != NULL);
  __ push(v0);

  // No access check is needed here since the constructor is created by the
  // class literal.
  Register scratch = a1;
  __ lw(scratch,
        FieldMemOperand(v0, JSFunction::kPrototypeOrInitialMapOffset));
  __ push(scratch);

  for (int i = 0; i < lit->properties()->length(); i++) {
    ObjectLiteral::Property* property = lit->properties()->at(i);
    Literal* key = property->key()->AsLiteral();
    Expression* value = property->value();
    DCHECK(key != NULL);

    // Choose the target object: constructor for statics, prototype
    // otherwise.  Stack here: ..., constructor, prototype.
    if (property->is_static()) {
      __ lw(scratch, MemOperand(sp, kPointerSize));  // constructor
    } else {
      __ lw(scratch, MemOperand(sp, 0));  // prototype
    }
    __ push(scratch);
    VisitForStackValue(key);
    VisitForStackValue(value);
    EmitSetHomeObjectIfNeeded(value, 2);

    // Each runtime call consumes (target, key, value) from the stack.
    switch (property->kind()) {
      case ObjectLiteral::Property::CONSTANT:
      case ObjectLiteral::Property::MATERIALIZED_LITERAL:
      case ObjectLiteral::Property::COMPUTED:
      case ObjectLiteral::Property::PROTOTYPE:
        __ CallRuntime(Runtime::kDefineClassMethod, 3);
        break;

      case ObjectLiteral::Property::GETTER:
        __ CallRuntime(Runtime::kDefineClassGetter, 3);
        break;

      case ObjectLiteral::Property::SETTER:
        __ CallRuntime(Runtime::kDefineClassSetter, 3);
        break;

      default:
        UNREACHABLE();
    }
  }

  // prototype
  __ CallRuntime(Runtime::kToFastProperties, 1);

  // constructor
  __ CallRuntime(Runtime::kToFastProperties, 1);
}
2500
2501
// Emits a call to the generic BinaryOpIC for 'left op right'.  The right
// operand is in the accumulator and is moved to a0; the left operand is
// popped from the stack into a1.  The result is plugged from v0.
void FullCodeGenerator::EmitBinaryOp(BinaryOperation* expr,
                                     Token::Value op,
                                     OverwriteMode mode) {
  __ mov(a0, result_register());
  __ pop(a1);
  Handle<Code> code = CodeFactory::BinaryOpIC(isolate(), op, mode).code();
  JumpPatchSite patch_site(masm_);  // unbound, signals no inlined smi code.
  CallIC(code, expr->BinaryOperationFeedbackId());
  patch_site.EmitPatchInfo();
  context()->Plug(v0);
}
2513
2514
// Stores the value in the accumulator into the reference expression 'expr'
// (variable, named/keyed property, or named/keyed super property).  The
// value is preserved across the evaluation of the target sub-expressions
// and is left in v0 afterwards.
void FullCodeGenerator::EmitAssignment(Expression* expr) {
  DCHECK(expr->IsValidReferenceExpression());

  Property* prop = expr->AsProperty();
  LhsKind assign_type = GetAssignType(prop);

  switch (assign_type) {
    case VARIABLE: {
      Variable* var = expr->AsVariableProxy()->var();
      EffectContext context(this);
      EmitVariableAssignment(var, Token::ASSIGN);
      break;
    }
    case NAMED_PROPERTY: {
      __ push(result_register());  // Preserve value.
      VisitForAccumulatorValue(prop->obj());
      __ mov(StoreDescriptor::ReceiverRegister(), result_register());
      __ pop(StoreDescriptor::ValueRegister());  // Restore value.
      __ li(StoreDescriptor::NameRegister(),
            Operand(prop->key()->AsLiteral()->value()));
      CallStoreIC();
      break;
    }
    case NAMED_SUPER_PROPERTY: {
      // Rearrange the stack into the (this, home_object) layout that
      // EmitNamedSuperPropertyStore expects, with the value back in v0.
      __ Push(v0);
      VisitForStackValue(prop->obj()->AsSuperReference()->this_var());
      EmitLoadHomeObject(prop->obj()->AsSuperReference());
      // stack: value, this; v0: home_object
      Register scratch = a2;
      Register scratch2 = a3;
      __ mov(scratch, result_register());             // home_object
      __ lw(v0, MemOperand(sp, kPointerSize));        // value
      __ lw(scratch2, MemOperand(sp, 0));             // this
      __ sw(scratch2, MemOperand(sp, kPointerSize));  // this
      __ sw(scratch, MemOperand(sp, 0));              // home_object
      // stack: this, home_object; v0: value
      EmitNamedSuperPropertyStore(prop);
      break;
    }
    case KEYED_SUPER_PROPERTY: {
      // Rearrange the stack into the (this, home_object, key) layout that
      // EmitKeyedSuperPropertyStore expects, with the value back in v0.
      __ Push(v0);
      VisitForStackValue(prop->obj()->AsSuperReference()->this_var());
      EmitLoadHomeObject(prop->obj()->AsSuperReference());
      __ Push(result_register());
      VisitForAccumulatorValue(prop->key());
      Register scratch = a2;
      Register scratch2 = a3;
      __ lw(scratch2, MemOperand(sp, 2 * kPointerSize));  // value
      // stack: value, this, home_object; v0: key, a3: value
      __ lw(scratch, MemOperand(sp, kPointerSize));  // this
      __ sw(scratch, MemOperand(sp, 2 * kPointerSize));
      __ lw(scratch, MemOperand(sp, 0));  // home_object
      __ sw(scratch, MemOperand(sp, kPointerSize));
      __ sw(v0, MemOperand(sp, 0));
      __ Move(v0, scratch2);
      // stack: this, home_object, key; v0: value.
      EmitKeyedSuperPropertyStore(prop);
      break;
    }
    case KEYED_PROPERTY: {
      __ push(result_register());  // Preserve value.
      VisitForStackValue(prop->obj());
      VisitForAccumulatorValue(prop->key());
      __ mov(StoreDescriptor::NameRegister(), result_register());
      __ Pop(StoreDescriptor::ValueRegister(),
             StoreDescriptor::ReceiverRegister());
      Handle<Code> ic =
          CodeFactory::KeyedStoreIC(isolate(), strict_mode()).code();
      CallIC(ic);
      break;
    }
  }
  context()->Plug(v0);
}
2589
2590
// Stores the value in the result register into the given stack- or
// context-allocated variable slot, emitting the write barrier that a
// context (heap) slot requires.
// NOTE(review): for context slots, a1 is assumed to hold the context
// object containing the slot (presumably set up by VarOperand(var, a1)
// in the callers) — confirm against VarOperand.
void FullCodeGenerator::EmitStoreToStackLocalOrContextSlot(
    Variable* var, MemOperand location) {
  __ sw(result_register(), location);
  if (var->IsContextSlot()) {
    // RecordWrite may destroy all its register arguments.
    __ Move(a3, result_register());
    int offset = Context::SlotOffset(var->index());
    __ RecordWriteContextSlot(
        a1, offset, a3, a2, kRAHasBeenSaved, kDontSaveFPRegs);
  }
}
2602
2603
// Stores the value in the accumulator into the given variable, dispatching
// on the variable's allocation (global/IC, lookup slot/runtime, stack or
// context slot) and on the kind of assignment (plain assignment vs. the
// various const/let initialization tokens).
void FullCodeGenerator::EmitVariableAssignment(Variable* var, Token::Value op) {
  if (var->IsUnallocated()) {
    // Global var, const, or let.  Store through the StoreIC on the global
    // object.
    __ mov(StoreDescriptor::ValueRegister(), result_register());
    __ li(StoreDescriptor::NameRegister(), Operand(var->name()));
    __ lw(StoreDescriptor::ReceiverRegister(), GlobalObjectOperand());
    CallStoreIC();

  } else if (op == Token::INIT_CONST_LEGACY) {
    // Const initializers need a write barrier.
    DCHECK(!var->IsParameter());  // No const parameters.
    if (var->IsLookupSlot()) {
      __ li(a0, Operand(var->name()));
      __ Push(v0, cp, a0);  // Context and name.
      __ CallRuntime(Runtime::kInitializeLegacyConstLookupSlot, 3);
    } else {
      DCHECK(var->IsStackAllocated() || var->IsContextSlot());
      // Only initialize the slot if it still holds the hole, i.e. the
      // const has not been initialized yet.
      Label skip;
      MemOperand location = VarOperand(var, a1);
      __ lw(a2, location);
      __ LoadRoot(at, Heap::kTheHoleValueRootIndex);
      __ Branch(&skip, ne, a2, Operand(at));
      EmitStoreToStackLocalOrContextSlot(var, location);
      __ bind(&skip);
    }

  } else if (var->mode() == LET && op != Token::INIT_LET) {
    // Non-initializing assignment to let variable needs a write barrier.
    DCHECK(!var->IsLookupSlot());
    DCHECK(var->IsStackAllocated() || var->IsContextSlot());
    // If the slot still holds the hole, the let binding has not been
    // initialized yet: throw a ReferenceError (TDZ check).
    Label assign;
    MemOperand location = VarOperand(var, a1);
    __ lw(a3, location);
    __ LoadRoot(t0, Heap::kTheHoleValueRootIndex);
    __ Branch(&assign, ne, a3, Operand(t0));
    __ li(a3, Operand(var->name()));
    __ push(a3);
    __ CallRuntime(Runtime::kThrowReferenceError, 1);
    // Perform the assignment.
    __ bind(&assign);
    EmitStoreToStackLocalOrContextSlot(var, location);
  } else if (!var->is_const_mode() || op == Token::INIT_CONST) {
    if (var->IsLookupSlot()) {
      // Assignment to var.
      __ li(a1, Operand(var->name()));
      __ li(a0, Operand(Smi::FromInt(strict_mode())));
      __ Push(v0, cp, a1, a0);  // Value, context, name, strict mode.
      __ CallRuntime(Runtime::kStoreLookupSlot, 4);
    } else {
      // Assignment to var or initializing assignment to let/const in harmony
      // mode.
      DCHECK((var->IsStackAllocated() || var->IsContextSlot()));
      MemOperand location = VarOperand(var, a1);
      if (generate_debug_code_ && op == Token::INIT_LET) {
        // Check for an uninitialized let binding.
        __ lw(a2, location);
        __ LoadRoot(t0, Heap::kTheHoleValueRootIndex);
        __ Check(eq, kLetBindingReInitialization, a2, Operand(t0));
      }
      EmitStoreToStackLocalOrContextSlot(var, location);
    }
  } else if (IsSignallingAssignmentToConst(var, op, strict_mode())) {
    // Illegal assignment to a const binding: throw.
    __ CallRuntime(Runtime::kThrowConstAssignError, 0);
  }
}
2669
2670
// Emits a store to a named (non-super) property through the StoreIC.  The
// value is in the accumulator; the receiver is popped from the stack.
// Leaves the value in v0.
void FullCodeGenerator::EmitNamedPropertyAssignment(Assignment* expr) {
  // Assignment to a property, using a named store IC.
  Property* prop = expr->target()->AsProperty();
  DCHECK(prop != NULL);
  DCHECK(prop->key()->IsLiteral());

  // Record source code position before IC call.
  SetSourcePosition(expr->position());
  __ mov(StoreDescriptor::ValueRegister(), result_register());
  __ li(StoreDescriptor::NameRegister(),
        Operand(prop->key()->AsLiteral()->value()));
  __ pop(StoreDescriptor::ReceiverRegister());
  CallStoreIC(expr->AssignmentFeedbackId());

  PrepareForBailoutForId(expr->AssignmentId(), TOS_REG);
  context()->Plug(v0);
}
2688
2689
// Emits a store to a named property of 'super' via the runtime.  The
// receiver ('this') and home object are already on the stack; pushing the
// key and the value completes the four runtime-call arguments.
void FullCodeGenerator::EmitNamedSuperPropertyStore(Property* prop) {
  // Assignment to named property of super.
  // v0 : value
  // stack : receiver ('this'), home_object
  DCHECK(prop != NULL);
  Literal* key = prop->key()->AsLiteral();
  DCHECK(key != NULL);

  __ Push(key->value());
  __ Push(v0);
  __ CallRuntime((strict_mode() == STRICT ? Runtime::kStoreToSuper_Strict
                                          : Runtime::kStoreToSuper_Sloppy),
                 4);
}
2704
2705
// Emits a store to a keyed property of 'super' via the runtime.  The
// receiver ('this'), home object and key are already on the stack; pushing
// the value completes the four runtime-call arguments.
void FullCodeGenerator::EmitKeyedSuperPropertyStore(Property* prop) {
  // Assignment to keyed property of super.
  // v0 : value
  // stack : receiver ('this'), home_object, key
  DCHECK(prop != NULL);

  __ Push(v0);
  __ CallRuntime((strict_mode() == STRICT ? Runtime::kStoreKeyedToSuper_Strict
                                          : Runtime::kStoreKeyedToSuper_Sloppy),
                 4);
}
2717
2718
// Emits a store to a keyed (non-super) property through the KeyedStoreIC.
// The value is in the accumulator; receiver and key are popped from the
// stack.  Leaves the value in v0.
void FullCodeGenerator::EmitKeyedPropertyAssignment(Assignment* expr) {
  // Assignment to a property, using a keyed store IC.

  // Record source code position before IC call.
  SetSourcePosition(expr->position());
  // Call keyed store IC.
  // The arguments are:
  // - a0 is the value,
  // - a1 is the key,
  // - a2 is the receiver.
  __ mov(StoreDescriptor::ValueRegister(), result_register());
  __ Pop(StoreDescriptor::ReceiverRegister(), StoreDescriptor::NameRegister());
  DCHECK(StoreDescriptor::ValueRegister().is(a0));

  Handle<Code> ic = CodeFactory::KeyedStoreIC(isolate(), strict_mode()).code();
  CallIC(ic, expr->AssignmentFeedbackId());

  PrepareForBailoutForId(expr->AssignmentId(), TOS_REG);
  context()->Plug(v0);
}
2739
2740
// Compiles a property access expression (o.x, o[k], super.x, super[k]),
// selecting among the named/keyed and normal/super load paths.  The loaded
// value is plugged from v0.
void FullCodeGenerator::VisitProperty(Property* expr) {
  Comment cmnt(masm_, "[ Property");
  Expression* key = expr->key();

  if (key->IsPropertyName()) {
    if (!expr->IsSuperAccess()) {
      // o.x: receiver goes in the LoadIC's receiver register.
      VisitForAccumulatorValue(expr->obj());
      __ Move(LoadDescriptor::ReceiverRegister(), v0);
      EmitNamedPropertyLoad(expr);
    } else {
      // super.x: push 'this' and the home object for the runtime call.
      VisitForStackValue(expr->obj()->AsSuperReference()->this_var());
      EmitLoadHomeObject(expr->obj()->AsSuperReference());
      __ Push(result_register());
      EmitNamedSuperPropertyLoad(expr);
    }
    PrepareForBailoutForId(expr->LoadId(), TOS_REG);
    context()->Plug(v0);
  } else {
    if (!expr->IsSuperAccess()) {
      // o[k]: receiver is popped into the IC's receiver register, the key
      // goes in the name register.
      VisitForStackValue(expr->obj());
      VisitForAccumulatorValue(expr->key());
      __ Move(LoadDescriptor::NameRegister(), v0);
      __ pop(LoadDescriptor::ReceiverRegister());
      EmitKeyedPropertyLoad(expr);
    } else {
      // super[k]: push 'this', the home object and the key for the runtime.
      VisitForStackValue(expr->obj()->AsSuperReference()->this_var());
      EmitLoadHomeObject(expr->obj()->AsSuperReference());
      __ Push(result_register());
      VisitForStackValue(expr->key());
      EmitKeyedSuperPropertyLoad(expr);
    }
    context()->Plug(v0);
  }
}
2775
Steve Block44f0eee2011-05-26 01:26:41 +01002776
Ben Murdoch3ef787d2012-04-12 10:51:47 +01002777void FullCodeGenerator::CallIC(Handle<Code> code,
Ben Murdochb8a8cc12014-11-26 15:28:44 +00002778 TypeFeedbackId id) {
Ben Murdoch3ef787d2012-04-12 10:51:47 +01002779 ic_total_count_++;
Ben Murdochb8a8cc12014-11-26 15:28:44 +00002780 __ Call(code, RelocInfo::CODE_TARGET, id);
Ben Murdoch3ef787d2012-04-12 10:51:47 +01002781}
2782
2783
// Code common for calls using the IC.
// Emits a call where the callee is either a plain variable or a named
// (non-super) property; in both cases the callee is resolved with a
// load IC and the stack ends up as [function, receiver, ...args].
void FullCodeGenerator::EmitCallWithLoadIC(Call* expr) {
  Expression* callee = expr->expression();

  CallICState::CallType call_type =
      callee->IsVariableProxy() ? CallICState::FUNCTION : CallICState::METHOD;

  // Get the target function.
  if (call_type == CallICState::FUNCTION) {
    { StackValueContext context(this);
      EmitVariableLoad(callee->AsVariableProxy());
      PrepareForBailout(callee, NO_REGISTERS);
    }
    // Push undefined as receiver. This is patched in the method prologue if it
    // is a sloppy mode method.
    __ Push(isolate()->factory()->undefined_value());
  } else {
    // Load the function from the receiver.
    DCHECK(callee->IsProperty());
    DCHECK(!callee->AsProperty()->IsSuperAccess());
    __ lw(LoadDescriptor::ReceiverRegister(), MemOperand(sp, 0));
    EmitNamedPropertyLoad(callee->AsProperty());
    PrepareForBailoutForId(callee->AsProperty()->LoadId(), TOS_REG);
    // Push the target function under the receiver: duplicate the receiver
    // on top, then overwrite the old slot with the function in v0.
    __ lw(at, MemOperand(sp, 0));
    __ push(at);
    __ sw(v0, MemOperand(sp, kPointerSize));
  }

  EmitCall(expr, call_type);
}
2815
2816
// Emits a call of the form super.name(...): resolves the target function
// with Runtime::kLoadFromSuper, then performs a method call.
void FullCodeGenerator::EmitSuperCallWithLoadIC(Call* expr) {
  Expression* callee = expr->expression();
  DCHECK(callee->IsProperty());
  Property* prop = callee->AsProperty();
  DCHECK(prop->IsSuperAccess());

  SetSourcePosition(prop->position());
  Literal* key = prop->key()->AsLiteral();
  // Named super access always has a non-smi (string) key.
  DCHECK(!key->value()->IsSmi());
  // Load the function from the receiver.
  const Register scratch = a1;
  SuperReference* super_ref = prop->obj()->AsSuperReference();
  EmitLoadHomeObject(super_ref);
  // Keep home_object in scratch while 'this' is evaluated into v0.
  __ mov(scratch, v0);
  VisitForAccumulatorValue(super_ref->this_var());
  // Push home_object, this, this, home_object in one go (see stack
  // layout comment below), then the property name.
  __ Push(scratch, v0, v0, scratch);
  __ Push(key->value());

  // Stack here:
  //  - home_object
  //  - this (receiver)
  //  - this (receiver) <-- LoadFromSuper will pop here and below.
  //  - home_object
  //  - key
  __ CallRuntime(Runtime::kLoadFromSuper, 3);

  // Replace home_object with target function.
  __ sw(v0, MemOperand(sp, kPointerSize));

  // Stack here:
  //  - target function
  //  - this (receiver)
  EmitCall(expr, CallICState::METHOD);
}
2851
2852
// Code common for calls using the IC.
// Emits a call of the form obj[key](...): the receiver is already on the
// stack; the callee is resolved with a keyed load IC.
void FullCodeGenerator::EmitKeyedCallWithLoadIC(Call* expr,
                                                Expression* key) {
  // Load the key.
  VisitForAccumulatorValue(key);

  Expression* callee = expr->expression();

  // Load the function from the receiver.
  DCHECK(callee->IsProperty());
  __ lw(LoadDescriptor::ReceiverRegister(), MemOperand(sp, 0));
  __ Move(LoadDescriptor::NameRegister(), v0);
  EmitKeyedPropertyLoad(callee->AsProperty());
  PrepareForBailoutForId(callee->AsProperty()->LoadId(), TOS_REG);

  // Push the target function under the receiver: duplicate the receiver
  // on top and overwrite the old slot with the function in v0.
  __ lw(at, MemOperand(sp, 0));
  __ push(at);
  __ sw(v0, MemOperand(sp, kPointerSize));

  EmitCall(expr, CallICState::METHOD);
}
2875
2876
// Emits a call of the form super[key](...): resolves the target function
// with Runtime::kLoadKeyedFromSuper, then performs a method call.
void FullCodeGenerator::EmitKeyedSuperCallWithLoadIC(Call* expr) {
  Expression* callee = expr->expression();
  DCHECK(callee->IsProperty());
  Property* prop = callee->AsProperty();
  DCHECK(prop->IsSuperAccess());

  SetSourcePosition(prop->position());
  // Load the function from the receiver.
  const Register scratch = a1;
  SuperReference* super_ref = prop->obj()->AsSuperReference();
  EmitLoadHomeObject(super_ref);
  // Keep home_object in scratch while 'this' is evaluated into v0.
  __ Move(scratch, v0);
  VisitForAccumulatorValue(super_ref->this_var());
  __ Push(scratch, v0, v0, scratch);
  VisitForStackValue(prop->key());

  // Stack here:
  //  - home_object
  //  - this (receiver)
  //  - this (receiver) <-- LoadKeyedFromSuper will pop here and below.
  //  - home_object
  //  - key
  __ CallRuntime(Runtime::kLoadKeyedFromSuper, 3);

  // Replace home_object with target function.
  __ sw(v0, MemOperand(sp, kPointerSize));

  // Stack here:
  //  - target function
  //  - this (receiver)
  EmitCall(expr, CallICState::METHOD);
}
2909
2910
// Final common tail of every call path: pushes the arguments, invokes the
// call IC, and plugs the result. Expects [function, receiver] already on
// the stack.
void FullCodeGenerator::EmitCall(Call* expr, CallICState::CallType call_type) {
  // Load the arguments.
  ZoneList<Expression*>* args = expr->arguments();
  int arg_count = args->length();
  { PreservePositionScope scope(masm()->positions_recorder());
    for (int i = 0; i < arg_count; i++) {
      VisitForStackValue(args->at(i));
    }
  }

  // Record source position of the IC call.
  SetSourcePosition(expr->position());
  Handle<Code> ic = CallIC::initialize_stub(
      isolate(), arg_count, call_type);
  // a3: feedback-vector slot for this call site (as a smi).
  __ li(a3, Operand(SmiFromSlot(expr->CallFeedbackSlot())));
  // a1: the function, found below the receiver and the arguments.
  __ lw(a1, MemOperand(sp, (arg_count + 1) * kPointerSize));
  // Don't assign a type feedback id to the IC, since type feedback is provided
  // by the vector above.
  CallIC(ic);

  RecordJSReturnSite(expr);
  // Restore context register.
  __ lw(cp, MemOperand(fp, StandardFrameConstants::kContextOffset));
  // Drop the function; the call result in v0 is the expression value.
  context()->DropAndPlug(1, v0);
}
2936
2937
// Emits the runtime call that resolves a possibly-direct eval. The caller
// has already pushed a copy of the eval function (the 6th runtime
// argument); this routine gathers the remaining five and makes the call.
void FullCodeGenerator::EmitResolvePossiblyDirectEval(int arg_count) {
  // t3: copy of the first argument or undefined if it doesn't exist.
  if (arg_count > 0) {
    __ lw(t3, MemOperand(sp, arg_count * kPointerSize));
  } else {
    __ LoadRoot(t3, Heap::kUndefinedValueRootIndex);
  }

  // t2: the enclosing function (from the current frame's function slot).
  __ lw(t2, MemOperand(fp, JavaScriptFrameConstants::kFunctionOffset));

  // t1: the receiver of the enclosing function.
  int receiver_offset = 2 + info_->scope()->num_parameters();
  __ lw(t1, MemOperand(fp, receiver_offset * kPointerSize));

  // t0: the strict mode (as a smi).
  __ li(t0, Operand(Smi::FromInt(strict_mode())));

  // a1: the start position of the scope the call resides in (as a smi).
  __ li(a1, Operand(Smi::FromInt(scope()->start_position())));

  // Do the runtime call. Together with the function copy pushed by the
  // caller this makes 6 arguments.
  __ Push(t3);
  __ Push(t2, t1, t0, a1);
  __ CallRuntime(Runtime::kResolvePossiblyDirectEval, 6);
}
2964
2965
// Loads the super constructor — the prototype of the current (enclosing)
// function — into v0 via Runtime::kGetPrototype.
void FullCodeGenerator::EmitLoadSuperConstructor(SuperReference* super_ref) {
  DCHECK(super_ref != NULL);
  // a0: the function of the current frame.
  __ lw(a0, MemOperand(fp, JavaScriptFrameConstants::kFunctionOffset));
  __ Push(a0);
  __ CallRuntime(Runtime::kGetPrototype, 1);
}
2972
2973
// Top-level dispatch for call expressions: selects the emission strategy
// based on the call type (eval, global, lookup-slot, property, super, or
// arbitrary expression).
void FullCodeGenerator::VisitCall(Call* expr) {
#ifdef DEBUG
  // We want to verify that RecordJSReturnSite gets called on all paths
  // through this function. Avoid early returns.
  expr->return_is_recorded_ = false;
#endif

  Comment cmnt(masm_, "[ Call");
  Expression* callee = expr->expression();
  Call::CallType call_type = expr->GetCallType(isolate());

  if (call_type == Call::POSSIBLY_EVAL_CALL) {
    // In a call to eval, we first call RuntimeHidden_ResolvePossiblyDirectEval
    // to resolve the function we need to call and the receiver of the
    // call. Then we call the resolved function using the given
    // arguments.
    ZoneList<Expression*>* args = expr->arguments();
    int arg_count = args->length();

    { PreservePositionScope pos_scope(masm()->positions_recorder());
      VisitForStackValue(callee);
      __ LoadRoot(a2, Heap::kUndefinedValueRootIndex);
      __ push(a2);  // Reserved receiver slot.

      // Push the arguments.
      for (int i = 0; i < arg_count; i++) {
        VisitForStackValue(args->at(i));
      }

      // Push a copy of the function (found below the arguments) and
      // resolve eval.
      __ lw(a1, MemOperand(sp, (arg_count + 1) * kPointerSize));
      __ push(a1);
      EmitResolvePossiblyDirectEval(arg_count);

      // The runtime call returns a pair of values in v0 (function) and
      // v1 (receiver). Touch up the stack with the right values.
      __ sw(v0, MemOperand(sp, (arg_count + 1) * kPointerSize));
      __ sw(v1, MemOperand(sp, arg_count * kPointerSize));

      PrepareForBailoutForId(expr->EvalOrLookupId(), NO_REGISTERS);
    }
    // Record source position for debugger.
    SetSourcePosition(expr->position());
    CallFunctionStub stub(isolate(), arg_count, NO_CALL_FUNCTION_FLAGS);
    __ lw(a1, MemOperand(sp, (arg_count + 1) * kPointerSize));
    __ CallStub(&stub);
    RecordJSReturnSite(expr);
    // Restore context register.
    __ lw(cp, MemOperand(fp, StandardFrameConstants::kContextOffset));
    context()->DropAndPlug(1, v0);
  } else if (call_type == Call::GLOBAL_CALL) {
    EmitCallWithLoadIC(expr);
  } else if (call_type == Call::LOOKUP_SLOT_CALL) {
    // Call to a lookup slot (dynamically introduced variable).
    VariableProxy* proxy = callee->AsVariableProxy();
    Label slow, done;

    { PreservePositionScope scope(masm()->positions_recorder());
      // Generate code for loading from variables potentially shadowed
      // by eval-introduced variables.
      EmitDynamicLookupFastCase(proxy, NOT_INSIDE_TYPEOF, &slow, &done);
    }

    __ bind(&slow);
    // Call the runtime to find the function to call (returned in v0)
    // and the object holding it (returned in v1).
    DCHECK(!context_register().is(a2));
    __ li(a2, Operand(proxy->name()));
    __ Push(context_register(), a2);
    __ CallRuntime(Runtime::kLoadLookupSlot, 2);
    __ Push(v0, v1);  // Function, receiver.
    PrepareForBailoutForId(expr->EvalOrLookupId(), NO_REGISTERS);

    // If fast case code has been generated, emit code to push the
    // function and receiver and have the slow path jump around this
    // code.
    if (done.is_linked()) {
      Label call;
      __ Branch(&call);
      __ bind(&done);
      // Push function.
      __ push(v0);
      // The receiver is implicitly the global receiver. Indicate this
      // by passing the hole to the call function stub.
      __ LoadRoot(a1, Heap::kUndefinedValueRootIndex);
      __ push(a1);
      __ bind(&call);
    }

    // The receiver is either the global receiver or an object found
    // by LoadContextSlot.
    EmitCall(expr);
  } else if (call_type == Call::PROPERTY_CALL) {
    Property* property = callee->AsProperty();
    bool is_named_call = property->key()->IsPropertyName();
    // Super property calls resolve the callee through the runtime; the
    // corresponding Emit* helpers push 'this'/home_object themselves.
    if (property->IsSuperAccess()) {
      if (is_named_call) {
        EmitSuperCallWithLoadIC(expr);
      } else {
        EmitKeyedSuperCallWithLoadIC(expr);
      }
    } else {
      {
        PreservePositionScope scope(masm()->positions_recorder());
        VisitForStackValue(property->obj());
      }
      if (is_named_call) {
        EmitCallWithLoadIC(expr);
      } else {
        EmitKeyedCallWithLoadIC(expr, property->key());
      }
    }
  } else if (call_type == Call::SUPER_CALL) {
    // super(...): the callee is the super constructor; 'this' is the
    // receiver.
    SuperReference* super_ref = callee->AsSuperReference();
    EmitLoadSuperConstructor(super_ref);
    __ Push(result_register());
    VisitForStackValue(super_ref->this_var());
    EmitCall(expr, CallICState::METHOD);
  } else {
    DCHECK(call_type == Call::OTHER_CALL);
    // Call to an arbitrary expression not handled specially above.
    { PreservePositionScope scope(masm()->positions_recorder());
      VisitForStackValue(callee);
    }
    // Push undefined as the receiver.
    __ LoadRoot(a1, Heap::kUndefinedValueRootIndex);
    __ push(a1);
    // Emit function call.
    EmitCall(expr);
  }

#ifdef DEBUG
  // RecordJSReturnSite should have been called.
  DCHECK(expr->return_is_recorded_);
#endif
}
3110
3111
// Emits a 'new' expression: evaluates the constructor and arguments, then
// calls the construct stub with call-site feedback.
void FullCodeGenerator::VisitCallNew(CallNew* expr) {
  Comment cmnt(masm_, "[ CallNew");
  // According to ECMA-262, section 11.2.2, page 44, the function
  // expression in new calls must be evaluated before the
  // arguments.

  // Push constructor on the stack. If it's not a function it's used as
  // receiver for CALL_NON_FUNCTION, otherwise the value on the stack is
  // ignored.
  if (expr->expression()->IsSuperReference()) {
    // new super(...): the constructor is the super constructor.
    EmitLoadSuperConstructor(expr->expression()->AsSuperReference());
    __ Push(result_register());
  } else {
    VisitForStackValue(expr->expression());
  }

  // Push the arguments ("left-to-right") on the stack.
  ZoneList<Expression*>* args = expr->arguments();
  int arg_count = args->length();
  for (int i = 0; i < arg_count; i++) {
    VisitForStackValue(args->at(i));
  }

  // Call the construct call builtin that handles allocation and
  // constructor invocation.
  SetSourcePosition(expr->position());

  // Load function and argument count into a1 and a0.
  __ li(a0, Operand(arg_count));
  __ lw(a1, MemOperand(sp, arg_count * kPointerSize));

  // Record call targets in unoptimized code.
  if (FLAG_pretenuring_call_new) {
    EnsureSlotContainsAllocationSite(expr->AllocationSiteFeedbackSlot());
    // The allocation-site slot is allocated right after the CallNew slot.
    DCHECK(expr->AllocationSiteFeedbackSlot().ToInt() ==
           expr->CallNewFeedbackSlot().ToInt() + 1);
  }

  // a2: feedback vector; a3: slot for this call site (as a smi).
  __ li(a2, FeedbackVector());
  __ li(a3, Operand(SmiFromSlot(expr->CallNewFeedbackSlot())));

  CallConstructStub stub(isolate(), RECORD_CONSTRUCTOR_TARGET);
  __ Call(stub.GetCode(), RelocInfo::CONSTRUCT_CALL);
  PrepareForBailoutForId(expr->ReturnId(), TOS_REG);
  // The new object is returned in v0.
  context()->Plug(v0);
}
3158
3159
// %_IsSmi(x): materializes true iff the argument is a smi (tag bit clear).
void FullCodeGenerator::EmitIsSmi(CallRuntime* expr) {
  ZoneList<Expression*>* args = expr->arguments();
  DCHECK(args->length() == 1);

  VisitForAccumulatorValue(args->at(0));

  Label materialize_true, materialize_false;
  Label* if_true = NULL;
  Label* if_false = NULL;
  Label* fall_through = NULL;
  context()->PrepareTest(&materialize_true, &materialize_false,
                         &if_true, &if_false, &fall_through);

  PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
  // t0 = v0 & kSmiTagMask; zero means smi.
  __ SmiTst(v0, t0);
  Split(eq, t0, Operand(zero_reg), if_true, if_false, fall_through);

  context()->Plug(if_true, if_false);
}
3179
3180
// %_IsNonNegativeSmi(x): true iff the argument is a smi with a clear sign
// bit.
void FullCodeGenerator::EmitIsNonNegativeSmi(CallRuntime* expr) {
  ZoneList<Expression*>* args = expr->arguments();
  DCHECK(args->length() == 1);

  VisitForAccumulatorValue(args->at(0));

  Label materialize_true, materialize_false;
  Label* if_true = NULL;
  Label* if_false = NULL;
  Label* fall_through = NULL;
  context()->PrepareTest(&materialize_true, &materialize_false,
                         &if_true, &if_false, &fall_through);

  PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
  // Tests both the smi tag and the sign bit in one mask.
  __ NonNegativeSmiTst(v0, at);
  Split(eq, at, Operand(zero_reg), if_true, if_false, fall_through);

  context()->Plug(if_true, if_false);
}
3200
3201
// %_IsObject(x): true for null and for non-undetectable heap objects whose
// instance type lies in the non-callable spec-object range.
void FullCodeGenerator::EmitIsObject(CallRuntime* expr) {
  ZoneList<Expression*>* args = expr->arguments();
  DCHECK(args->length() == 1);

  VisitForAccumulatorValue(args->at(0));

  Label materialize_true, materialize_false;
  Label* if_true = NULL;
  Label* if_false = NULL;
  Label* fall_through = NULL;
  context()->PrepareTest(&materialize_true, &materialize_false,
                         &if_true, &if_false, &fall_through);

  __ JumpIfSmi(v0, if_false);
  // null counts as an object.
  __ LoadRoot(at, Heap::kNullValueRootIndex);
  __ Branch(if_true, eq, v0, Operand(at));
  __ lw(a2, FieldMemOperand(v0, HeapObject::kMapOffset));
  // Undetectable objects behave like undefined when tested with typeof.
  __ lbu(a1, FieldMemOperand(a2, Map::kBitFieldOffset));
  __ And(at, a1, Operand(1 << Map::kIsUndetectable));
  __ Branch(if_false, ne, at, Operand(zero_reg));
  // Range check on the instance type.
  __ lbu(a1, FieldMemOperand(a2, Map::kInstanceTypeOffset));
  __ Branch(if_false, lt, a1, Operand(FIRST_NONCALLABLE_SPEC_OBJECT_TYPE));
  PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
  Split(le, a1, Operand(LAST_NONCALLABLE_SPEC_OBJECT_TYPE),
        if_true, if_false, fall_through);

  context()->Plug(if_true, if_false);
}
3231
3232
// %_IsSpecObject(x): true iff the argument is a heap object with instance
// type >= FIRST_SPEC_OBJECT_TYPE.
void FullCodeGenerator::EmitIsSpecObject(CallRuntime* expr) {
  ZoneList<Expression*>* args = expr->arguments();
  DCHECK(args->length() == 1);

  VisitForAccumulatorValue(args->at(0));

  Label materialize_true, materialize_false;
  Label* if_true = NULL;
  Label* if_false = NULL;
  Label* fall_through = NULL;
  context()->PrepareTest(&materialize_true, &materialize_false,
                         &if_true, &if_false, &fall_through);

  __ JumpIfSmi(v0, if_false);
  // a1 receives the instance type.
  __ GetObjectType(v0, a1, a1);
  PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
  Split(ge, a1, Operand(FIRST_SPEC_OBJECT_TYPE),
        if_true, if_false, fall_through);

  context()->Plug(if_true, if_false);
}
3254
3255
// %_IsUndetectableObject(x): true iff the argument is a heap object whose
// map has the undetectable bit set.
void FullCodeGenerator::EmitIsUndetectableObject(CallRuntime* expr) {
  ZoneList<Expression*>* args = expr->arguments();
  DCHECK(args->length() == 1);

  VisitForAccumulatorValue(args->at(0));

  Label materialize_true, materialize_false;
  Label* if_true = NULL;
  Label* if_false = NULL;
  Label* fall_through = NULL;
  context()->PrepareTest(&materialize_true, &materialize_false,
                         &if_true, &if_false, &fall_through);

  __ JumpIfSmi(v0, if_false);
  // Check the map's bit field for kIsUndetectable.
  __ lw(a1, FieldMemOperand(v0, HeapObject::kMapOffset));
  __ lbu(a1, FieldMemOperand(a1, Map::kBitFieldOffset));
  PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
  __ And(at, a1, Operand(1 << Map::kIsUndetectable));
  Split(ne, at, Operand(zero_reg), if_true, if_false, fall_through);

  context()->Plug(if_true, if_false);
}
3278
3279
// %_IsStringWrapperSafeForDefaultValueOf(x): true iff the object has no
// own 'valueOf' property and its prototype is the unmodified String
// prototype. Caches a positive answer in the map's bit field 2 so the
// descriptor walk runs at most once per map.
void FullCodeGenerator::EmitIsStringWrapperSafeForDefaultValueOf(
    CallRuntime* expr) {
  ZoneList<Expression*>* args = expr->arguments();
  DCHECK(args->length() == 1);

  VisitForAccumulatorValue(args->at(0));

  Label materialize_true, materialize_false, skip_lookup;
  Label* if_true = NULL;
  Label* if_false = NULL;
  Label* fall_through = NULL;
  context()->PrepareTest(&materialize_true, &materialize_false,
                         &if_true, &if_false, &fall_through);

  __ AssertNotSmi(v0);

  // Fast path: the map already carries the cached "safe" bit.
  __ lw(a1, FieldMemOperand(v0, HeapObject::kMapOffset));
  __ lbu(t0, FieldMemOperand(a1, Map::kBitField2Offset));
  __ And(t0, t0, 1 << Map::kStringWrapperSafeForDefaultValueOf);
  __ Branch(&skip_lookup, ne, t0, Operand(zero_reg));

  // Check for fast case object. Generate false result for slow case object.
  __ lw(a2, FieldMemOperand(v0, JSObject::kPropertiesOffset));
  __ lw(a2, FieldMemOperand(a2, HeapObject::kMapOffset));
  __ LoadRoot(t0, Heap::kHashTableMapRootIndex);
  __ Branch(if_false, eq, a2, Operand(t0));

  // Look for valueOf name in the descriptor array, and indicate false if
  // found. Since we omit an enumeration index check, if it is added via a
  // transition that shares its descriptor array, this is a false positive.
  Label entry, loop, done;

  // Skip loop if no descriptors are valid.
  __ NumberOfOwnDescriptors(a3, a1);
  __ Branch(&done, eq, a3, Operand(zero_reg));

  __ LoadInstanceDescriptors(a1, t0);
  // t0: descriptor array.
  // a3: valid entries in the descriptor array.
  STATIC_ASSERT(kSmiTag == 0);
  STATIC_ASSERT(kSmiTagSize == 1);
  STATIC_ASSERT(kPointerSize == 4);
  // a3 = number of descriptor fields to scan (entries * entry size).
  __ li(at, Operand(DescriptorArray::kDescriptorSize));
  __ Mul(a3, a3, at);
  // Calculate location of the first key name.
  __ Addu(t0, t0, Operand(DescriptorArray::kFirstOffset - kHeapObjectTag));
  // Calculate the end of the descriptor array.
  __ mov(a2, t0);
  __ sll(t1, a3, kPointerSizeLog2);
  __ Addu(a2, a2, t1);

  // Loop through all the keys in the descriptor array. If one of these is the
  // string "valueOf" the result is false.
  // The use of t2 to store the valueOf string assumes that it is not otherwise
  // used in the loop below.
  __ li(t2, Operand(isolate()->factory()->value_of_string()));
  __ jmp(&entry);
  __ bind(&loop);
  __ lw(a3, MemOperand(t0, 0));
  __ Branch(if_false, eq, a3, Operand(t2));
  __ Addu(t0, t0, Operand(DescriptorArray::kDescriptorSize * kPointerSize));
  __ bind(&entry);
  __ Branch(&loop, ne, t0, Operand(a2));

  __ bind(&done);

  // Set the bit in the map to indicate that there is no local valueOf field.
  __ lbu(a2, FieldMemOperand(a1, Map::kBitField2Offset));
  __ Or(a2, a2, Operand(1 << Map::kStringWrapperSafeForDefaultValueOf));
  __ sb(a2, FieldMemOperand(a1, Map::kBitField2Offset));

  __ bind(&skip_lookup);

  // If a valueOf property is not found on the object check that its
  // prototype is the un-modified String prototype. If not result is false.
  __ lw(a2, FieldMemOperand(a1, Map::kPrototypeOffset));
  __ JumpIfSmi(a2, if_false);
  __ lw(a2, FieldMemOperand(a2, HeapObject::kMapOffset));
  // Compare against the native context's String prototype map.
  __ lw(a3, ContextOperand(cp, Context::GLOBAL_OBJECT_INDEX));
  __ lw(a3, FieldMemOperand(a3, GlobalObject::kNativeContextOffset));
  __ lw(a3, ContextOperand(a3, Context::STRING_FUNCTION_PROTOTYPE_MAP_INDEX));
  PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
  Split(eq, a2, Operand(a3), if_true, if_false, fall_through);

  context()->Plug(if_true, if_false);
}
3366
3367
// %_IsFunction(x): true iff the argument is a heap object of type
// JS_FUNCTION_TYPE.
void FullCodeGenerator::EmitIsFunction(CallRuntime* expr) {
  ZoneList<Expression*>* args = expr->arguments();
  DCHECK(args->length() == 1);

  VisitForAccumulatorValue(args->at(0));

  Label materialize_true, materialize_false;
  Label* if_true = NULL;
  Label* if_false = NULL;
  Label* fall_through = NULL;
  context()->PrepareTest(&materialize_true, &materialize_false,
                         &if_true, &if_false, &fall_through);

  __ JumpIfSmi(v0, if_false);
  // a2 receives the instance type.
  __ GetObjectType(v0, a1, a2);
  PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
  __ Branch(if_true, eq, a2, Operand(JS_FUNCTION_TYPE));
  __ Branch(if_false);

  context()->Plug(if_true, if_false);
}
3389
3390
// %_IsMinusZero(x): true iff the argument is a heap number whose bit
// pattern is exactly -0.0 (high word 0x80000000, low word 0).
void FullCodeGenerator::EmitIsMinusZero(CallRuntime* expr) {
  ZoneList<Expression*>* args = expr->arguments();
  DCHECK(args->length() == 1);

  VisitForAccumulatorValue(args->at(0));

  Label materialize_true, materialize_false;
  Label* if_true = NULL;
  Label* if_false = NULL;
  Label* fall_through = NULL;
  context()->PrepareTest(&materialize_true, &materialize_false,
                         &if_true, &if_false, &fall_through);

  // Non-heap-numbers (including smis) are false.
  __ CheckMap(v0, a1, Heap::kHeapNumberMapRootIndex, if_false, DO_SMI_CHECK);
  // a2: high (sign/exponent) word, a1: low (mantissa) word.
  __ lw(a2, FieldMemOperand(v0, HeapNumber::kExponentOffset));
  __ lw(a1, FieldMemOperand(v0, HeapNumber::kMantissaOffset));
  __ li(t0, 0x80000000);
  Label not_nan;
  __ Branch(&not_nan, ne, a2, Operand(t0));
  // High word matched 0x80000000: now require the low word to be 0.
  __ mov(t0, zero_reg);
  __ mov(a2, a1);
  __ bind(&not_nan);

  PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
  Split(eq, a2, Operand(t0), if_true, if_false, fall_through);

  context()->Plug(if_true, if_false);
}
3419
3420
// Emits inline MIPS code testing whether the single argument is a JSArray
// (instance type JS_ARRAY_TYPE).  Smis fail immediately.
void FullCodeGenerator::EmitIsArray(CallRuntime* expr) {
  ZoneList<Expression*>* args = expr->arguments();
  DCHECK(args->length() == 1);

  VisitForAccumulatorValue(args->at(0));

  Label materialize_true, materialize_false;
  Label* if_true = NULL;
  Label* if_false = NULL;
  Label* fall_through = NULL;
  context()->PrepareTest(&materialize_true, &materialize_false,
                         &if_true, &if_false, &fall_through);

  __ JumpIfSmi(v0, if_false);
  __ GetObjectType(v0, a1, a1);  // a1 <- instance type.
  PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
  Split(eq, a1, Operand(JS_ARRAY_TYPE),
        if_true, if_false, fall_through);

  context()->Plug(if_true, if_false);
}
3442
3443
// Emits inline MIPS code testing whether the single argument is a JSRegExp
// (instance type JS_REGEXP_TYPE).  Smis fail immediately.
void FullCodeGenerator::EmitIsRegExp(CallRuntime* expr) {
  ZoneList<Expression*>* args = expr->arguments();
  DCHECK(args->length() == 1);

  VisitForAccumulatorValue(args->at(0));

  Label materialize_true, materialize_false;
  Label* if_true = NULL;
  Label* if_false = NULL;
  Label* fall_through = NULL;
  context()->PrepareTest(&materialize_true, &materialize_false,
                         &if_true, &if_false, &fall_through);

  __ JumpIfSmi(v0, if_false);
  __ GetObjectType(v0, a1, a1);  // a1 <- instance type.
  PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
  Split(eq, a1, Operand(JS_REGEXP_TYPE), if_true, if_false, fall_through);

  context()->Plug(if_true, if_false);
}
3464
3465
// Emits inline MIPS code testing whether the single argument is a JS proxy.
// Uses the unsigned-range trick: subtract FIRST_JS_PROXY_TYPE and compare
// 'lower-or-same' against the range width, covering both proxy types with a
// single branch.
void FullCodeGenerator::EmitIsJSProxy(CallRuntime* expr) {
  ZoneList<Expression*>* args = expr->arguments();
  DCHECK(args->length() == 1);

  VisitForAccumulatorValue(args->at(0));

  Label materialize_true, materialize_false;
  Label* if_true = NULL;
  Label* if_false = NULL;
  Label* fall_through = NULL;
  context()->PrepareTest(&materialize_true, &materialize_false, &if_true,
                         &if_false, &fall_through);

  __ JumpIfSmi(v0, if_false);
  Register map = a1;
  Register type_reg = a2;
  __ GetObjectType(v0, map, type_reg);
  // Bias the type so the proxy range starts at zero; 'ls' (unsigned <=)
  // then accepts exactly [FIRST_JS_PROXY_TYPE, LAST_JS_PROXY_TYPE].
  __ Subu(type_reg, type_reg, Operand(FIRST_JS_PROXY_TYPE));
  PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
  Split(ls, type_reg, Operand(LAST_JS_PROXY_TYPE - FIRST_JS_PROXY_TYPE),
        if_true, if_false, fall_through);

  context()->Plug(if_true, if_false);
}
3490
3491
// Emits inline MIPS code testing whether the current function was invoked as
// a constructor: walks up one frame (skipping an arguments-adaptor frame if
// present) and compares the frame marker against StackFrame::CONSTRUCT.
void FullCodeGenerator::EmitIsConstructCall(CallRuntime* expr) {
  DCHECK(expr->arguments()->length() == 0);

  Label materialize_true, materialize_false;
  Label* if_true = NULL;
  Label* if_false = NULL;
  Label* fall_through = NULL;
  context()->PrepareTest(&materialize_true, &materialize_false,
                         &if_true, &if_false, &fall_through);

  // Get the frame pointer for the calling frame.
  __ lw(a2, MemOperand(fp, StandardFrameConstants::kCallerFPOffset));

  // Skip the arguments adaptor frame if it exists.
  Label check_frame_marker;
  __ lw(a1, MemOperand(a2, StandardFrameConstants::kContextOffset));
  __ Branch(&check_frame_marker, ne,
            a1, Operand(Smi::FromInt(StackFrame::ARGUMENTS_ADAPTOR)));
  __ lw(a2, MemOperand(a2, StandardFrameConstants::kCallerFPOffset));

  // Check the marker in the calling frame.
  __ bind(&check_frame_marker);
  __ lw(a1, MemOperand(a2, StandardFrameConstants::kMarkerOffset));
  PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
  Split(eq, a1, Operand(Smi::FromInt(StackFrame::CONSTRUCT)),
        if_true, if_false, fall_through);

  context()->Plug(if_true, if_false);
}
3521
3522
// Emits inline MIPS code comparing two arguments for reference (pointer)
// equality: first argument via the stack, second via the accumulator.
void FullCodeGenerator::EmitObjectEquals(CallRuntime* expr) {
  ZoneList<Expression*>* args = expr->arguments();
  DCHECK(args->length() == 2);

  // Load the two objects into registers and perform the comparison.
  VisitForStackValue(args->at(0));
  VisitForAccumulatorValue(args->at(1));

  Label materialize_true, materialize_false;
  Label* if_true = NULL;
  Label* if_false = NULL;
  Label* fall_through = NULL;
  context()->PrepareTest(&materialize_true, &materialize_false,
                         &if_true, &if_false, &fall_through);

  __ pop(a1);  // a1 <- first argument; v0 holds the second.
  PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
  Split(eq, v0, Operand(a1), if_true, if_false, fall_through);

  context()->Plug(if_true, if_false);
}
3544
3545
// Emits inline MIPS code reading one element of the current function's
// arguments object by index, delegating to ArgumentsAccessStub.
void FullCodeGenerator::EmitArguments(CallRuntime* expr) {
  ZoneList<Expression*>* args = expr->arguments();
  DCHECK(args->length() == 1);

  // ArgumentsAccessStub expects the key in a1 and the formal
  // parameter count in a0.
  VisitForAccumulatorValue(args->at(0));
  __ mov(a1, v0);
  __ li(a0, Operand(Smi::FromInt(info_->scope()->num_parameters())));
  ArgumentsAccessStub stub(isolate(), ArgumentsAccessStub::READ_ELEMENT);
  __ CallStub(&stub);
  context()->Plug(v0);
}
3559
3560
// Emits inline MIPS code producing the number of actual arguments: defaults
// to the formal parameter count, overridden by the length stored in an
// arguments-adaptor frame when one is present.
void FullCodeGenerator::EmitArgumentsLength(CallRuntime* expr) {
  DCHECK(expr->arguments()->length() == 0);
  Label exit;
  // Get the number of formal parameters.
  __ li(v0, Operand(Smi::FromInt(info_->scope()->num_parameters())));

  // Check if the calling frame is an arguments adaptor frame.
  __ lw(a2, MemOperand(fp, StandardFrameConstants::kCallerFPOffset));
  __ lw(a3, MemOperand(a2, StandardFrameConstants::kContextOffset));
  __ Branch(&exit, ne, a3,
            Operand(Smi::FromInt(StackFrame::ARGUMENTS_ADAPTOR)));

  // Arguments adaptor case: Read the arguments length from the
  // adaptor frame.
  __ lw(v0, MemOperand(a2, ArgumentsAdaptorFrameConstants::kLengthOffset));

  __ bind(&exit);
  context()->Plug(v0);
}
3580
3581
// Emits inline MIPS code computing the "class" string of the argument:
// null for non-JS-objects, 'Function' for callables, the constructor's
// instance class name for ordinary objects, and 'Object' when the map's
// constructor slot is not a JSFunction.
void FullCodeGenerator::EmitClassOf(CallRuntime* expr) {
  ZoneList<Expression*>* args = expr->arguments();
  DCHECK(args->length() == 1);
  Label done, null, function, non_function_constructor;

  VisitForAccumulatorValue(args->at(0));

  // If the object is a smi, we return null.
  __ JumpIfSmi(v0, &null);

  // Check that the object is a JS object but take special care of JS
  // functions to make sure they have 'Function' as their class.
  // Assume that there are only two callable types, and one of them is at
  // either end of the type range for JS object types. Saves extra comparisons.
  STATIC_ASSERT(NUM_OF_CALLABLE_SPEC_OBJECT_TYPES == 2);
  __ GetObjectType(v0, v0, a1);  // Map is now in v0.
  __ Branch(&null, lt, a1, Operand(FIRST_SPEC_OBJECT_TYPE));

  STATIC_ASSERT(FIRST_NONCALLABLE_SPEC_OBJECT_TYPE ==
                FIRST_SPEC_OBJECT_TYPE + 1);
  __ Branch(&function, eq, a1, Operand(FIRST_SPEC_OBJECT_TYPE));

  STATIC_ASSERT(LAST_NONCALLABLE_SPEC_OBJECT_TYPE ==
                LAST_SPEC_OBJECT_TYPE - 1);
  __ Branch(&function, eq, a1, Operand(LAST_SPEC_OBJECT_TYPE));
  // Assume that there is no larger type.
  STATIC_ASSERT(LAST_NONCALLABLE_SPEC_OBJECT_TYPE == LAST_TYPE - 1);

  // Check if the constructor in the map is a JS function.
  __ lw(v0, FieldMemOperand(v0, Map::kConstructorOffset));
  __ GetObjectType(v0, a1, a1);
  __ Branch(&non_function_constructor, ne, a1, Operand(JS_FUNCTION_TYPE));

  // v0 now contains the constructor function. Grab the
  // instance class name from there.
  __ lw(v0, FieldMemOperand(v0, JSFunction::kSharedFunctionInfoOffset));
  __ lw(v0, FieldMemOperand(v0, SharedFunctionInfo::kInstanceClassNameOffset));
  __ Branch(&done);

  // Functions have class 'Function'.
  __ bind(&function);
  __ LoadRoot(v0, Heap::kFunction_stringRootIndex);
  __ jmp(&done);

  // Objects with a non-function constructor have class 'Object'.
  __ bind(&non_function_constructor);
  __ LoadRoot(v0, Heap::kObject_stringRootIndex);
  __ jmp(&done);

  // Non-JS objects have class null.
  __ bind(&null);
  __ LoadRoot(v0, Heap::kNullValueRootIndex);

  // All done.
  __ bind(&done);

  context()->Plug(v0);
}
3640
3641
// Emits a call to SubStringStub with (string, from, to) pushed on the stack.
// The stub leaves the resulting substring in v0.
void FullCodeGenerator::EmitSubString(CallRuntime* expr) {
  // Load the arguments on the stack and call the stub.
  SubStringStub stub(isolate());
  ZoneList<Expression*>* args = expr->arguments();
  DCHECK(args->length() == 3);
  VisitForStackValue(args->at(0));
  VisitForStackValue(args->at(1));
  VisitForStackValue(args->at(2));
  __ CallStub(&stub);
  context()->Plug(v0);
}
3653
3654
// Emits a call to RegExpExecStub with four arguments pushed on the stack.
// The stub's result is left in v0 and plugged into the expression context.
void FullCodeGenerator::EmitRegExpExec(CallRuntime* expr) {
  // Load the arguments on the stack and call the stub.
  RegExpExecStub stub(isolate());
  ZoneList<Expression*>* args = expr->arguments();
  DCHECK(args->length() == 4);
  VisitForStackValue(args->at(0));
  VisitForStackValue(args->at(1));
  VisitForStackValue(args->at(2));
  VisitForStackValue(args->at(3));
  __ CallStub(&stub);
  context()->Plug(v0);
}
3667
3668
// Emits inline MIPS code implementing valueOf-style unwrapping: if the
// argument is a JSValue wrapper, load its boxed value; otherwise (smi or any
// other object) return the argument unchanged.
void FullCodeGenerator::EmitValueOf(CallRuntime* expr) {
  ZoneList<Expression*>* args = expr->arguments();
  DCHECK(args->length() == 1);

  VisitForAccumulatorValue(args->at(0));  // Load the object.

  Label done;
  // If the object is a smi return the object.
  __ JumpIfSmi(v0, &done);
  // If the object is not a value type, return the object.
  __ GetObjectType(v0, a1, a1);
  __ Branch(&done, ne, a1, Operand(JS_VALUE_TYPE));

  __ lw(v0, FieldMemOperand(v0, JSValue::kValueOffset));

  __ bind(&done);
  context()->Plug(v0);
}
3687
3688
// Emits inline MIPS code reading a JSDate field by constant index.  Field 0
// (the time value) is always read directly; cached fields are read inline
// only when the date-cache stamp still matches, otherwise the C date-field
// function is called.  Non-JSDate receivers throw via the runtime.
void FullCodeGenerator::EmitDateField(CallRuntime* expr) {
  ZoneList<Expression*>* args = expr->arguments();
  DCHECK(args->length() == 2);
  DCHECK_NE(NULL, args->at(1)->AsLiteral());
  // The field index must be a compile-time smi literal.
  Smi* index = Smi::cast(*(args->at(1)->AsLiteral()->value()));

  VisitForAccumulatorValue(args->at(0));  // Load the object.

  Label runtime, done, not_date_object;
  Register object = v0;
  Register result = v0;
  Register scratch0 = t5;
  Register scratch1 = a1;

  // Receiver must be a JSDate; anything else throws below.
  __ JumpIfSmi(object, &not_date_object);
  __ GetObjectType(object, scratch1, scratch1);
  __ Branch(&not_date_object, ne, scratch1, Operand(JS_DATE_TYPE));

  if (index->value() == 0) {
    // Field 0 is the date's time value, stored directly on the object.
    __ lw(result, FieldMemOperand(object, JSDate::kValueOffset));
    __ jmp(&done);
  } else {
    if (index->value() < JSDate::kFirstUncachedField) {
      // Cached field: valid only while the isolate-wide stamp matches the
      // stamp recorded on this date object.
      ExternalReference stamp = ExternalReference::date_cache_stamp(isolate());
      __ li(scratch1, Operand(stamp));
      __ lw(scratch1, MemOperand(scratch1));
      __ lw(scratch0, FieldMemOperand(object, JSDate::kCacheStampOffset));
      __ Branch(&runtime, ne, scratch1, Operand(scratch0));
      __ lw(result, FieldMemOperand(object, JSDate::kValueOffset +
                                    kPointerSize * index->value()));
      __ jmp(&done);
    }
    // Stale cache or uncached field: compute via the C function.
    __ bind(&runtime);
    __ PrepareCallCFunction(2, scratch1);
    __ li(a1, Operand(index));
    __ Move(a0, object);
    __ CallCFunction(ExternalReference::get_date_field_function(isolate()), 2);
    __ jmp(&done);
  }

  __ bind(&not_date_object);
  __ CallRuntime(Runtime::kThrowNotDateError, 0);
  __ bind(&done);
  context()->Plug(v0);
}
3734
3735
// Emits inline MIPS code storing a character (smi char code) into a
// sequential one-byte string at a smi index: %_OneByteSeqStringSetChar.
// Debug builds verify smi-ness and the string's representation first.
void FullCodeGenerator::EmitOneByteSeqStringSetChar(CallRuntime* expr) {
  ZoneList<Expression*>* args = expr->arguments();
  DCHECK_EQ(3, args->length());

  Register string = v0;
  Register index = a1;
  Register value = a2;

  VisitForStackValue(args->at(0));        // index
  VisitForStackValue(args->at(1));        // value
  VisitForAccumulatorValue(args->at(2));  // string
  __ Pop(index, value);

  if (FLAG_debug_code) {
    // Verify the operands: both value and index must be smis, and the
    // string must be a sequential one-byte string with index in bounds.
    __ SmiTst(value, at);
    __ Check(eq, kNonSmiValue, at, Operand(zero_reg));
    __ SmiTst(index, at);
    __ Check(eq, kNonSmiIndex, at, Operand(zero_reg));
    __ SmiUntag(index, index);
    static const uint32_t one_byte_seq_type = kSeqStringTag | kOneByteStringTag;
    Register scratch = t5;
    __ EmitSeqStringSetCharCheck(
        string, index, value, scratch, one_byte_seq_type);
    __ SmiTag(index, index);
  }

  __ SmiUntag(value, value);
  // at <- address of the first character (header size minus the heap tag).
  __ Addu(at,
          string,
          Operand(SeqOneByteString::kHeaderSize - kHeapObjectTag));
  __ SmiUntag(index);
  __ Addu(at, at, index);  // One byte per character.
  __ sb(value, MemOperand(at));
  context()->Plug(string);
}
3771
3772
// Emits inline MIPS code storing a character (smi char code) into a
// sequential two-byte string at a smi index: %_TwoByteSeqStringSetChar.
// The smi index is used directly as the byte offset because a smi's tag
// shift equals the factor of two between characters and bytes.
void FullCodeGenerator::EmitTwoByteSeqStringSetChar(CallRuntime* expr) {
  ZoneList<Expression*>* args = expr->arguments();
  DCHECK_EQ(3, args->length());

  Register string = v0;
  Register index = a1;
  Register value = a2;

  VisitForStackValue(args->at(0));        // index
  VisitForStackValue(args->at(1));        // value
  VisitForAccumulatorValue(args->at(2));  // string
  __ Pop(index, value);

  if (FLAG_debug_code) {
    // Verify the operands: both value and index must be smis, and the
    // string must be a sequential two-byte string with index in bounds.
    __ SmiTst(value, at);
    __ Check(eq, kNonSmiValue, at, Operand(zero_reg));
    __ SmiTst(index, at);
    __ Check(eq, kNonSmiIndex, at, Operand(zero_reg));
    __ SmiUntag(index, index);
    static const uint32_t two_byte_seq_type = kSeqStringTag | kTwoByteStringTag;
    Register scratch = t5;
    __ EmitSeqStringSetCharCheck(
        string, index, value, scratch, two_byte_seq_type);
    __ SmiTag(index, index);
  }

  __ SmiUntag(value, value);
  __ Addu(at,
          string,
          Operand(SeqTwoByteString::kHeaderSize - kHeapObjectTag));
  // Smi tag shift (1) doubles the index, matching two bytes per character.
  __ Addu(at, at, index);
  STATIC_ASSERT(kSmiTagSize == 1 && kSmiTag == 0);
  __ sh(value, MemOperand(at));
  context()->Plug(string);
}
3808
3809
// Emits a call to MathPowStub (ON_STACK variant) with base and exponent
// pushed on the stack; the stub leaves the result in v0.
void FullCodeGenerator::EmitMathPow(CallRuntime* expr) {
  // Load the arguments on the stack and call the runtime function.
  ZoneList<Expression*>* args = expr->arguments();
  DCHECK(args->length() == 2);
  VisitForStackValue(args->at(0));
  VisitForStackValue(args->at(1));
  MathPowStub stub(isolate(), MathPowStub::ON_STACK);
  __ CallStub(&stub);
  context()->Plug(v0);
}
3820
3821
// Emits inline MIPS code storing a value into a JSValue wrapper's value
// slot (with write barrier).  If the target is a smi or not a JSValue, the
// store is skipped and the value is simply returned.
void FullCodeGenerator::EmitSetValueOf(CallRuntime* expr) {
  ZoneList<Expression*>* args = expr->arguments();
  DCHECK(args->length() == 2);

  VisitForStackValue(args->at(0));        // Load the object.
  VisitForAccumulatorValue(args->at(1));  // Load the value.
  __ pop(a1);  // v0 = value. a1 = object.

  Label done;
  // If the object is a smi, return the value.
  __ JumpIfSmi(a1, &done);

  // If the object is not a value type, return the value.
  __ GetObjectType(a1, a2, a2);
  __ Branch(&done, ne, a2, Operand(JS_VALUE_TYPE));

  // Store the value.
  __ sw(v0, FieldMemOperand(a1, JSValue::kValueOffset));
  // Update the write barrier. Save the value as it will be
  // overwritten by the write barrier code and is needed afterward.
  __ mov(a2, v0);
  __ RecordWriteField(
      a1, JSValue::kValueOffset, a2, a3, kRAHasBeenSaved, kDontSaveFPRegs);

  __ bind(&done);
  context()->Plug(v0);
}
3849
3850
// Emits a call to NumberToStringStub, which expects its number argument in
// a0 and returns the string result in v0.
void FullCodeGenerator::EmitNumberToString(CallRuntime* expr) {
  ZoneList<Expression*>* args = expr->arguments();
  DCHECK_EQ(args->length(), 1);

  // Load the argument into a0 and call the stub.
  VisitForAccumulatorValue(args->at(0));
  __ mov(a0, result_register());

  NumberToStringStub stub(isolate());
  __ CallStub(&stub);
  context()->Plug(v0);
}
3863
3864
// Emits inline MIPS code converting a char code (in v0) into a one-character
// string via StringCharFromCodeGenerator; the slow path is emitted inline
// after the fast path.  The generator writes its result to a1.
void FullCodeGenerator::EmitStringCharFromCode(CallRuntime* expr) {
  ZoneList<Expression*>* args = expr->arguments();
  DCHECK(args->length() == 1);

  VisitForAccumulatorValue(args->at(0));

  Label done;
  StringCharFromCodeGenerator generator(v0, a1);  // code in v0, result in a1.
  generator.GenerateFast(masm_);
  __ jmp(&done);

  NopRuntimeCallHelper call_helper;
  generator.GenerateSlow(masm_, call_helper);

  __ bind(&done);
  context()->Plug(a1);
}
3882
3883
// Emits inline MIPS code for String.prototype.charCodeAt's fast path via
// StringCharCodeAtGenerator: returns the char code, NaN when the index is
// out of range, or undefined (triggering conversion in the slow path) when
// the index is not a number.
void FullCodeGenerator::EmitStringCharCodeAt(CallRuntime* expr) {
  ZoneList<Expression*>* args = expr->arguments();
  DCHECK(args->length() == 2);

  VisitForStackValue(args->at(0));        // string -> stack
  VisitForAccumulatorValue(args->at(1));  // index -> v0
  __ mov(a0, result_register());

  Register object = a1;
  Register index = a0;
  Register result = v0;

  __ pop(object);

  Label need_conversion;
  Label index_out_of_range;
  Label done;
  StringCharCodeAtGenerator generator(object,
                                      index,
                                      result,
                                      &need_conversion,
                                      &need_conversion,
                                      &index_out_of_range,
                                      STRING_INDEX_IS_NUMBER);
  generator.GenerateFast(masm_);
  __ jmp(&done);

  __ bind(&index_out_of_range);
  // When the index is out of range, the spec requires us to return
  // NaN.
  __ LoadRoot(result, Heap::kNanValueRootIndex);
  __ jmp(&done);

  __ bind(&need_conversion);
  // Load the undefined value into the result register, which will
  // trigger conversion.
  __ LoadRoot(result, Heap::kUndefinedValueRootIndex);
  __ jmp(&done);

  NopRuntimeCallHelper call_helper;
  generator.GenerateSlow(masm_, call_helper);

  __ bind(&done);
  context()->Plug(result);
}
3929
3930
// Emits inline MIPS code for String.prototype.charAt's fast path via
// StringCharAtGenerator: returns a one-character string, the empty string
// when the index is out of range, or smi zero (triggering conversion in the
// slow path) when the index is not a number.
void FullCodeGenerator::EmitStringCharAt(CallRuntime* expr) {
  ZoneList<Expression*>* args = expr->arguments();
  DCHECK(args->length() == 2);

  VisitForStackValue(args->at(0));        // string -> stack
  VisitForAccumulatorValue(args->at(1));  // index -> v0
  __ mov(a0, result_register());

  Register object = a1;
  Register index = a0;
  Register scratch = a3;
  Register result = v0;

  __ pop(object);

  Label need_conversion;
  Label index_out_of_range;
  Label done;
  StringCharAtGenerator generator(object,
                                  index,
                                  scratch,
                                  result,
                                  &need_conversion,
                                  &need_conversion,
                                  &index_out_of_range,
                                  STRING_INDEX_IS_NUMBER);
  generator.GenerateFast(masm_);
  __ jmp(&done);

  __ bind(&index_out_of_range);
  // When the index is out of range, the spec requires us to return
  // the empty string.
  __ LoadRoot(result, Heap::kempty_stringRootIndex);
  __ jmp(&done);

  __ bind(&need_conversion);
  // Move smi zero into the result register, which will trigger
  // conversion.
  __ li(result, Operand(Smi::FromInt(0)));
  __ jmp(&done);

  NopRuntimeCallHelper call_helper;
  generator.GenerateSlow(masm_, call_helper);

  __ bind(&done);
  context()->Plug(result);
}
3978
3979
// Emits a call to StringAddStub concatenating two strings.  The stub takes
// its operands in a0 and a1 and checks both inputs (CHECK_BOTH).
void FullCodeGenerator::EmitStringAdd(CallRuntime* expr) {
  ZoneList<Expression*>* args = expr->arguments();
  DCHECK_EQ(2, args->length());
  VisitForStackValue(args->at(0));
  VisitForAccumulatorValue(args->at(1));

  __ pop(a1);                          // a1 <- left operand.
  __ mov(a0, result_register());  // StringAddStub requires args in a0, a1.
  StringAddStub stub(isolate(), STRING_ADD_CHECK_BOTH, NOT_TENURED);
  __ CallStub(&stub);
  context()->Plug(v0);
}
3992
3993
// Emits a call to StringCompareStub with both string operands pushed on the
// stack; the stub's result is left in v0.
void FullCodeGenerator::EmitStringCompare(CallRuntime* expr) {
  ZoneList<Expression*>* args = expr->arguments();
  DCHECK_EQ(2, args->length());

  VisitForStackValue(args->at(0));
  VisitForStackValue(args->at(1));

  StringCompareStub stub(isolate());
  __ CallStub(&stub);
  context()->Plug(v0);
}
4005
4006
// Emits inline MIPS code for %_CallFunction(receiver, args..., function):
// invokes the function directly when it is a JSFunction, otherwise (smi,
// proxy, or other non-function) falls back to Runtime::kCall.
void FullCodeGenerator::EmitCallFunction(CallRuntime* expr) {
  ZoneList<Expression*>* args = expr->arguments();
  DCHECK(args->length() >= 2);

  int arg_count = args->length() - 2;  // 2 ~ receiver and function.
  // Push the receiver and all call arguments.
  for (int i = 0; i < arg_count + 1; i++) {
    VisitForStackValue(args->at(i));
  }
  VisitForAccumulatorValue(args->last());  // Function.

  Label runtime, done;
  // Check for non-function argument (including proxy).
  __ JumpIfSmi(v0, &runtime);
  __ GetObjectType(v0, a1, a1);
  __ Branch(&runtime, ne, a1, Operand(JS_FUNCTION_TYPE));

  // InvokeFunction requires the function in a1. Move it in there.
  __ mov(a1, result_register());
  ParameterCount count(arg_count);
  __ InvokeFunction(a1, count, CALL_FUNCTION, NullCallWrapper());
  // Restore the context register after the call.
  __ lw(cp, MemOperand(fp, StandardFrameConstants::kContextOffset));
  __ jmp(&done);

  __ bind(&runtime);
  __ push(v0);  // Push the function as the final runtime argument.
  __ CallRuntime(Runtime::kCall, args->length());
  __ bind(&done);

  context()->Plug(v0);
}
4037
4038
// Emits a call to RegExpConstructResultStub with its three arguments in
// a2 (length), a1 (index), and a0 (input), mirroring evaluation order.
void FullCodeGenerator::EmitRegExpConstructResult(CallRuntime* expr) {
  RegExpConstructResultStub stub(isolate());
  ZoneList<Expression*>* args = expr->arguments();
  DCHECK(args->length() == 3);
  VisitForStackValue(args->at(0));
  VisitForStackValue(args->at(1));
  VisitForAccumulatorValue(args->at(2));
  __ mov(a0, result_register());  // third argument
  __ pop(a1);                     // second argument
  __ pop(a2);                     // first argument
  __ CallStub(&stub);
  context()->Plug(v0);
}
4052
4053
// Emits inline MIPS code for %_GetFromCache(cache_id, key): probes the
// JSFunctionResultCache at the cache's current finger position and, on a
// hit, returns the cached value; on a miss, calls Runtime::kGetFromCache.
// The cache id must be a compile-time literal.
void FullCodeGenerator::EmitGetFromCache(CallRuntime* expr) {
  ZoneList<Expression*>* args = expr->arguments();
  DCHECK_EQ(2, args->length());

  DCHECK_NE(NULL, args->at(0)->AsLiteral());
  int cache_id = Smi::cast(*(args->at(0)->AsLiteral()->value()))->value();

  Handle<FixedArray> jsfunction_result_caches(
      isolate()->native_context()->jsfunction_result_caches());
  if (jsfunction_result_caches->length() <= cache_id) {
    // Unknown cache id: abort in debug-style and produce undefined.
    __ Abort(kAttemptToUseUndefinedCache);
    __ LoadRoot(v0, Heap::kUndefinedValueRootIndex);
    context()->Plug(v0);
    return;
  }

  VisitForAccumulatorValue(args->at(1));

  Register key = v0;
  Register cache = a1;
  // Load the cache array from the native context.
  __ lw(cache, ContextOperand(cp, Context::GLOBAL_OBJECT_INDEX));
  __ lw(cache, FieldMemOperand(cache, GlobalObject::kNativeContextOffset));
  __ lw(cache,
        ContextOperand(
            cache, Context::JSFUNCTION_RESULT_CACHES_INDEX));
  __ lw(cache,
        FieldMemOperand(cache, FixedArray::OffsetOfElementAt(cache_id)));


  Label done, not_found;
  STATIC_ASSERT(kSmiTag == 0 && kSmiTagSize == 1);
  __ lw(a2, FieldMemOperand(cache, JSFunctionResultCache::kFingerOffset));
  // a2 now holds finger offset as a smi.
  __ Addu(a3, cache, Operand(FixedArray::kHeaderSize - kHeapObjectTag));
  // a3 now points to the start of fixed array elements.
  __ sll(at, a2, kPointerSizeLog2 - kSmiTagSize);
  __ addu(a3, a3, at);
  // a3 now points to key of indexed element of cache.
  __ lw(a2, MemOperand(a3));
  __ Branch(&not_found, ne, key, Operand(a2));

  // Hit: the value sits one pointer after the key.
  __ lw(v0, MemOperand(a3, kPointerSize));
  __ Branch(&done);

  __ bind(&not_found);
  // Call runtime to perform the lookup.
  __ Push(cache, key);
  __ CallRuntime(Runtime::kGetFromCache, 2);

  __ bind(&done);
  context()->Plug(v0);
}
4106
4107
// Emits inline MIPS code testing whether a string has a cached array index
// in its hash field (the "contains cached array index" bit pattern).
void FullCodeGenerator::EmitHasCachedArrayIndex(CallRuntime* expr) {
  ZoneList<Expression*>* args = expr->arguments();
  VisitForAccumulatorValue(args->at(0));

  Label materialize_true, materialize_false;
  Label* if_true = NULL;
  Label* if_false = NULL;
  Label* fall_through = NULL;
  context()->PrepareTest(&materialize_true, &materialize_false,
                         &if_true, &if_false, &fall_through);

  // Mask the hash field: a zero result means the index is cached.
  __ lw(a0, FieldMemOperand(v0, String::kHashFieldOffset));
  __ And(a0, a0, Operand(String::kContainsCachedArrayIndexMask));

  PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
  Split(eq, a0, Operand(zero_reg), if_true, if_false, fall_through);

  context()->Plug(if_true, if_false);
}
4127
4128
// Inlined intrinsic: extracts the cached array index from a string's hash
// field. The single argument must be a string (checked in debug builds via
// AssertString); the decoded index is left in v0 by IndexFromHash.
void FullCodeGenerator::EmitGetCachedArrayIndex(CallRuntime* expr) {
  ZoneList<Expression*>* args = expr->arguments();
  DCHECK(args->length() == 1);
  VisitForAccumulatorValue(args->at(0));

  // Debug-only check that v0 actually holds a string.
  __ AssertString(v0);

  // Load the hash field and decode the array index portion in place.
  __ lw(v0, FieldMemOperand(v0, String::kHashFieldOffset));
  __ IndexFromHash(v0, v0);

  context()->Plug(v0);
}
4141
4142
// Inlined fast path for joining an array of strings with a separator
// (the Array.prototype.join intrinsic). Arguments: args[0] = array
// (ends up in v0), args[1] = separator (pushed on the stack).
//
// Fast-path preconditions, all checked here with a jump to &bailout on
// failure: the receiver is a JSArray with fast elements, every element is
// a sequential one-byte string, the separator is a sequential one-byte
// string, and the total result length fits in a smi. The bailout path
// returns undefined in v0 — presumably the JS-level caller detects that
// and falls back to a generic join (TODO confirm against the intrinsic's
// caller).
//
// Three copy loops are emitted, specialized on separator length:
// empty (0), single character (1), and long (>1).
void FullCodeGenerator::EmitFastOneByteArrayJoin(CallRuntime* expr) {
  Label bailout, done, one_char_separator, long_separator,
      non_trivial_array, not_size_one_array, loop,
      empty_separator_loop, one_char_separator_loop,
      one_char_separator_loop_entry, long_separator_loop;
  ZoneList<Expression*>* args = expr->arguments();
  DCHECK(args->length() == 2);
  VisitForStackValue(args->at(1));
  VisitForAccumulatorValue(args->at(0));

  // All aliases of the same register have disjoint lifetimes.
  Register array = v0;
  Register elements = no_reg;  // Will be v0.
  Register result = no_reg;  // Will be v0.
  Register separator = a1;
  Register array_length = a2;
  Register result_pos = no_reg;  // Will be a2.
  Register string_length = a3;
  Register string = t0;
  Register element = t1;
  Register elements_end = t2;
  Register scratch1 = t3;
  Register scratch2 = t5;
  Register scratch3 = t4;

  // Separator operand is on the stack.
  __ pop(separator);

  // Check that the array is a JSArray.
  __ JumpIfSmi(array, &bailout);
  __ GetObjectType(array, scratch1, scratch2);
  __ Branch(&bailout, ne, scratch2, Operand(JS_ARRAY_TYPE));

  // Check that the array has fast elements.
  __ CheckFastElements(scratch1, scratch2, &bailout);

  // If the array has length zero, return the empty string.
  __ lw(array_length, FieldMemOperand(array, JSArray::kLengthOffset));
  __ SmiUntag(array_length);
  __ Branch(&non_trivial_array, ne, array_length, Operand(zero_reg));
  __ LoadRoot(v0, Heap::kempty_stringRootIndex);
  __ Branch(&done);

  __ bind(&non_trivial_array);

  // Get the FixedArray containing array's elements.
  elements = array;
  __ lw(elements, FieldMemOperand(array, JSArray::kElementsOffset));
  array = no_reg;  // End of array's live range.

  // Check that all array elements are sequential one-byte strings, and
  // accumulate the sum of their lengths, as a smi-encoded value.
  __ mov(string_length, zero_reg);
  __ Addu(element,
          elements, Operand(FixedArray::kHeaderSize - kHeapObjectTag));
  __ sll(elements_end, array_length, kPointerSizeLog2);
  __ Addu(elements_end, element, elements_end);
  // Loop condition: while (element < elements_end).
  // Live values in registers:
  //   elements: Fixed array of strings.
  //   array_length: Length of the fixed array of strings (not smi)
  //   separator: Separator string
  //   string_length: Accumulated sum of string lengths (smi).
  //   element: Current array element.
  //   elements_end: Array end.
  if (generate_debug_code_) {
    __ Assert(gt, kNoEmptyArraysHereInEmitFastOneByteArrayJoin, array_length,
              Operand(zero_reg));
  }
  __ bind(&loop);
  __ lw(string, MemOperand(element));
  __ Addu(element, element, kPointerSize);
  __ JumpIfSmi(string, &bailout);
  __ lw(scratch1, FieldMemOperand(string, HeapObject::kMapOffset));
  __ lbu(scratch1, FieldMemOperand(scratch1, Map::kInstanceTypeOffset));
  __ JumpIfInstanceTypeIsNotSequentialOneByte(scratch1, scratch2, &bailout);
  __ lw(scratch1, FieldMemOperand(string, SeqOneByteString::kLengthOffset));
  // Smi addition with overflow check: overflow means the summed lengths no
  // longer fit in a smi, so take the slow path.
  __ AdduAndCheckForOverflow(string_length, string_length, scratch1, scratch3);
  __ BranchOnOverflow(&bailout, scratch3);
  __ Branch(&loop, lt, element, Operand(elements_end));

  // If array_length is 1, return elements[0], a string.
  __ Branch(&not_size_one_array, ne, array_length, Operand(1));
  __ lw(v0, FieldMemOperand(elements, FixedArray::kHeaderSize));
  __ Branch(&done);

  __ bind(&not_size_one_array);

  // Live values in registers:
  //   separator: Separator string
  //   array_length: Length of the array.
  //   string_length: Sum of string lengths (smi).
  //   elements: FixedArray of strings.

  // Check that the separator is a flat one-byte string.
  __ JumpIfSmi(separator, &bailout);
  __ lw(scratch1, FieldMemOperand(separator, HeapObject::kMapOffset));
  __ lbu(scratch1, FieldMemOperand(scratch1, Map::kInstanceTypeOffset));
  __ JumpIfInstanceTypeIsNotSequentialOneByte(scratch1, scratch2, &bailout);

  // Add (separator length times array_length) - separator length to the
  // string_length to get the length of the result string. array_length is not
  // smi but the other values are, so the result is a smi.
  __ lw(scratch1, FieldMemOperand(separator, SeqOneByteString::kLengthOffset));
  __ Subu(string_length, string_length, Operand(scratch1));
  // 64-bit multiply: scratch3 = high word, scratch2 = low word.
  __ Mul(scratch3, scratch2, array_length, scratch1);
  // Check for smi overflow. No overflow if higher 33 bits of 64-bit result are
  // zero.
  __ Branch(&bailout, ne, scratch3, Operand(zero_reg));
  __ And(scratch3, scratch2, Operand(0x80000000));
  __ Branch(&bailout, ne, scratch3, Operand(zero_reg));
  __ AdduAndCheckForOverflow(string_length, string_length, scratch2, scratch3);
  __ BranchOnOverflow(&bailout, scratch3);
  __ SmiUntag(string_length);

  // Get first element in the array to free up the elements register to be used
  // for the result.
  __ Addu(element,
          elements, Operand(FixedArray::kHeaderSize - kHeapObjectTag));
  result = elements;  // End of live range for elements.
  elements = no_reg;
  // Live values in registers:
  //   element: First array element
  //   separator: Separator string
  //   string_length: Length of result string (not smi)
  //   array_length: Length of the array.
  __ AllocateOneByteString(result, string_length, scratch1, scratch2,
                           elements_end, &bailout);
  // Prepare for looping. Set up elements_end to end of the array. Set
  // result_pos to the position of the result where to write the first
  // character.
  __ sll(elements_end, array_length, kPointerSizeLog2);
  __ Addu(elements_end, element, elements_end);
  result_pos = array_length;  // End of live range for array_length.
  array_length = no_reg;
  __ Addu(result_pos,
          result,
          Operand(SeqOneByteString::kHeaderSize - kHeapObjectTag));

  // Check the length of the separator.
  __ lw(scratch1, FieldMemOperand(separator, SeqOneByteString::kLengthOffset));
  __ li(at, Operand(Smi::FromInt(1)));
  __ Branch(&one_char_separator, eq, scratch1, Operand(at));
  __ Branch(&long_separator, gt, scratch1, Operand(at));

  // Empty separator case.
  __ bind(&empty_separator_loop);
  // Live values in registers:
  //   result_pos: the position to which we are currently copying characters.
  //   element: Current array element.
  //   elements_end: Array end.

  // Copy next array element to the result.
  __ lw(string, MemOperand(element));
  __ Addu(element, element, kPointerSize);
  __ lw(string_length, FieldMemOperand(string, String::kLengthOffset));
  __ SmiUntag(string_length);
  __ Addu(string, string, SeqOneByteString::kHeaderSize - kHeapObjectTag);
  __ CopyBytes(string, result_pos, string_length, scratch1);
  // End while (element < elements_end).
  __ Branch(&empty_separator_loop, lt, element, Operand(elements_end));
  DCHECK(result.is(v0));
  __ Branch(&done);

  // One-character separator case.
  __ bind(&one_char_separator);
  // Replace separator with its one-byte character value.
  __ lbu(separator, FieldMemOperand(separator, SeqOneByteString::kHeaderSize));
  // Jump into the loop after the code that copies the separator, so the first
  // element is not preceded by a separator.
  __ jmp(&one_char_separator_loop_entry);

  __ bind(&one_char_separator_loop);
  // Live values in registers:
  //   result_pos: the position to which we are currently copying characters.
  //   element: Current array element.
  //   elements_end: Array end.
  //   separator: Single separator one-byte char (in lower byte).

  // Copy the separator character to the result.
  __ sb(separator, MemOperand(result_pos));
  __ Addu(result_pos, result_pos, 1);

  // Copy next array element to the result.
  __ bind(&one_char_separator_loop_entry);
  __ lw(string, MemOperand(element));
  __ Addu(element, element, kPointerSize);
  __ lw(string_length, FieldMemOperand(string, String::kLengthOffset));
  __ SmiUntag(string_length);
  __ Addu(string, string, SeqOneByteString::kHeaderSize - kHeapObjectTag);
  __ CopyBytes(string, result_pos, string_length, scratch1);
  // End while (element < elements_end).
  __ Branch(&one_char_separator_loop, lt, element, Operand(elements_end));
  DCHECK(result.is(v0));
  __ Branch(&done);

  // Long separator case (separator is more than one character). Entry is at
  // the label long_separator below.
  __ bind(&long_separator_loop);
  // Live values in registers:
  //   result_pos: the position to which we are currently copying characters.
  //   element: Current array element.
  //   elements_end: Array end.
  //   separator: Separator string.

  // Copy the separator to the result.
  __ lw(string_length, FieldMemOperand(separator, String::kLengthOffset));
  __ SmiUntag(string_length);
  __ Addu(string,
          separator,
          Operand(SeqOneByteString::kHeaderSize - kHeapObjectTag));
  __ CopyBytes(string, result_pos, string_length, scratch1);

  __ bind(&long_separator);
  __ lw(string, MemOperand(element));
  __ Addu(element, element, kPointerSize);
  __ lw(string_length, FieldMemOperand(string, String::kLengthOffset));
  __ SmiUntag(string_length);
  __ Addu(string, string, SeqOneByteString::kHeaderSize - kHeapObjectTag);
  __ CopyBytes(string, result_pos, string_length, scratch1);
  // End while (element < elements_end).
  __ Branch(&long_separator_loop, lt, element, Operand(elements_end));
  DCHECK(result.is(v0));
  __ Branch(&done);

  // Any precondition failure lands here: produce undefined as the result.
  __ bind(&bailout);
  __ LoadRoot(v0, Heap::kUndefinedValueRootIndex);
  __ bind(&done);
  context()->Plug(v0);
}
4373
4374
// Inlined intrinsic: loads the debugger-active flag. Reads one byte from the
// isolate's debug_is_active external address and returns it smi-tagged in v0.
void FullCodeGenerator::EmitDebugIsActive(CallRuntime* expr) {
  DCHECK(expr->arguments()->length() == 0);
  ExternalReference debug_is_active =
      ExternalReference::debug_is_active_address(isolate());
  __ li(at, Operand(debug_is_active));
  __ lb(v0, MemOperand(at));
  // Tag the raw byte as a smi so it is a valid JS value.
  __ SmiTag(v0);
  context()->Plug(v0);
}
4384
4385
Andrei Popescu31002712010-02-23 13:46:05 +00004386void FullCodeGenerator::VisitCallRuntime(CallRuntime* expr) {
Ben Murdochb8a8cc12014-11-26 15:28:44 +00004387 if (expr->function() != NULL &&
4388 expr->function()->intrinsic_type == Runtime::INLINE) {
Ben Murdoch257744e2011-11-30 15:57:28 +00004389 Comment cmnt(masm_, "[ InlineRuntimeCall");
4390 EmitInlineRuntimeCall(expr);
4391 return;
4392 }
4393
4394 Comment cmnt(masm_, "[ CallRuntime");
4395 ZoneList<Expression*>* args = expr->arguments();
Ben Murdoch257744e2011-11-30 15:57:28 +00004396 int arg_count = args->length();
Ben Murdoch257744e2011-11-30 15:57:28 +00004397
4398 if (expr->is_jsruntime()) {
Ben Murdochb8a8cc12014-11-26 15:28:44 +00004399 // Push the builtins object as the receiver.
4400 Register receiver = LoadDescriptor::ReceiverRegister();
4401 __ lw(receiver, GlobalObjectOperand());
4402 __ lw(receiver, FieldMemOperand(receiver, GlobalObject::kBuiltinsOffset));
4403 __ push(receiver);
4404
4405 // Load the function from the receiver.
4406 __ li(LoadDescriptor::NameRegister(), Operand(expr->name()));
4407 if (FLAG_vector_ics) {
4408 __ li(VectorLoadICDescriptor::SlotRegister(),
Emily Bernierd0a1eb72015-03-24 16:35:39 -04004409 Operand(SmiFromSlot(expr->CallRuntimeFeedbackSlot())));
Ben Murdochb8a8cc12014-11-26 15:28:44 +00004410 CallLoadIC(NOT_CONTEXTUAL);
4411 } else {
4412 CallLoadIC(NOT_CONTEXTUAL, expr->CallRuntimeFeedbackId());
4413 }
4414
4415 // Push the target function under the receiver.
4416 __ lw(at, MemOperand(sp, 0));
4417 __ push(at);
4418 __ sw(v0, MemOperand(sp, kPointerSize));
4419
4420 // Push the arguments ("left-to-right").
4421 int arg_count = args->length();
4422 for (int i = 0; i < arg_count; i++) {
4423 VisitForStackValue(args->at(i));
4424 }
4425
4426 // Record source position of the IC call.
4427 SetSourcePosition(expr->position());
4428 CallFunctionStub stub(isolate(), arg_count, NO_CALL_FUNCTION_FLAGS);
4429 __ lw(a1, MemOperand(sp, (arg_count + 1) * kPointerSize));
4430 __ CallStub(&stub);
4431
Ben Murdoch257744e2011-11-30 15:57:28 +00004432 // Restore context register.
4433 __ lw(cp, MemOperand(fp, StandardFrameConstants::kContextOffset));
Ben Murdochb8a8cc12014-11-26 15:28:44 +00004434
4435 context()->DropAndPlug(1, v0);
Ben Murdoch257744e2011-11-30 15:57:28 +00004436 } else {
Ben Murdochb8a8cc12014-11-26 15:28:44 +00004437 // Push the arguments ("left-to-right").
4438 for (int i = 0; i < arg_count; i++) {
4439 VisitForStackValue(args->at(i));
4440 }
4441
Ben Murdoch257744e2011-11-30 15:57:28 +00004442 // Call the C runtime function.
4443 __ CallRuntime(expr->function(), arg_count);
Ben Murdochb8a8cc12014-11-26 15:28:44 +00004444 context()->Plug(v0);
Ben Murdoch257744e2011-11-30 15:57:28 +00004445 }
Andrei Popescu31002712010-02-23 13:46:05 +00004446}
4447
4448
// Compiles the unary operators delete, void, ! (NOT), and typeof.
// Each case threads its result through the current expression context.
void FullCodeGenerator::VisitUnaryOperation(UnaryOperation* expr) {
  switch (expr->op()) {
    case Token::DELETE: {
      Comment cmnt(masm_, "[ UnaryOperation (DELETE)");
      Property* property = expr->expression()->AsProperty();
      VariableProxy* proxy = expr->expression()->AsVariableProxy();

      if (property != NULL) {
        // delete obj[key]: evaluate receiver and key, pass the language mode,
        // and let the DELETE builtin do the work.
        VisitForStackValue(property->obj());
        VisitForStackValue(property->key());
        __ li(a1, Operand(Smi::FromInt(strict_mode())));
        __ push(a1);
        __ InvokeBuiltin(Builtins::DELETE, CALL_FUNCTION);
        context()->Plug(v0);
      } else if (proxy != NULL) {
        Variable* var = proxy->var();
        // Delete of an unqualified identifier is disallowed in strict mode
        // but "delete this" is allowed.
        DCHECK(strict_mode() == SLOPPY || var->is_this());
        if (var->IsUnallocated()) {
          // Global variable: delete a property of the global object.
          __ lw(a2, GlobalObjectOperand());
          __ li(a1, Operand(var->name()));
          __ li(a0, Operand(Smi::FromInt(SLOPPY)));
          __ Push(a2, a1, a0);
          __ InvokeBuiltin(Builtins::DELETE, CALL_FUNCTION);
          context()->Plug(v0);
        } else if (var->IsStackAllocated() || var->IsContextSlot()) {
          // Result of deleting non-global, non-dynamic variables is false.
          // The subexpression does not have side effects.
          context()->Plug(var->is_this());
        } else {
          // Non-global variable.  Call the runtime to try to delete from the
          // context where the variable was introduced.
          DCHECK(!context_register().is(a2));
          __ li(a2, Operand(var->name()));
          __ Push(context_register(), a2);
          __ CallRuntime(Runtime::kDeleteLookupSlot, 2);
          context()->Plug(v0);
        }
      } else {
        // Result of deleting non-property, non-variable reference is true.
        // The subexpression may have side effects.
        VisitForEffect(expr->expression());
        context()->Plug(true);
      }
      break;
    }

    case Token::VOID: {
      // void expr: evaluate for side effects, result is always undefined.
      Comment cmnt(masm_, "[ UnaryOperation (VOID)");
      VisitForEffect(expr->expression());
      context()->Plug(Heap::kUndefinedValueRootIndex);
      break;
    }

    case Token::NOT: {
      Comment cmnt(masm_, "[ UnaryOperation (NOT)");
      if (context()->IsEffect()) {
        // Unary NOT has no side effects so it's only necessary to visit the
        // subexpression.  Match the optimizing compiler by not branching.
        VisitForEffect(expr->expression());
      } else if (context()->IsTest()) {
        const TestContext* test = TestContext::cast(context());
        // The labels are swapped for the recursive call.
        VisitForControl(expr->expression(),
                        test->false_label(),
                        test->true_label(),
                        test->fall_through());
        context()->Plug(test->true_label(), test->false_label());
      } else {
        // We handle value contexts explicitly rather than simply visiting
        // for control and plugging the control flow into the context,
        // because we need to prepare a pair of extra administrative AST ids
        // for the optimizing compiler.
        DCHECK(context()->IsAccumulatorValue() || context()->IsStackValue());
        Label materialize_true, materialize_false, done;
        // Note the swapped targets: the subexpression's false path
        // materializes true, implementing the negation.
        VisitForControl(expr->expression(),
                        &materialize_false,
                        &materialize_true,
                        &materialize_true);
        __ bind(&materialize_true);
        PrepareForBailoutForId(expr->MaterializeTrueId(), NO_REGISTERS);
        __ LoadRoot(v0, Heap::kTrueValueRootIndex);
        if (context()->IsStackValue()) __ push(v0);
        __ jmp(&done);
        __ bind(&materialize_false);
        PrepareForBailoutForId(expr->MaterializeFalseId(), NO_REGISTERS);
        __ LoadRoot(v0, Heap::kFalseValueRootIndex);
        if (context()->IsStackValue()) __ push(v0);
        __ bind(&done);
      }
      break;
    }

    case Token::TYPEOF: {
      Comment cmnt(masm_, "[ UnaryOperation (TYPEOF)");
      // VisitForTypeofValue avoids reference errors for unresolved names.
      { StackValueContext context(this);
        VisitForTypeofValue(expr->expression());
      }
      __ CallRuntime(Runtime::kTypeof, 1);
      context()->Plug(v0);
      break;
    }

    default:
      UNREACHABLE();
  }
}
4557
4558
// Compiles prefix/postfix ++ and -- on variables, (keyed/named) properties,
// and super properties. Structure:
//  1. Load the old value (reserving a stack slot first for postfix results).
//  2. Optionally add/subtract 1 inline for smis, falling back to
//     ToNumber + BinaryOpIC with a patch site.
//  3. Store the new value back through the appropriate assignment path and
//     plug either the new value (prefix) or the saved old value (postfix).
void FullCodeGenerator::VisitCountOperation(CountOperation* expr) {
  DCHECK(expr->expression()->IsValidReferenceExpression());

  Comment cmnt(masm_, "[ CountOperation");
  SetSourcePosition(expr->position());

  Property* prop = expr->expression()->AsProperty();
  LhsKind assign_type = GetAssignType(prop);

  // Evaluate expression and get value.
  if (assign_type == VARIABLE) {
    DCHECK(expr->expression()->AsVariableProxy()->var() != NULL);
    AccumulatorValueContext context(this);
    EmitVariableLoad(expr->expression()->AsVariableProxy());
  } else {
    // Reserve space for result of postfix operation.
    if (expr->is_postfix() && !context()->IsEffect()) {
      __ li(at, Operand(Smi::FromInt(0)));
      __ push(at);
    }
    switch (assign_type) {
      case NAMED_PROPERTY: {
        // Put the object both on the stack and in the register.
        VisitForStackValue(prop->obj());
        __ lw(LoadDescriptor::ReceiverRegister(), MemOperand(sp, 0));
        EmitNamedPropertyLoad(prop);
        break;
      }

      case NAMED_SUPER_PROPERTY: {
        // Stack layout: this, home object (duplicated for the later store).
        VisitForStackValue(prop->obj()->AsSuperReference()->this_var());
        EmitLoadHomeObject(prop->obj()->AsSuperReference());
        __ Push(result_register());
        const Register scratch = a1;
        __ lw(scratch, MemOperand(sp, kPointerSize));
        __ Push(scratch, result_register());
        EmitNamedSuperPropertyLoad(prop);
        break;
      }

      case KEYED_SUPER_PROPERTY: {
        // Stack layout: this, home object, key (duplicated for the store).
        VisitForStackValue(prop->obj()->AsSuperReference()->this_var());
        EmitLoadHomeObject(prop->obj()->AsSuperReference());
        const Register scratch = a1;
        const Register scratch1 = t0;
        __ Move(scratch, result_register());
        VisitForAccumulatorValue(prop->key());
        __ Push(scratch, result_register());
        __ lw(scratch1, MemOperand(sp, 2 * kPointerSize));
        __ Push(scratch1, scratch, result_register());
        EmitKeyedSuperPropertyLoad(prop);
        break;
      }

      case KEYED_PROPERTY: {
        VisitForStackValue(prop->obj());
        VisitForStackValue(prop->key());
        __ lw(LoadDescriptor::ReceiverRegister(),
              MemOperand(sp, 1 * kPointerSize));
        __ lw(LoadDescriptor::NameRegister(), MemOperand(sp, 0));
        EmitKeyedPropertyLoad(prop);
        break;
      }

      case VARIABLE:
        UNREACHABLE();
    }
  }

  // We need a second deoptimization point after loading the value
  // in case evaluating the property load may have a side effect.
  if (assign_type == VARIABLE) {
    PrepareForBailout(expr->expression(), TOS_REG);
  } else {
    PrepareForBailoutForId(prop->LoadId(), TOS_REG);
  }

  // Inline smi case if we are in a loop.
  Label stub_call, done;
  JumpPatchSite patch_site(masm_);

  int count_value = expr->op() == Token::INC ? 1 : -1;
  __ mov(a0, v0);
  if (ShouldInlineSmiCase(expr->op())) {
    Label slow;
    patch_site.EmitJumpIfNotSmi(v0, &slow);

    // Save result for postfix expressions.
    if (expr->is_postfix()) {
      if (!context()->IsEffect()) {
        // Save the result on the stack. If we have a named or keyed property
        // we store the result under the receiver that is currently on top
        // of the stack.
        switch (assign_type) {
          case VARIABLE:
            __ push(v0);
            break;
          case NAMED_PROPERTY:
            __ sw(v0, MemOperand(sp, kPointerSize));
            break;
          case NAMED_SUPER_PROPERTY:
            __ sw(v0, MemOperand(sp, 2 * kPointerSize));
            break;
          case KEYED_PROPERTY:
            __ sw(v0, MemOperand(sp, 2 * kPointerSize));
            break;
          case KEYED_SUPER_PROPERTY:
            __ sw(v0, MemOperand(sp, 3 * kPointerSize));
            break;
        }
      }
    }

    Register scratch1 = a1;
    Register scratch2 = t0;
    __ li(scratch1, Operand(Smi::FromInt(count_value)));
    __ AdduAndCheckForOverflow(v0, v0, scratch1, scratch2);
    __ BranchOnNoOverflow(&done, scratch2);
    // Call stub. Undo operation first.
    __ Move(v0, a0);
    __ jmp(&stub_call);
    __ bind(&slow);
  }
  // Non-smi operand: convert to a number first.
  ToNumberStub convert_stub(isolate());
  __ CallStub(&convert_stub);

  // Save result for postfix expressions.
  if (expr->is_postfix()) {
    if (!context()->IsEffect()) {
      // Save the result on the stack. If we have a named or keyed property
      // we store the result under the receiver that is currently on top
      // of the stack.
      switch (assign_type) {
        case VARIABLE:
          __ push(v0);
          break;
        case NAMED_PROPERTY:
          __ sw(v0, MemOperand(sp, kPointerSize));
          break;
        case NAMED_SUPER_PROPERTY:
          __ sw(v0, MemOperand(sp, 2 * kPointerSize));
          break;
        case KEYED_PROPERTY:
          __ sw(v0, MemOperand(sp, 2 * kPointerSize));
          break;
        case KEYED_SUPER_PROPERTY:
          __ sw(v0, MemOperand(sp, 3 * kPointerSize));
          break;
      }
    }
  }

  // Slow path: value + count_value via the ADD binary-op IC.
  __ bind(&stub_call);
  __ mov(a1, v0);
  __ li(a0, Operand(Smi::FromInt(count_value)));

  // Record position before stub call.
  SetSourcePosition(expr->position());

  Handle<Code> code =
      CodeFactory::BinaryOpIC(isolate(), Token::ADD, NO_OVERWRITE).code();
  CallIC(code, expr->CountBinOpFeedbackId());
  patch_site.EmitPatchInfo();
  __ bind(&done);

  // Store the value returned in v0.
  switch (assign_type) {
    case VARIABLE:
      if (expr->is_postfix()) {
        { EffectContext context(this);
          EmitVariableAssignment(expr->expression()->AsVariableProxy()->var(),
                                 Token::ASSIGN);
          PrepareForBailoutForId(expr->AssignmentId(), TOS_REG);
          context.Plug(v0);
        }
        // For all contexts except effect contexts we have the result on
        // top of the stack.
        if (!context()->IsEffect()) {
          context()->PlugTOS();
        }
      } else {
        EmitVariableAssignment(expr->expression()->AsVariableProxy()->var(),
                               Token::ASSIGN);
        PrepareForBailoutForId(expr->AssignmentId(), TOS_REG);
        context()->Plug(v0);
      }
      break;
    case NAMED_PROPERTY: {
      __ mov(StoreDescriptor::ValueRegister(), result_register());
      __ li(StoreDescriptor::NameRegister(),
            Operand(prop->key()->AsLiteral()->value()));
      __ pop(StoreDescriptor::ReceiverRegister());
      CallStoreIC(expr->CountStoreFeedbackId());
      PrepareForBailoutForId(expr->AssignmentId(), TOS_REG);
      if (expr->is_postfix()) {
        if (!context()->IsEffect()) {
          context()->PlugTOS();
        }
      } else {
        context()->Plug(v0);
      }
      break;
    }
    case NAMED_SUPER_PROPERTY: {
      EmitNamedSuperPropertyStore(prop);
      if (expr->is_postfix()) {
        if (!context()->IsEffect()) {
          context()->PlugTOS();
        }
      } else {
        context()->Plug(v0);
      }
      break;
    }
    case KEYED_SUPER_PROPERTY: {
      EmitKeyedSuperPropertyStore(prop);
      if (expr->is_postfix()) {
        if (!context()->IsEffect()) {
          context()->PlugTOS();
        }
      } else {
        context()->Plug(v0);
      }
      break;
    }
    case KEYED_PROPERTY: {
      __ mov(StoreDescriptor::ValueRegister(), result_register());
      __ Pop(StoreDescriptor::ReceiverRegister(),
             StoreDescriptor::NameRegister());
      Handle<Code> ic =
          CodeFactory::KeyedStoreIC(isolate(), strict_mode()).code();
      CallIC(ic, expr->CountStoreFeedbackId());
      PrepareForBailoutForId(expr->AssignmentId(), TOS_REG);
      if (expr->is_postfix()) {
        if (!context()->IsEffect()) {
          context()->PlugTOS();
        }
      } else {
        context()->Plug(v0);
      }
      break;
    }
  }
}
4803
4804
// Loads the value of an expression for typeof without raising a reference
// error: unresolved globals use a non-contextual load, and lookup-slot
// variables call the NoReferenceError runtime variant. All other
// expressions are compiled normally in a duplicate context.
void FullCodeGenerator::VisitForTypeofValue(Expression* expr) {
  DCHECK(!context()->IsEffect());
  DCHECK(!context()->IsTest());
  VariableProxy* proxy = expr->AsVariableProxy();
  if (proxy != NULL && proxy->var()->IsUnallocated()) {
    Comment cmnt(masm_, "[ Global variable");
    __ lw(LoadDescriptor::ReceiverRegister(), GlobalObjectOperand());
    __ li(LoadDescriptor::NameRegister(), Operand(proxy->name()));
    if (FLAG_vector_ics) {
      __ li(VectorLoadICDescriptor::SlotRegister(),
            Operand(SmiFromSlot(proxy->VariableFeedbackSlot())));
    }
    // Use a regular load, not a contextual load, to avoid a reference
    // error.
    CallLoadIC(NOT_CONTEXTUAL);
    PrepareForBailout(expr, TOS_REG);
    context()->Plug(v0);
  } else if (proxy != NULL && proxy->var()->IsLookupSlot()) {
    Comment cmnt(masm_, "[ Lookup slot");
    Label done, slow;

    // Generate code for loading from variables potentially shadowed
    // by eval-introduced variables.
    EmitDynamicLookupFastCase(proxy, INSIDE_TYPEOF, &slow, &done);

    // Slow path: dynamic lookup through the context chain, without
    // throwing on a missing binding.
    __ bind(&slow);
    __ li(a0, Operand(proxy->name()));
    __ Push(cp, a0);
    __ CallRuntime(Runtime::kLoadLookupSlotNoReferenceError, 2);
    PrepareForBailout(expr, TOS_REG);
    __ bind(&done);

    context()->Plug(v0);
  } else {
    // This expression cannot throw a reference error at the top level.
    VisitInDuplicateContext(expr);
  }
}
4843
// Emit an inlined comparison for `typeof sub_expr == "check"` (and the
// other equality flavors handled by TryLiteralCompare).  The typeof value
// is evaluated into v0 and then classified directly via map / instance
// type / bit-field checks, without materializing the typeof string.
// An unrecognized |check| string can never match, so control falls to
// if_false.
void FullCodeGenerator::EmitLiteralCompareTypeof(Expression* expr,
                                                 Expression* sub_expr,
                                                 Handle<String> check) {
  Label materialize_true, materialize_false;
  Label* if_true = NULL;
  Label* if_false = NULL;
  Label* fall_through = NULL;
  context()->PrepareTest(&materialize_true, &materialize_false,
                         &if_true, &if_false, &fall_through);

  // Evaluate the operand for its value (scoped context switch).
  { AccumulatorValueContext context(this);
    VisitForTypeofValue(sub_expr);
  }
  PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);

  Factory* factory = isolate()->factory();
  if (String::Equals(check, factory->number_string())) {
    // "number": smis and heap numbers.
    __ JumpIfSmi(v0, if_true);
    __ lw(v0, FieldMemOperand(v0, HeapObject::kMapOffset));
    __ LoadRoot(at, Heap::kHeapNumberMapRootIndex);
    Split(eq, v0, Operand(at), if_true, if_false, fall_through);
  } else if (String::Equals(check, factory->string_string())) {
    // "string": string instance types that are not undetectable.
    __ JumpIfSmi(v0, if_false);
    // Check for undetectable objects => false.
    __ GetObjectType(v0, v0, a1);
    __ Branch(if_false, ge, a1, Operand(FIRST_NONSTRING_TYPE));
    __ lbu(a1, FieldMemOperand(v0, Map::kBitFieldOffset));
    __ And(a1, a1, Operand(1 << Map::kIsUndetectable));
    Split(eq, a1, Operand(zero_reg),
          if_true, if_false, fall_through);
  } else if (String::Equals(check, factory->symbol_string())) {
    // "symbol": exact instance-type match.
    __ JumpIfSmi(v0, if_false);
    __ GetObjectType(v0, v0, a1);
    Split(eq, a1, Operand(SYMBOL_TYPE), if_true, if_false, fall_through);
  } else if (String::Equals(check, factory->boolean_string())) {
    // "boolean": only the true and false oddballs qualify.
    __ LoadRoot(at, Heap::kTrueValueRootIndex);
    __ Branch(if_true, eq, v0, Operand(at));
    __ LoadRoot(at, Heap::kFalseValueRootIndex);
    Split(eq, v0, Operand(at), if_true, if_false, fall_through);
  } else if (String::Equals(check, factory->undefined_string())) {
    // "undefined": the undefined oddball or an undetectable object.
    __ LoadRoot(at, Heap::kUndefinedValueRootIndex);
    __ Branch(if_true, eq, v0, Operand(at));
    __ JumpIfSmi(v0, if_false);
    // Check for undetectable objects => true.
    __ lw(v0, FieldMemOperand(v0, HeapObject::kMapOffset));
    __ lbu(a1, FieldMemOperand(v0, Map::kBitFieldOffset));
    __ And(a1, a1, Operand(1 << Map::kIsUndetectable));
    Split(ne, a1, Operand(zero_reg), if_true, if_false, fall_through);
  } else if (String::Equals(check, factory->function_string())) {
    // "function": JSFunction or JSFunctionProxy (the two callable types).
    __ JumpIfSmi(v0, if_false);
    STATIC_ASSERT(NUM_OF_CALLABLE_SPEC_OBJECT_TYPES == 2);
    __ GetObjectType(v0, v0, a1);
    __ Branch(if_true, eq, a1, Operand(JS_FUNCTION_TYPE));
    Split(eq, a1, Operand(JS_FUNCTION_PROXY_TYPE),
          if_true, if_false, fall_through);
  } else if (String::Equals(check, factory->object_string())) {
    // "object": null, or a non-callable spec object that is not
    // undetectable.
    __ JumpIfSmi(v0, if_false);
    __ LoadRoot(at, Heap::kNullValueRootIndex);
    __ Branch(if_true, eq, v0, Operand(at));
    // Check for JS objects => true.
    __ GetObjectType(v0, v0, a1);
    __ Branch(if_false, lt, a1, Operand(FIRST_NONCALLABLE_SPEC_OBJECT_TYPE));
    __ lbu(a1, FieldMemOperand(v0, Map::kInstanceTypeOffset));
    __ Branch(if_false, gt, a1, Operand(LAST_NONCALLABLE_SPEC_OBJECT_TYPE));
    // Check for undetectable objects => false.
    __ lbu(a1, FieldMemOperand(v0, Map::kBitFieldOffset));
    __ And(a1, a1, Operand(1 << Map::kIsUndetectable));
    Split(eq, a1, Operand(zero_reg), if_true, if_false, fall_through);
  } else {
    // Unknown typeof string: statically false.
    if (if_false != fall_through) __ jmp(if_false);
  }
  context()->Plug(if_true, if_false);
}
4917
4918
// Emit code for a comparison expression.  Fast literal compares
// (typeof checks, null/undefined compares) are tried first; otherwise the
// comparison is performed for control flow and the boolean result is
// plugged into the expression context.
void FullCodeGenerator::VisitCompareOperation(CompareOperation* expr) {
  Comment cmnt(masm_, "[ CompareOperation");
  SetSourcePosition(expr->position());

  // First we try a fast inlined version of the compare when one of
  // the operands is a literal.
  if (TryLiteralCompare(expr)) return;

  // Always perform the comparison for its control flow. Pack the result
  // into the expression's context after the comparison is performed.
  Label materialize_true, materialize_false;
  Label* if_true = NULL;
  Label* if_false = NULL;
  Label* fall_through = NULL;
  context()->PrepareTest(&materialize_true, &materialize_false,
                         &if_true, &if_false, &fall_through);

  Token::Value op = expr->op();
  VisitForStackValue(expr->left());
  switch (op) {
    case Token::IN:
      // 'in' is implemented by a builtin; it returns the true oddball
      // on success.
      VisitForStackValue(expr->right());
      __ InvokeBuiltin(Builtins::IN, CALL_FUNCTION);
      PrepareForBailoutBeforeSplit(expr, false, NULL, NULL);
      __ LoadRoot(t0, Heap::kTrueValueRootIndex);
      Split(eq, v0, Operand(t0), if_true, if_false, fall_through);
      break;

    case Token::INSTANCEOF: {
      VisitForStackValue(expr->right());
      InstanceofStub stub(isolate(), InstanceofStub::kNoFlags);
      __ CallStub(&stub);
      PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
      // The stub returns 0 for true.
      Split(eq, v0, Operand(zero_reg), if_true, if_false, fall_through);
      break;
    }

    default: {
      // Generic relational/equality compare: left operand in a1 (popped
      // from the stack), right operand in a0.
      VisitForAccumulatorValue(expr->right());
      Condition cc = CompareIC::ComputeCondition(op);
      __ mov(a0, result_register());
      __ pop(a1);

      bool inline_smi_code = ShouldInlineSmiCase(op);
      JumpPatchSite patch_site(masm_);
      if (inline_smi_code) {
        // Fast path: if both operands are smis, compare them directly.
        // The patch site lets the IC system rewrite this smi check later.
        Label slow_case;
        __ Or(a2, a0, Operand(a1));
        patch_site.EmitJumpIfNotSmi(a2, &slow_case);
        Split(cc, a1, Operand(a0), if_true, if_false, NULL);
        __ bind(&slow_case);
      }
      // Record position and call the compare IC.
      SetSourcePosition(expr->position());
      Handle<Code> ic = CodeFactory::CompareIC(isolate(), op).code();
      CallIC(ic, expr->CompareOperationFeedbackId());
      patch_site.EmitPatchInfo();
      PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
      // The IC leaves a flag-like value in v0; compare against zero.
      Split(cc, v0, Operand(zero_reg), if_true, if_false, fall_through);
    }
  }

  // Convert the result of the comparison into one expected for this
  // expression's context.
  context()->Plug(if_true, if_false);
}
4986
4987
// Emit the comparison of |sub_expr| against the literal null or undefined
// (selected by |nil|).  Strict equality reduces to a direct root compare;
// sloppy equality goes through the CompareNilIC, which also has to accept
// the other nil value and undetectable objects.
void FullCodeGenerator::EmitLiteralCompareNil(CompareOperation* expr,
                                              Expression* sub_expr,
                                              NilValue nil) {
  Label materialize_true, materialize_false;
  Label* if_true = NULL;
  Label* if_false = NULL;
  Label* fall_through = NULL;
  context()->PrepareTest(&materialize_true, &materialize_false,
                         &if_true, &if_false, &fall_through);

  VisitForAccumulatorValue(sub_expr);
  PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
  __ mov(a0, result_register());
  if (expr->op() == Token::EQ_STRICT) {
    // Strict compare: identical to the chosen nil root object.
    Heap::RootListIndex nil_value = nil == kNullValue ?
        Heap::kNullValueRootIndex :
        Heap::kUndefinedValueRootIndex;
    __ LoadRoot(a1, nil_value);
    Split(eq, a0, Operand(a1), if_true, if_false, fall_through);
  } else {
    // Sloppy compare: delegate to the CompareNilIC; non-zero means true.
    Handle<Code> ic = CompareNilICStub::GetUninitialized(isolate(), nil);
    CallIC(ic, expr->CompareOperationFeedbackId());
    Split(ne, v0, Operand(zero_reg), if_true, if_false, fall_through);
  }
  context()->Plug(if_true, if_false);
}
5014
5015
// Load the current JSFunction from the frame's function slot into v0 and
// plug it into the expression context.
void FullCodeGenerator::VisitThisFunction(ThisFunction* expr) {
  __ lw(v0, MemOperand(fp, JavaScriptFrameConstants::kFunctionOffset));
  context()->Plug(v0);
}
5020
5021
// The register holding expression results: v0, the MIPS ABI return-value
// register (see the note at the top of this file about moving v0 to a0
// before calls).
Register FullCodeGenerator::result_register() {
  return v0;
}
Andrei Popescu31002712010-02-23 13:46:05 +00005025
5026
// The register holding the current context: cp.
Register FullCodeGenerator::context_register() {
  return cp;
}
5030
5031
// Store |value| into the current frame at fp + frame_offset.
void FullCodeGenerator::StoreToFrameField(int frame_offset, Register value) {
  // Frame offsets must be pointer-size aligned.
  DCHECK_EQ(POINTER_SIZE_ALIGN(frame_offset), frame_offset);
  __ sw(value, MemOperand(fp, frame_offset));
}
5036
5037
// Load slot |context_index| of the current context (cp) into |dst|.
void FullCodeGenerator::LoadContextField(Register dst, int context_index) {
  __ lw(dst, ContextOperand(cp, context_index));
}
5041
5042
// Push the closure argument required when allocating a new context:
// a smi sentinel for script/module scopes, the enclosing context's
// closure for eval scopes, or the current function otherwise.
void FullCodeGenerator::PushFunctionArgumentForContextAllocation() {
  Scope* declaration_scope = scope()->DeclarationScope();
  if (declaration_scope->is_script_scope() ||
      declaration_scope->is_module_scope()) {
    // Contexts nested in the native context have a canonical empty function
    // as their closure, not the anonymous closure containing the global
    // code. Pass a smi sentinel and let the runtime look up the empty
    // function.
    __ li(at, Operand(Smi::FromInt(0)));
  } else if (declaration_scope->is_eval_scope()) {
    // Contexts created by a call to eval have the same closure as the
    // context calling eval, not the anonymous closure containing the eval
    // code. Fetch it from the context.
    __ lw(at, ContextOperand(cp, Context::CLOSURE_INDEX));
  } else {
    DCHECK(declaration_scope->is_function_scope());
    __ lw(at, MemOperand(fp, JavaScriptFrameConstants::kFunctionOffset));
  }
  __ push(at);
}
5063
5064
Andrei Popescu31002712010-02-23 13:46:05 +00005065// ----------------------------------------------------------------------------
5066// Non-local control flow support.
5067
// Save machine state that must survive the finally block, pushing (in
// order): the result register, the return address cooked into a
// code-relative smi, and the isolate's pending-message state.
// ExitFinallyBlock pops the same values in reverse order.
void FullCodeGenerator::EnterFinallyBlock() {
  // a1 is used as scratch throughout; the result must not live there.
  DCHECK(!result_register().is(a1));
  // Store result register while executing finally block.
  __ push(result_register());
  // Cook return address in link register to stack (smi encoded Code* delta).
  __ Subu(a1, ra, Operand(masm_->CodeObject()));
  DCHECK_EQ(1, kSmiTagSize + kSmiShiftSize);
  STATIC_ASSERT(0 == kSmiTag);
  __ Addu(a1, a1, Operand(a1));  // Convert to smi.

  // Store the cooked return address while executing the finally block.
  __ push(a1);

  // Store pending message while executing finally block.
  ExternalReference pending_message_obj =
      ExternalReference::address_of_pending_message_obj(isolate());
  __ li(at, Operand(pending_message_obj));
  __ lw(a1, MemOperand(at));
  __ push(a1);

  ExternalReference has_pending_message =
      ExternalReference::address_of_has_pending_message(isolate());
  __ li(at, Operand(has_pending_message));
  __ lw(a1, MemOperand(at));
  __ SmiTag(a1);  // The flag is pushed as a smi.
  __ push(a1);

  ExternalReference pending_message_script =
      ExternalReference::address_of_pending_message_script(isolate());
  __ li(at, Operand(pending_message_script));
  __ lw(a1, MemOperand(at));
  __ push(a1);
}
5101
5102
// Restore the state saved by EnterFinallyBlock, popping in the reverse
// of the push order, then jump back to the uncooked return address.
void FullCodeGenerator::ExitFinallyBlock() {
  DCHECK(!result_register().is(a1));
  // Restore pending message from stack.
  __ pop(a1);
  ExternalReference pending_message_script =
      ExternalReference::address_of_pending_message_script(isolate());
  __ li(at, Operand(pending_message_script));
  __ sw(a1, MemOperand(at));

  __ pop(a1);
  __ SmiUntag(a1);  // The flag was pushed smi-tagged.
  ExternalReference has_pending_message =
      ExternalReference::address_of_has_pending_message(isolate());
  __ li(at, Operand(has_pending_message));
  __ sw(a1, MemOperand(at));

  __ pop(a1);
  ExternalReference pending_message_obj =
      ExternalReference::address_of_pending_message_obj(isolate());
  __ li(at, Operand(pending_message_obj));
  __ sw(a1, MemOperand(at));

  // Restore the cooked return address (pushed last by EnterFinallyBlock)
  // from the stack.
  __ pop(a1);

  // Restore the result register from the stack.
  __ pop(result_register());
  DCHECK_EQ(1, kSmiTagSize + kSmiShiftSize);
  // Uncook the return address (un-smi-tag, rebase on the code object)
  // and return.
  __ sra(a1, a1, 1);  // Un-smi-tag value.
  __ Addu(at, a1, Operand(masm_->CodeObject()));
  __ Jump(at);
}
5135
5136
5137#undef __
5138
Ben Murdoch69a99ed2011-11-30 16:03:39 +00005139#define __ ACCESS_MASM(masm())
5140
// Unwind out of a try-finally: drop down to the handler block, restore
// the context saved there, pop the try handler, and call the finally
// code.  The in/out depth counters are zeroed because the unwinding has
// been fully emitted here; returns the enclosing nested statement.
FullCodeGenerator::NestedStatement* FullCodeGenerator::TryFinally::Exit(
    int* stack_depth,
    int* context_length) {
  // The macros used here must preserve the result register.

  // Because the handler block contains the context of the finally
  // code, we can restore it directly from there for the finally code
  // rather than iteratively unwinding contexts via their previous
  // links.
  __ Drop(*stack_depth);  // Down to the handler block.
  if (*context_length > 0) {
    // Restore the context to its dedicated register and the stack.
    __ lw(cp, MemOperand(sp, StackHandlerConstants::kContextOffset));
    __ sw(cp, MemOperand(fp, StandardFrameConstants::kContextOffset));
  }
  __ PopTryHandler();
  __ Call(finally_entry_);

  *stack_depth = 0;
  *context_length = 0;
  return previous_;
}
5163
5164
5165#undef __
5166
Ben Murdochb8a8cc12014-11-26 15:28:44 +00005167
// Patch the back-edge check whose last instruction is just before |pc|.
// Only two parts of the six-instruction sequence are rewritten: the first
// instruction (which determines whether the following beq skips the stub
// call) and the lui/ori pair holding the stub's entry address.
void BackEdgeTable::PatchAt(Code* unoptimized_code,
                            Address pc,
                            BackEdgeState target_state,
                            Code* replacement_code) {
  static const int kInstrSize = Assembler::kInstrSize;
  // The patchable instruction is six instructions before pc (ok-label).
  Address branch_address = pc - 6 * kInstrSize;
  CodePatcher patcher(branch_address, 1);

  switch (target_state) {
    case INTERRUPT:
      // slt at, a3, zero_reg (in case of count based interrupts)
      // beq at, zero_reg, ok
      // lui t9, <interrupt stub address> upper
      // ori t9, <interrupt stub address> lower
      // jalr t9
      // nop
      // ok-label ----- pc_after points here
      patcher.masm()->slt(at, a3, zero_reg);
      break;
    case ON_STACK_REPLACEMENT:
    case OSR_AFTER_STACK_CHECK:
      // addiu at, zero_reg, 1 (so the beq below never skips the call)
      // beq at, zero_reg, ok  ;; Not changed
      // lui t9, <on-stack replacement address> upper
      // ori t9, <on-stack replacement address> lower
      // jalr t9  ;; Not changed
      // nop  ;; Not changed
      // ok-label ----- pc_after points here
      patcher.masm()->addiu(at, zero_reg, 1);
      break;
  }
  Address pc_immediate_load_address = pc - 4 * kInstrSize;
  // Replace the stack check address in the load-immediate (lui/ori pair)
  // with the entry address of the replacement code.
  Assembler::set_target_address_at(pc_immediate_load_address,
                                   replacement_code->entry());

  // Notify the incremental marker of the freshly patched code target.
  unoptimized_code->GetHeap()->incremental_marking()->RecordCodeTargetPatch(
      unoptimized_code, pc_immediate_load_address, replacement_code);
}
5208
5209
5210BackEdgeTable::BackEdgeState BackEdgeTable::GetBackEdgeState(
5211 Isolate* isolate,
5212 Code* unoptimized_code,
5213 Address pc) {
5214 static const int kInstrSize = Assembler::kInstrSize;
5215 Address branch_address = pc - 6 * kInstrSize;
5216 Address pc_immediate_load_address = pc - 4 * kInstrSize;
5217
5218 DCHECK(Assembler::IsBeq(Assembler::instr_at(pc - 5 * kInstrSize)));
5219 if (!Assembler::IsAddImmediate(Assembler::instr_at(branch_address))) {
5220 DCHECK(reinterpret_cast<uint32_t>(
5221 Assembler::target_address_at(pc_immediate_load_address)) ==
5222 reinterpret_cast<uint32_t>(
5223 isolate->builtins()->InterruptCheck()->entry()));
5224 return INTERRUPT;
5225 }
5226
5227 DCHECK(Assembler::IsAddImmediate(Assembler::instr_at(branch_address)));
5228
5229 if (reinterpret_cast<uint32_t>(
5230 Assembler::target_address_at(pc_immediate_load_address)) ==
5231 reinterpret_cast<uint32_t>(
5232 isolate->builtins()->OnStackReplacement()->entry())) {
5233 return ON_STACK_REPLACEMENT;
5234 }
5235
5236 DCHECK(reinterpret_cast<uint32_t>(
5237 Assembler::target_address_at(pc_immediate_load_address)) ==
5238 reinterpret_cast<uint32_t>(
5239 isolate->builtins()->OsrAfterStackCheck()->entry()));
5240 return OSR_AFTER_STACK_CHECK;
5241}
5242
5243
Andrei Popescu31002712010-02-23 13:46:05 +00005244} } // namespace v8::internal
Leon Clarkef7060e22010-06-03 12:02:55 +01005245
5246#endif // V8_TARGET_ARCH_MIPS