// Copyright 2012 the V8 project authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.

#if V8_TARGET_ARCH_ARM

#include "src/ast/scopes.h"
#include "src/code-factory.h"
#include "src/code-stubs.h"
#include "src/codegen.h"
#include "src/debug/debug.h"
#include "src/full-codegen/full-codegen.h"
#include "src/ic/ic.h"
#include "src/parsing/parser.h"

#include "src/arm/code-stubs-arm.h"
#include "src/arm/macro-assembler-arm.h"

namespace v8 {
namespace internal {

#define __ ACCESS_MASM(masm())

// A patch site is a location in the code that can be patched. This class
// has a number of methods to emit the patchable code and a method,
// EmitPatchInfo, to record a marker back to the patchable code. The marker
// is a cmp rx, #yyy instruction, where x * 0x00000fff + yyy (the raw 12-bit
// immediate value) is the delta from the pc to the first instruction of the
// patchable code.
class JumpPatchSite BASE_EMBEDDED {
 public:
  explicit JumpPatchSite(MacroAssembler* masm) : masm_(masm) {
#ifdef DEBUG
    info_emitted_ = false;
#endif
  }

  ~JumpPatchSite() {
    DCHECK(patch_site_.is_bound() == info_emitted_);
  }

  // When initially emitting this ensure that a jump is always generated to
  // skip the inlined smi code.
  void EmitJumpIfNotSmi(Register reg, Label* target) {
    DCHECK(!patch_site_.is_bound() && !info_emitted_);
    Assembler::BlockConstPoolScope block_const_pool(masm_);
    __ bind(&patch_site_);
    __ cmp(reg, Operand(reg));
    __ b(eq, target);  // Always taken before patched.
  }

  // When initially emitting this ensure that a jump is never generated to
  // skip the inlined smi code.
  void EmitJumpIfSmi(Register reg, Label* target) {
    DCHECK(!patch_site_.is_bound() && !info_emitted_);
    Assembler::BlockConstPoolScope block_const_pool(masm_);
    __ bind(&patch_site_);
    __ cmp(reg, Operand(reg));
    __ b(ne, target);  // Never taken before patched.
  }

  void EmitPatchInfo() {
    // Block literal pool emission whilst recording patch site information.
    Assembler::BlockConstPoolScope block_const_pool(masm_);
    if (patch_site_.is_bound()) {
      int delta_to_patch_site = masm_->InstructionsGeneratedSince(&patch_site_);
      Register reg;
      reg.set_code(delta_to_patch_site / kOff12Mask);
      __ cmp_raw_immediate(reg, delta_to_patch_site % kOff12Mask);
#ifdef DEBUG
      info_emitted_ = true;
#endif
    } else {
      __ nop();  // Signals no inlined code.
    }
  }

 private:
  MacroAssembler* masm() { return masm_; }
  MacroAssembler* masm_;
  Label patch_site_;
#ifdef DEBUG
  bool info_emitted_;
#endif
};
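
// A worked example of the encoding above (a sketch, assuming kOff12Mask is
// 0xfff == 4095, as the 0x00000fff in the class comment suggests): for a
// delta of 5000 instructions from the patch site, EmitPatchInfo picks
// register code 5000 / 4095 == 1 and immediate 5000 % 4095 == 905, emitting
// cmp r1, #905; the patcher recovers the delta as 1 * 4095 + 905 == 5000.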

// Generate code for a JS function. On entry to the function the receiver
// and arguments have been pushed on the stack left to right. The actual
// argument count matches the formal parameter count expected by the
// function.
//
// The live registers are:
//   o r1: the JS function object being called (i.e., ourselves)
//   o r3: the new target value
//   o cp: our context
//   o pp: our caller's constant pool pointer (if enabled)
//   o fp: our caller's frame pointer
//   o sp: stack pointer
//   o lr: return address
//
// The function builds a JS frame. Please see JavaScriptFrameConstants in
// frames-arm.h for its layout.
void FullCodeGenerator::Generate() {
  CompilationInfo* info = info_;
  profiling_counter_ = isolate()->factory()->NewCell(
      Handle<Smi>(Smi::FromInt(FLAG_interrupt_budget), isolate()));
  SetFunctionPosition(literal());
  Comment cmnt(masm_, "[ function compiled by full code generator");

  ProfileEntryHookStub::MaybeCallEntryHook(masm_);

  if (FLAG_debug_code && info->ExpectsJSReceiverAsReceiver()) {
    int receiver_offset = info->scope()->num_parameters() * kPointerSize;
    __ ldr(r2, MemOperand(sp, receiver_offset));
    __ AssertNotSmi(r2);
    __ CompareObjectType(r2, r2, no_reg, FIRST_JS_RECEIVER_TYPE);
    __ Assert(ge, kSloppyFunctionExpectsJSReceiverReceiver);
  }

  // Open a frame scope to indicate that there is a frame on the stack. The
  // MANUAL indicates that the scope shouldn't actually generate code to set up
  // the frame (that is done below).
  FrameScope frame_scope(masm_, StackFrame::MANUAL);

  info->set_prologue_offset(masm_->pc_offset());
  __ Prologue(info->GeneratePreagedPrologue());

  { Comment cmnt(masm_, "[ Allocate locals");
    int locals_count = info->scope()->num_stack_slots();
    // Generators allocate locals, if any, in context slots.
    DCHECK(!IsGeneratorFunction(info->literal()->kind()) || locals_count == 0);
    OperandStackDepthIncrement(locals_count);
    if (locals_count > 0) {
      if (locals_count >= 128) {
        Label ok;
        __ sub(r9, sp, Operand(locals_count * kPointerSize));
        __ LoadRoot(r2, Heap::kRealStackLimitRootIndex);
        __ cmp(r9, Operand(r2));
        __ b(hs, &ok);
        __ CallRuntime(Runtime::kThrowStackOverflow);
        __ bind(&ok);
      }
      __ LoadRoot(r9, Heap::kUndefinedValueRootIndex);
      int kMaxPushes = FLAG_optimize_for_size ? 4 : 32;
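      // The pushes are batched: for example, with locals_count == 70 and
      // kMaxPushes == 32, the loop below runs 70 / 32 == 2 iterations
      // (64 pushes) and the tail afterwards emits the remaining
      // 70 % 32 == 6 pushes.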
      if (locals_count >= kMaxPushes) {
        int loop_iterations = locals_count / kMaxPushes;
        __ mov(r2, Operand(loop_iterations));
        Label loop_header;
        __ bind(&loop_header);
        // Do pushes.
        for (int i = 0; i < kMaxPushes; i++) {
          __ push(r9);
        }
        // Continue loop if not done.
        __ sub(r2, r2, Operand(1), SetCC);
        __ b(&loop_header, ne);
      }
      int remaining = locals_count % kMaxPushes;
      // Emit the remaining pushes.
      for (int i = 0; i < remaining; i++) {
        __ push(r9);
      }
    }
  }

  bool function_in_register_r1 = true;

  // Possibly allocate a local context.
  if (info->scope()->num_heap_slots() > 0) {
    // Argument to NewContext is the function, which is still in r1.
    Comment cmnt(masm_, "[ Allocate context");
    bool need_write_barrier = true;
    int slots = info->scope()->num_heap_slots() - Context::MIN_CONTEXT_SLOTS;
    if (info->scope()->is_script_scope()) {
      __ push(r1);
      __ Push(info->scope()->GetScopeInfo(info->isolate()));
      __ CallRuntime(Runtime::kNewScriptContext);
      PrepareForBailoutForId(BailoutId::ScriptContext(),
                             BailoutState::TOS_REGISTER);
      // The new target value is not used, clobbering is safe.
      DCHECK_NULL(info->scope()->new_target_var());
    } else {
      if (info->scope()->new_target_var() != nullptr) {
        __ push(r3);  // Preserve new target.
      }
      if (slots <= FastNewContextStub::kMaximumSlots) {
        FastNewContextStub stub(isolate(), slots);
        __ CallStub(&stub);
        // Result of FastNewContextStub is always in new space.
        need_write_barrier = false;
      } else {
        __ push(r1);
        __ CallRuntime(Runtime::kNewFunctionContext);
      }
      if (info->scope()->new_target_var() != nullptr) {
        __ pop(r3);  // Restore new target.
      }
    }
    function_in_register_r1 = false;
    // Context is returned in r0. It replaces the context passed to us.
    // It's saved in the stack and kept live in cp.
    __ mov(cp, r0);
    __ str(r0, MemOperand(fp, StandardFrameConstants::kContextOffset));
    // Copy any necessary parameters into the context.
    int num_parameters = info->scope()->num_parameters();
    int first_parameter = info->scope()->has_this_declaration() ? -1 : 0;
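    // Caller-pushed parameters sit above the frame: parameter i lives at
    // fp + kCallerSPOffset + (num_parameters - 1 - i) * kPointerSize, with
    // i == -1 denoting the receiver. For example, with three parameters and
    // kPointerSize == 4, parameter 0 is at kCallerSPOffset + 8 and the
    // receiver at kCallerSPOffset + 12.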
    for (int i = first_parameter; i < num_parameters; i++) {
      Variable* var = (i == -1) ? scope()->receiver() : scope()->parameter(i);
      if (var->IsContextSlot()) {
        int parameter_offset = StandardFrameConstants::kCallerSPOffset +
            (num_parameters - 1 - i) * kPointerSize;
        // Load parameter from stack.
        __ ldr(r0, MemOperand(fp, parameter_offset));
        // Store it in the context.
        MemOperand target = ContextMemOperand(cp, var->index());
        __ str(r0, target);

        // Update the write barrier.
        if (need_write_barrier) {
          __ RecordWriteContextSlot(cp, target.offset(), r0, r2,
                                    kLRHasBeenSaved, kDontSaveFPRegs);
        } else if (FLAG_debug_code) {
          Label done;
          __ JumpIfInNewSpace(cp, r0, &done);
          __ Abort(kExpectedNewSpaceObject);
          __ bind(&done);
        }
      }
    }
  }

  // The registers holding this function and the new target are both trashed
  // if we bail out here. But since that can only happen when the new target
  // is not used and we allocate a context, the value of
  // |function_in_register_r1| is correct.
  PrepareForBailoutForId(BailoutId::FunctionContext(),
                         BailoutState::NO_REGISTERS);

  // Possibly set up a local binding to the this function, which is used in
  // derived constructors with super calls.
  Variable* this_function_var = scope()->this_function_var();
  if (this_function_var != nullptr) {
    Comment cmnt(masm_, "[ This function");
    if (!function_in_register_r1) {
      __ ldr(r1, MemOperand(fp, JavaScriptFrameConstants::kFunctionOffset));
      // The write barrier clobbers the register again; keep it marked as such.
    }
    SetVar(this_function_var, r1, r0, r2);
  }

  // Possibly set up a local binding to the new target value.
  Variable* new_target_var = scope()->new_target_var();
  if (new_target_var != nullptr) {
    Comment cmnt(masm_, "[ new.target");
    SetVar(new_target_var, r3, r0, r2);
  }

  // Possibly allocate a rest parameter array.
  int rest_index;
  Variable* rest_param = scope()->rest_parameter(&rest_index);
  if (rest_param) {
    Comment cmnt(masm_, "[ Allocate rest parameter array");
    if (!function_in_register_r1) {
      __ ldr(r1, MemOperand(fp, JavaScriptFrameConstants::kFunctionOffset));
    }
    FastNewRestParameterStub stub(isolate());
    __ CallStub(&stub);
    function_in_register_r1 = false;
    SetVar(rest_param, r0, r1, r2);
  }

  Variable* arguments = scope()->arguments();
  if (arguments != NULL) {
    // Function uses arguments object.
    Comment cmnt(masm_, "[ Allocate arguments object");
    if (!function_in_register_r1) {
      // Load this again, if it's used by the local context below.
      __ ldr(r1, MemOperand(fp, JavaScriptFrameConstants::kFunctionOffset));
    }
    if (is_strict(language_mode()) || !has_simple_parameters()) {
      FastNewStrictArgumentsStub stub(isolate());
      __ CallStub(&stub);
    } else if (literal()->has_duplicate_parameters()) {
      __ Push(r1);
      __ CallRuntime(Runtime::kNewSloppyArguments_Generic);
    } else {
      FastNewSloppyArgumentsStub stub(isolate());
      __ CallStub(&stub);
    }

    SetVar(arguments, r0, r1, r2);
  }

  if (FLAG_trace) {
    __ CallRuntime(Runtime::kTraceEnter);
  }

  // Visit the declarations and body.
  PrepareForBailoutForId(BailoutId::FunctionEntry(),
                         BailoutState::NO_REGISTERS);
  {
    Comment cmnt(masm_, "[ Declarations");
    VisitDeclarations(scope()->declarations());
  }

  // Assert that the declarations do not use ICs. Otherwise the debugger
  // won't be able to redirect a PC at an IC to the correct IC in newly
  // recompiled code.
  DCHECK_EQ(0, ic_total_count_);

  {
    Comment cmnt(masm_, "[ Stack check");
    PrepareForBailoutForId(BailoutId::Declarations(),
                           BailoutState::NO_REGISTERS);
    Label ok;
    __ LoadRoot(ip, Heap::kStackLimitRootIndex);
    __ cmp(sp, Operand(ip));
    __ b(hs, &ok);
    Handle<Code> stack_check = isolate()->builtins()->StackCheck();
    PredictableCodeSizeScope predictable(masm_);
    predictable.ExpectSize(
        masm_->CallSize(stack_check, RelocInfo::CODE_TARGET));
    __ Call(stack_check, RelocInfo::CODE_TARGET);
    __ bind(&ok);
  }

  {
    Comment cmnt(masm_, "[ Body");
    DCHECK(loop_depth() == 0);
    VisitStatements(literal()->body());
    DCHECK(loop_depth() == 0);
  }

  // Always emit a 'return undefined' in case control fell off the end of
  // the body.
  { Comment cmnt(masm_, "[ return <undefined>;");
    __ LoadRoot(r0, Heap::kUndefinedValueRootIndex);
  }
  EmitReturnSequence();

  // Force emit the constant pool, so it doesn't get emitted in the middle
  // of the back edge table.
  masm()->CheckConstPool(true, false);
}


void FullCodeGenerator::ClearAccumulator() {
  __ mov(r0, Operand(Smi::FromInt(0)));
}


void FullCodeGenerator::EmitProfilingCounterDecrement(int delta) {
  __ mov(r2, Operand(profiling_counter_));
  __ ldr(r3, FieldMemOperand(r2, Cell::kValueOffset));
  __ sub(r3, r3, Operand(Smi::FromInt(delta)), SetCC);
  __ str(r3, FieldMemOperand(r2, Cell::kValueOffset));
}


#ifdef CAN_USE_ARMV7_INSTRUCTIONS
static const int kProfileCounterResetSequenceLength = 5 * Assembler::kInstrSize;
#else
static const int kProfileCounterResetSequenceLength = 7 * Assembler::kInstrSize;
#endif
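// The budget breaks down as the first mov of the counter cell (up to 3
// instructions on ARMv7, up to 5 on ARMv6) plus the trailing mov and str,
// i.e. 3 + 2 == 5 or 5 + 2 == 7 instructions; EmitProfilingCounterReset
// below pads the first mov with nops so the sequence always has this size.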


void FullCodeGenerator::EmitProfilingCounterReset() {
  Assembler::BlockConstPoolScope block_const_pool(masm_);
  PredictableCodeSizeScope predictable_code_size_scope(
      masm_, kProfileCounterResetSequenceLength);
  Label start;
  __ bind(&start);
  int reset_value = FLAG_interrupt_budget;
  __ mov(r2, Operand(profiling_counter_));
  // The mov instruction above can be either 1 to 3 (for ARMv7) or 1 to 5
  // instructions (for ARMv6) depending upon whether it uses an extended
  // constant pool; insert nops to compensate.
  int expected_instr_count =
      (kProfileCounterResetSequenceLength / Assembler::kInstrSize) - 2;
  DCHECK(masm_->InstructionsGeneratedSince(&start) <= expected_instr_count);
  while (masm_->InstructionsGeneratedSince(&start) != expected_instr_count) {
    __ nop();
  }
  __ mov(r3, Operand(Smi::FromInt(reset_value)));
  __ str(r3, FieldMemOperand(r2, Cell::kValueOffset));
}


void FullCodeGenerator::EmitBackEdgeBookkeeping(IterationStatement* stmt,
                                                Label* back_edge_target) {
  Comment cmnt(masm_, "[ Back edge bookkeeping");
  // Block literal pools whilst emitting back edge code.
  Assembler::BlockConstPoolScope block_const_pool(masm_);
  Label ok;

  DCHECK(back_edge_target->is_bound());
  int distance = masm_->SizeOfCodeGeneratedSince(back_edge_target);
  int weight = Min(kMaxBackEdgeWeight,
                   Max(1, distance / kCodeSizeMultiplier));
  EmitProfilingCounterDecrement(weight);
  __ b(pl, &ok);
  __ Call(isolate()->builtins()->InterruptCheck(), RelocInfo::CODE_TARGET);

  // Record a mapping of this PC offset to the OSR id. This is used to find
  // the AST id from the unoptimized code in order to use it as a key into
  // the deoptimization input data found in the optimized code.
  RecordBackEdge(stmt->OsrEntryId());

  EmitProfilingCounterReset();

  __ bind(&ok);
  PrepareForBailoutForId(stmt->EntryId(), BailoutState::NO_REGISTERS);
  // Record a mapping of the OSR id to this PC. This is used if the OSR
  // entry becomes the target of a bailout. We don't expect it to be, but
  // we want it to work if it is.
  PrepareForBailoutForId(stmt->OsrEntryId(), BailoutState::NO_REGISTERS);
}
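
// A sketch of the weight computation above, with hypothetical constants:
// for a back edge spanning 2048 bytes of generated code and a
// kCodeSizeMultiplier of 128, the counter is decremented by
// min(kMaxBackEdgeWeight, max(1, 2048 / 128)) == 16 (assuming
// kMaxBackEdgeWeight >= 16), so larger loop bodies drain the interrupt
// budget proportionally faster.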

void FullCodeGenerator::EmitProfilingCounterHandlingForReturnSequence(
    bool is_tail_call) {
  // Pretend that the exit is a backwards jump to the entry.
  int weight = 1;
  if (info_->ShouldSelfOptimize()) {
    weight = FLAG_interrupt_budget / FLAG_self_opt_count;
  } else {
    int distance = masm_->pc_offset();
    weight = Min(kMaxBackEdgeWeight, Max(1, distance / kCodeSizeMultiplier));
  }
  EmitProfilingCounterDecrement(weight);
  Label ok;
  __ b(pl, &ok);
  // Don't need to save result register if we are going to do a tail call.
  if (!is_tail_call) {
    __ push(r0);
  }
  __ Call(isolate()->builtins()->InterruptCheck(), RelocInfo::CODE_TARGET);
  if (!is_tail_call) {
    __ pop(r0);
  }
  EmitProfilingCounterReset();
  __ bind(&ok);
}

void FullCodeGenerator::EmitReturnSequence() {
  Comment cmnt(masm_, "[ Return sequence");
  if (return_label_.is_bound()) {
    __ b(&return_label_);
  } else {
    __ bind(&return_label_);
    if (FLAG_trace) {
      // Push the return value on the stack as the parameter.
      // Runtime::TraceExit returns its parameter in r0.
      __ push(r0);
      __ CallRuntime(Runtime::kTraceExit);
    }
    EmitProfilingCounterHandlingForReturnSequence(false);

    // Make sure that the constant pool is not emitted inside of the return
    // sequence.
    { Assembler::BlockConstPoolScope block_const_pool(masm_);
      int32_t arg_count = info_->scope()->num_parameters() + 1;
      int32_t sp_delta = arg_count * kPointerSize;
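      // arg_count includes the receiver, so a function with two declared
      // parameters pops 3 * kPointerSize == 12 bytes of arguments on ARM.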
      SetReturnPosition(literal());
      // TODO(svenpanne) The code below is sometimes 4 words, sometimes 5!
      PredictableCodeSizeScope predictable(masm_, -1);
      __ LeaveFrame(StackFrame::JAVA_SCRIPT);
      { ConstantPoolUnavailableScope constant_pool_unavailable(masm_);
        __ add(sp, sp, Operand(sp_delta));
        __ Jump(lr);
      }
    }
  }
}

void FullCodeGenerator::RestoreContext() {
  __ ldr(cp, MemOperand(fp, StandardFrameConstants::kContextOffset));
}

void FullCodeGenerator::StackValueContext::Plug(Variable* var) const {
  DCHECK(var->IsStackAllocated() || var->IsContextSlot());
  codegen()->GetVar(result_register(), var);
  codegen()->PushOperand(result_register());
}


void FullCodeGenerator::EffectContext::Plug(Heap::RootListIndex index) const {
}


void FullCodeGenerator::AccumulatorValueContext::Plug(
    Heap::RootListIndex index) const {
  __ LoadRoot(result_register(), index);
}


void FullCodeGenerator::StackValueContext::Plug(
    Heap::RootListIndex index) const {
  __ LoadRoot(result_register(), index);
  codegen()->PushOperand(result_register());
}


void FullCodeGenerator::TestContext::Plug(Heap::RootListIndex index) const {
  codegen()->PrepareForBailoutBeforeSplit(condition(),
                                          true,
                                          true_label_,
                                          false_label_);
  if (index == Heap::kUndefinedValueRootIndex ||
      index == Heap::kNullValueRootIndex ||
      index == Heap::kFalseValueRootIndex) {
    if (false_label_ != fall_through_) __ b(false_label_);
  } else if (index == Heap::kTrueValueRootIndex) {
    if (true_label_ != fall_through_) __ b(true_label_);
  } else {
    __ LoadRoot(result_register(), index);
    codegen()->DoTest(this);
  }
}


void FullCodeGenerator::EffectContext::Plug(Handle<Object> lit) const {
}


void FullCodeGenerator::AccumulatorValueContext::Plug(
    Handle<Object> lit) const {
  __ mov(result_register(), Operand(lit));
}


void FullCodeGenerator::StackValueContext::Plug(Handle<Object> lit) const {
  // Immediates cannot be pushed directly.
  __ mov(result_register(), Operand(lit));
  codegen()->PushOperand(result_register());
}


void FullCodeGenerator::TestContext::Plug(Handle<Object> lit) const {
  codegen()->PrepareForBailoutBeforeSplit(condition(),
                                          true,
                                          true_label_,
                                          false_label_);
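  // The literal's boolean value is known at compile time, so the test can be
  // resolved statically: undefined, null, false, empty strings and Smi 0
  // jump straight to the false label; true, JS objects, non-empty strings
  // and non-zero Smis jump to the true label; everything else falls back to
  // the generic DoTest below.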
  DCHECK(lit->IsNull(isolate()) || lit->IsUndefined(isolate()) ||
         !lit->IsUndetectable());
  if (lit->IsUndefined(isolate()) || lit->IsNull(isolate()) ||
      lit->IsFalse(isolate())) {
    if (false_label_ != fall_through_) __ b(false_label_);
  } else if (lit->IsTrue(isolate()) || lit->IsJSObject()) {
    if (true_label_ != fall_through_) __ b(true_label_);
  } else if (lit->IsString()) {
    if (String::cast(*lit)->length() == 0) {
      if (false_label_ != fall_through_) __ b(false_label_);
    } else {
      if (true_label_ != fall_through_) __ b(true_label_);
    }
  } else if (lit->IsSmi()) {
    if (Smi::cast(*lit)->value() == 0) {
      if (false_label_ != fall_through_) __ b(false_label_);
    } else {
      if (true_label_ != fall_through_) __ b(true_label_);
    }
  } else {
    // For simplicity we always test the accumulator register.
    __ mov(result_register(), Operand(lit));
    codegen()->DoTest(this);
  }
}


void FullCodeGenerator::StackValueContext::DropAndPlug(int count,
                                                       Register reg) const {
  DCHECK(count > 0);
  if (count > 1) codegen()->DropOperands(count - 1);
  __ str(reg, MemOperand(sp, 0));
}


void FullCodeGenerator::EffectContext::Plug(Label* materialize_true,
                                            Label* materialize_false) const {
  DCHECK(materialize_true == materialize_false);
  __ bind(materialize_true);
}


void FullCodeGenerator::AccumulatorValueContext::Plug(
    Label* materialize_true,
    Label* materialize_false) const {
  Label done;
  __ bind(materialize_true);
  __ LoadRoot(result_register(), Heap::kTrueValueRootIndex);
  __ jmp(&done);
  __ bind(materialize_false);
  __ LoadRoot(result_register(), Heap::kFalseValueRootIndex);
  __ bind(&done);
}


void FullCodeGenerator::StackValueContext::Plug(
    Label* materialize_true,
    Label* materialize_false) const {
  Label done;
  __ bind(materialize_true);
  __ LoadRoot(ip, Heap::kTrueValueRootIndex);
  __ jmp(&done);
  __ bind(materialize_false);
  __ LoadRoot(ip, Heap::kFalseValueRootIndex);
  __ bind(&done);
  codegen()->PushOperand(ip);
}


void FullCodeGenerator::TestContext::Plug(Label* materialize_true,
                                          Label* materialize_false) const {
  DCHECK(materialize_true == true_label_);
  DCHECK(materialize_false == false_label_);
}


void FullCodeGenerator::AccumulatorValueContext::Plug(bool flag) const {
  Heap::RootListIndex value_root_index =
      flag ? Heap::kTrueValueRootIndex : Heap::kFalseValueRootIndex;
  __ LoadRoot(result_register(), value_root_index);
}


void FullCodeGenerator::StackValueContext::Plug(bool flag) const {
  Heap::RootListIndex value_root_index =
      flag ? Heap::kTrueValueRootIndex : Heap::kFalseValueRootIndex;
  __ LoadRoot(ip, value_root_index);
  codegen()->PushOperand(ip);
}


void FullCodeGenerator::TestContext::Plug(bool flag) const {
  codegen()->PrepareForBailoutBeforeSplit(condition(),
                                          true,
                                          true_label_,
                                          false_label_);
  if (flag) {
    if (true_label_ != fall_through_) __ b(true_label_);
  } else {
    if (false_label_ != fall_through_) __ b(false_label_);
  }
}


void FullCodeGenerator::DoTest(Expression* condition,
                               Label* if_true,
                               Label* if_false,
                               Label* fall_through) {
  Handle<Code> ic = ToBooleanICStub::GetUninitialized(isolate());
  CallIC(ic, condition->test_id());
  __ CompareRoot(result_register(), Heap::kTrueValueRootIndex);
  Split(eq, if_true, if_false, fall_through);
}


void FullCodeGenerator::Split(Condition cond,
                              Label* if_true,
                              Label* if_false,
                              Label* fall_through) {
  if (if_false == fall_through) {
    __ b(cond, if_true);
  } else if (if_true == fall_through) {
    __ b(NegateCondition(cond), if_false);
  } else {
    __ b(cond, if_true);
    __ b(if_false);
  }
}
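
// Split emits the minimum number of branches: when the false label is the
// fall-through, a single b(cond, if_true) suffices; when the true label is
// the fall-through, the condition is negated into a single
// b(NegateCondition(cond), if_false); otherwise both branches are emitted.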


MemOperand FullCodeGenerator::StackOperand(Variable* var) {
  DCHECK(var->IsStackAllocated());
  // Offset is negative because higher indexes are at lower addresses.
  int offset = -var->index() * kPointerSize;
  // Adjust by a (parameter or local) base offset.
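  // For example (assuming 0-based variable indices), local #2 ends up below
  // the frame pointer at fp + kLocal0Offset - 2 * kPointerSize, while
  // parameter #i of an n-parameter function ends up above it at
  // fp + (n + 1 - i) * kPointerSize; the extra slot is the receiver.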
  if (var->IsParameter()) {
    offset += (info_->scope()->num_parameters() + 1) * kPointerSize;
  } else {
    offset += JavaScriptFrameConstants::kLocal0Offset;
  }
  return MemOperand(fp, offset);
}


MemOperand FullCodeGenerator::VarOperand(Variable* var, Register scratch) {
  DCHECK(var->IsContextSlot() || var->IsStackAllocated());
  if (var->IsContextSlot()) {
    int context_chain_length = scope()->ContextChainLength(var->scope());
    __ LoadContext(scratch, context_chain_length);
    return ContextMemOperand(scratch, var->index());
  } else {
    return StackOperand(var);
  }
}


void FullCodeGenerator::GetVar(Register dest, Variable* var) {
  // Use destination as scratch.
  MemOperand location = VarOperand(var, dest);
  __ ldr(dest, location);
}


void FullCodeGenerator::SetVar(Variable* var,
                               Register src,
                               Register scratch0,
                               Register scratch1) {
  DCHECK(var->IsContextSlot() || var->IsStackAllocated());
  DCHECK(!scratch0.is(src));
  DCHECK(!scratch0.is(scratch1));
  DCHECK(!scratch1.is(src));
  MemOperand location = VarOperand(var, scratch0);
  __ str(src, location);

  // Emit the write barrier code if the location is in the heap.
  if (var->IsContextSlot()) {
    __ RecordWriteContextSlot(scratch0,
                              location.offset(),
                              src,
                              scratch1,
                              kLRHasBeenSaved,
                              kDontSaveFPRegs);
  }
}


void FullCodeGenerator::PrepareForBailoutBeforeSplit(Expression* expr,
                                                     bool should_normalize,
                                                     Label* if_true,
                                                     Label* if_false) {
  // Only prepare for bailouts before splits if we're in a test
  // context. Otherwise, we let the Visit function deal with the
  // preparation to avoid preparing with the same AST id twice.
  if (!context()->IsTest()) return;

  Label skip;
  if (should_normalize) __ b(&skip);
  PrepareForBailout(expr, BailoutState::TOS_REGISTER);
  if (should_normalize) {
    __ LoadRoot(ip, Heap::kTrueValueRootIndex);
    __ cmp(r0, ip);
    Split(eq, if_true, if_false, NULL);
    __ bind(&skip);
  }
}


void FullCodeGenerator::EmitDebugCheckDeclarationContext(Variable* variable) {
  // The variable in the declaration always resides in the current function
  // context.
  DCHECK_EQ(0, scope()->ContextChainLength(variable->scope()));
  if (FLAG_debug_code) {
    // Check that we're not inside a with or catch context.
    __ ldr(r1, FieldMemOperand(cp, HeapObject::kMapOffset));
    __ CompareRoot(r1, Heap::kWithContextMapRootIndex);
    __ Check(ne, kDeclarationInWithContext);
    __ CompareRoot(r1, Heap::kCatchContextMapRootIndex);
    __ Check(ne, kDeclarationInCatchContext);
  }
}


void FullCodeGenerator::VisitVariableDeclaration(
    VariableDeclaration* declaration) {
  // If it was not possible to allocate the variable at compile time, we
  // need to "declare" it at runtime to make sure it actually exists in the
  // local context.
  VariableProxy* proxy = declaration->proxy();
  VariableMode mode = declaration->mode();
  Variable* variable = proxy->var();
  bool hole_init = mode == LET || mode == CONST;
  switch (variable->location()) {
    case VariableLocation::GLOBAL:
    case VariableLocation::UNALLOCATED:
      DCHECK(!variable->binding_needs_init());
      globals_->Add(variable->name(), zone());
      globals_->Add(isolate()->factory()->undefined_value(), zone());
      break;

    case VariableLocation::PARAMETER:
    case VariableLocation::LOCAL:
      if (hole_init) {
        Comment cmnt(masm_, "[ VariableDeclaration");
        __ LoadRoot(r0, Heap::kTheHoleValueRootIndex);
        __ str(r0, StackOperand(variable));
      }
      break;

    case VariableLocation::CONTEXT:
      if (hole_init) {
        Comment cmnt(masm_, "[ VariableDeclaration");
        EmitDebugCheckDeclarationContext(variable);
        __ LoadRoot(r0, Heap::kTheHoleValueRootIndex);
        __ str(r0, ContextMemOperand(cp, variable->index()));
        // No write barrier since the_hole_value is in old space.
        PrepareForBailoutForId(proxy->id(), BailoutState::NO_REGISTERS);
      }
      break;

    case VariableLocation::LOOKUP: {
      Comment cmnt(masm_, "[ VariableDeclaration");
      DCHECK_EQ(VAR, mode);
      DCHECK(!hole_init);
      __ mov(r2, Operand(variable->name()));
      __ Push(r2);
      __ CallRuntime(Runtime::kDeclareEvalVar);
      PrepareForBailoutForId(proxy->id(), BailoutState::NO_REGISTERS);
      break;
    }
  }
}


void FullCodeGenerator::VisitFunctionDeclaration(
    FunctionDeclaration* declaration) {
  VariableProxy* proxy = declaration->proxy();
  Variable* variable = proxy->var();
  switch (variable->location()) {
    case VariableLocation::GLOBAL:
    case VariableLocation::UNALLOCATED: {
      globals_->Add(variable->name(), zone());
      Handle<SharedFunctionInfo> function =
          Compiler::GetSharedFunctionInfo(declaration->fun(), script(), info_);
      // Check for stack-overflow exception.
      if (function.is_null()) return SetStackOverflow();
      globals_->Add(function, zone());
      break;
    }

    case VariableLocation::PARAMETER:
    case VariableLocation::LOCAL: {
      Comment cmnt(masm_, "[ FunctionDeclaration");
      VisitForAccumulatorValue(declaration->fun());
      __ str(result_register(), StackOperand(variable));
      break;
    }

    case VariableLocation::CONTEXT: {
      Comment cmnt(masm_, "[ FunctionDeclaration");
      EmitDebugCheckDeclarationContext(variable);
      VisitForAccumulatorValue(declaration->fun());
      __ str(result_register(), ContextMemOperand(cp, variable->index()));
      int offset = Context::SlotOffset(variable->index());
      // We know that we have written a function, which is not a smi.
      __ RecordWriteContextSlot(cp,
                                offset,
                                result_register(),
                                r2,
                                kLRHasBeenSaved,
                                kDontSaveFPRegs,
                                EMIT_REMEMBERED_SET,
                                OMIT_SMI_CHECK);
      PrepareForBailoutForId(proxy->id(), BailoutState::NO_REGISTERS);
      break;
    }

    case VariableLocation::LOOKUP: {
      Comment cmnt(masm_, "[ FunctionDeclaration");
      __ mov(r2, Operand(variable->name()));
      PushOperand(r2);
      // Push initial value for function declaration.
      VisitForStackValue(declaration->fun());
      CallRuntimeWithOperands(Runtime::kDeclareEvalFunction);
      PrepareForBailoutForId(proxy->id(), BailoutState::NO_REGISTERS);
      break;
    }
  }
}


void FullCodeGenerator::DeclareGlobals(Handle<FixedArray> pairs) {
  // Call the runtime to declare the globals.
  __ mov(r1, Operand(pairs));
  __ mov(r0, Operand(Smi::FromInt(DeclareGlobalsFlags())));
  __ Push(r1, r0);
  __ CallRuntime(Runtime::kDeclareGlobals);
  // Return value is ignored.
}


void FullCodeGenerator::DeclareModules(Handle<FixedArray> descriptions) {
  // Call the runtime to declare the modules.
  __ Push(descriptions);
  __ CallRuntime(Runtime::kDeclareModules);
  // Return value is ignored.
}


void FullCodeGenerator::VisitSwitchStatement(SwitchStatement* stmt) {
  Comment cmnt(masm_, "[ SwitchStatement");
  Breakable nested_statement(this, stmt);
  SetStatementPosition(stmt);

  // Keep the switch value on the stack until a case matches.
  VisitForStackValue(stmt->tag());
  PrepareForBailoutForId(stmt->EntryId(), BailoutState::NO_REGISTERS);

  ZoneList<CaseClause*>* clauses = stmt->cases();
  CaseClause* default_clause = NULL;  // Can occur anywhere in the list.

  Label next_test;  // Recycled for each test.
  // Compile all the tests with branches to their bodies.
  for (int i = 0; i < clauses->length(); i++) {
    CaseClause* clause = clauses->at(i);
    clause->body_target()->Unuse();

    // The default is not a test, but remember it as final fall through.
    if (clause->is_default()) {
      default_clause = clause;
      continue;
    }

    Comment cmnt(masm_, "[ Case comparison");
    __ bind(&next_test);
    next_test.Unuse();

    // Compile the label expression.
    VisitForAccumulatorValue(clause->label());

    // Perform the comparison as if via '==='.
    __ ldr(r1, MemOperand(sp, 0));  // Switch value.
    bool inline_smi_code = ShouldInlineSmiCase(Token::EQ_STRICT);
    JumpPatchSite patch_site(masm_);
    if (inline_smi_code) {
      Label slow_case;
      __ orr(r2, r1, r0);
      patch_site.EmitJumpIfNotSmi(r2, &slow_case);

      __ cmp(r1, r0);
      __ b(ne, &next_test);
      __ Drop(1);  // Switch value is no longer needed.
      __ b(clause->body_target());
      __ bind(&slow_case);
    }

    // Record position before stub call for type feedback.
    SetExpressionPosition(clause);
    Handle<Code> ic =
        CodeFactory::CompareIC(isolate(), Token::EQ_STRICT).code();
    CallIC(ic, clause->CompareId());
    patch_site.EmitPatchInfo();

    Label skip;
    __ b(&skip);
    PrepareForBailout(clause, BailoutState::TOS_REGISTER);
    __ LoadRoot(ip, Heap::kTrueValueRootIndex);
    __ cmp(r0, ip);
    __ b(ne, &next_test);
    __ Drop(1);
    __ jmp(clause->body_target());
    __ bind(&skip);

    __ cmp(r0, Operand::Zero());
    __ b(ne, &next_test);
    __ Drop(1);  // Switch value is no longer needed.
    __ b(clause->body_target());
  }

  // Discard the test value and jump to the default if present, otherwise to
  // the end of the statement.
  __ bind(&next_test);
  DropOperands(1);  // Switch value is no longer needed.
  if (default_clause == NULL) {
    __ b(nested_statement.break_label());
  } else {
    __ b(default_clause->body_target());
  }

  // Compile all the case bodies.
  for (int i = 0; i < clauses->length(); i++) {
    Comment cmnt(masm_, "[ Case body");
    CaseClause* clause = clauses->at(i);
    __ bind(clause->body_target());
    PrepareForBailoutForId(clause->EntryId(), BailoutState::NO_REGISTERS);
    VisitStatements(clause->statements());
  }

  __ bind(nested_statement.break_label());
  PrepareForBailoutForId(stmt->ExitId(), BailoutState::NO_REGISTERS);
}


void FullCodeGenerator::VisitForInStatement(ForInStatement* stmt) {
  Comment cmnt(masm_, "[ ForInStatement");
  SetStatementPosition(stmt, SKIP_BREAK);

  FeedbackVectorSlot slot = stmt->ForInFeedbackSlot();

  // Get the object to enumerate over.
  SetExpressionAsStatementPosition(stmt->enumerable());
  VisitForAccumulatorValue(stmt->enumerable());
  OperandStackDepthIncrement(5);
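  // The five operand stack slots hold, from the bottom up: the enumerable
  // object, its map (or Smi(1) on the slow path), the enum cache (or the
  // fixed array of keys), the cache length, and the current index; the
  // MemOperand(sp, ...) loads in the loop below rely on this layout.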

  Label loop, exit;
  Iteration loop_statement(this, stmt);
  increment_loop_depth();

  // If the object is null or undefined, skip over the loop, otherwise convert
  // it to a JS receiver. See ECMA-262 version 5, section 12.6.4.
  Label convert, done_convert;
  __ JumpIfSmi(r0, &convert);
  __ CompareObjectType(r0, r1, r1, FIRST_JS_RECEIVER_TYPE);
  __ b(ge, &done_convert);
  __ CompareRoot(r0, Heap::kNullValueRootIndex);
  __ b(eq, &exit);
  __ CompareRoot(r0, Heap::kUndefinedValueRootIndex);
  __ b(eq, &exit);
  __ bind(&convert);
  ToObjectStub stub(isolate());
  __ CallStub(&stub);
  __ bind(&done_convert);
  PrepareForBailoutForId(stmt->ToObjectId(), BailoutState::TOS_REGISTER);
  __ push(r0);

  // Check cache validity in generated code. If we cannot guarantee cache
  // validity, call the runtime system to check cache validity or get the
  // property names in a fixed array. Note: Proxies never have an enum cache,
  // so will always take the slow path.
  Label call_runtime;
  __ CheckEnumCache(&call_runtime);

  // The enum cache is valid. Load the map of the object being
  // iterated over and use the cache for the iteration.
  Label use_cache;
  __ ldr(r0, FieldMemOperand(r0, HeapObject::kMapOffset));
  __ b(&use_cache);

  // Get the set of properties to enumerate.
  __ bind(&call_runtime);
  __ push(r0);  // Duplicate the enumerable object on the stack.
  __ CallRuntime(Runtime::kForInEnumerate);
  PrepareForBailoutForId(stmt->EnumId(), BailoutState::TOS_REGISTER);

  // If we got a map from the runtime call, we can do a fast
  // modification check. Otherwise, we got a fixed array, and we have
  // to do a slow check.
  Label fixed_array;
  __ ldr(r2, FieldMemOperand(r0, HeapObject::kMapOffset));
  __ LoadRoot(ip, Heap::kMetaMapRootIndex);
  __ cmp(r2, ip);
  __ b(ne, &fixed_array);

  // We got a map in register r0. Get the enumeration cache from it.
  Label no_descriptors;
  __ bind(&use_cache);

  __ EnumLength(r1, r0);
  __ cmp(r1, Operand(Smi::FromInt(0)));
  __ b(eq, &no_descriptors);

  __ LoadInstanceDescriptors(r0, r2);
  __ ldr(r2, FieldMemOperand(r2, DescriptorArray::kEnumCacheOffset));
  __ ldr(r2, FieldMemOperand(r2, DescriptorArray::kEnumCacheBridgeCacheOffset));

  // Set up the four remaining stack slots.
  __ push(r0);  // Map.
  __ mov(r0, Operand(Smi::FromInt(0)));
  // Push enumeration cache, enumeration cache length (as smi) and zero.
  __ Push(r2, r1, r0);
  __ jmp(&loop);

  __ bind(&no_descriptors);
  __ Drop(1);
  __ jmp(&exit);

  // We got a fixed array in register r0. Iterate through that.
  __ bind(&fixed_array);

  __ mov(r1, Operand(Smi::FromInt(1)));  // Smi(1) indicates slow check
  __ Push(r1, r0);  // Smi and array
  __ ldr(r1, FieldMemOperand(r0, FixedArray::kLengthOffset));
  __ Push(r1);  // Fixed array length (as smi).
  PrepareForBailoutForId(stmt->PrepareId(), BailoutState::NO_REGISTERS);
  __ mov(r0, Operand(Smi::FromInt(0)));
  __ Push(r0);  // Initial index.

  // Generate code for doing the condition check.
  __ bind(&loop);
  SetExpressionAsStatementPosition(stmt->each());

  // Load the current count to r0, load the length to r1.
  __ Ldrd(r0, r1, MemOperand(sp, 0 * kPointerSize));
  __ cmp(r0, r1);  // Compare to the array length.
  __ b(hs, loop_statement.break_label());

  // Get the current entry of the array into register r3.
  __ ldr(r2, MemOperand(sp, 2 * kPointerSize));
  __ add(r2, r2, Operand(FixedArray::kHeaderSize - kHeapObjectTag));
  __ ldr(r3, MemOperand::PointerAddressFromSmiKey(r2, r0));

  // Get the expected map from the stack or a smi in the
  // permanent slow case into register r2.
  __ ldr(r2, MemOperand(sp, 3 * kPointerSize));

  // Check if the expected map still matches that of the enumerable.
  // If not, we may have to filter the key.
  Label update_each;
  __ ldr(r1, MemOperand(sp, 4 * kPointerSize));
  __ ldr(r4, FieldMemOperand(r1, HeapObject::kMapOffset));
  __ cmp(r4, Operand(r2));
  __ b(eq, &update_each);

  // We need to filter the key, record slow-path here.
  int const vector_index = SmiFromSlot(slot)->value();
  __ EmitLoadTypeFeedbackVector(r0);
  __ mov(r2, Operand(TypeFeedbackVector::MegamorphicSentinel(isolate())));
  __ str(r2, FieldMemOperand(r0, FixedArray::OffsetOfElementAt(vector_index)));

  // Convert the entry to a string or (smi) 0 if it isn't a property
  // any more. If the property has been removed while iterating, we
  // just skip it.
  __ push(r1);  // Enumerable.
  __ push(r3);  // Current entry.
  __ CallRuntime(Runtime::kForInFilter);
  PrepareForBailoutForId(stmt->FilterId(), BailoutState::TOS_REGISTER);
  __ mov(r3, Operand(r0));
  __ LoadRoot(ip, Heap::kUndefinedValueRootIndex);
  __ cmp(r0, ip);
  __ b(eq, loop_statement.continue_label());

  // Update the 'each' property or variable from the possibly filtered
  // entry in register r3.
  __ bind(&update_each);
  __ mov(result_register(), r3);
  // Perform the assignment as if via '='.
  { EffectContext context(this);
    EmitAssignment(stmt->each(), stmt->EachFeedbackSlot());
    PrepareForBailoutForId(stmt->AssignmentId(), BailoutState::NO_REGISTERS);
  }

  // Both Crankshaft and Turbofan expect BodyId to be right before stmt->body().
  PrepareForBailoutForId(stmt->BodyId(), BailoutState::NO_REGISTERS);
  // Generate code for the body of the loop.
  Visit(stmt->body());

  // Generate code for the going to the next element by incrementing
  // the index (smi) stored on top of the stack.
  __ bind(loop_statement.continue_label());
  __ pop(r0);
  __ add(r0, r0, Operand(Smi::FromInt(1)));
  __ push(r0);

  EmitBackEdgeBookkeeping(stmt, &loop);
  __ b(&loop);

  // Remove the pointers stored on the stack.
  __ bind(loop_statement.break_label());
  DropOperands(5);

  // Exit and decrement the loop depth.
  PrepareForBailoutForId(stmt->ExitId(), BailoutState::NO_REGISTERS);
  __ bind(&exit);
  decrement_loop_depth();
}


void FullCodeGenerator::EmitSetHomeObject(Expression* initializer, int offset,
                                          FeedbackVectorSlot slot) {
  DCHECK(NeedsHomeObject(initializer));
  __ ldr(StoreDescriptor::ReceiverRegister(), MemOperand(sp));
  __ mov(StoreDescriptor::NameRegister(),
         Operand(isolate()->factory()->home_object_symbol()));
  __ ldr(StoreDescriptor::ValueRegister(),
         MemOperand(sp, offset * kPointerSize));
  EmitLoadStoreICSlot(slot);
  CallStoreIC();
}


void FullCodeGenerator::EmitSetHomeObjectAccumulator(Expression* initializer,
                                                     int offset,
                                                     FeedbackVectorSlot slot) {
  DCHECK(NeedsHomeObject(initializer));
  __ Move(StoreDescriptor::ReceiverRegister(), r0);
  __ mov(StoreDescriptor::NameRegister(),
         Operand(isolate()->factory()->home_object_symbol()));
  __ ldr(StoreDescriptor::ValueRegister(),
         MemOperand(sp, offset * kPointerSize));
  EmitLoadStoreICSlot(slot);
  CallStoreIC();
}


void FullCodeGenerator::EmitLoadGlobalCheckExtensions(VariableProxy* proxy,
                                                      TypeofMode typeof_mode,
                                                      Label* slow) {
  Register current = cp;
  Register next = r1;
  Register temp = r2;

  Scope* s = scope();
  while (s != NULL) {
    if (s->num_heap_slots() > 0) {
      if (s->calls_sloppy_eval()) {
        // Check that extension is "the hole".
        __ ldr(temp, ContextMemOperand(current, Context::EXTENSION_INDEX));
        __ JumpIfNotRoot(temp, Heap::kTheHoleValueRootIndex, slow);
      }
      // Load next context in chain.
      __ ldr(next, ContextMemOperand(current, Context::PREVIOUS_INDEX));
      // Walk the rest of the chain without clobbering cp.
      current = next;
    }
    // If no outer scope calls eval, we do not need to check more
    // context extensions.
    if (!s->outer_scope_calls_sloppy_eval() || s->is_eval_scope()) break;
    s = s->outer_scope();
  }

  if (s->is_eval_scope()) {
    Label loop, fast;
    if (!current.is(next)) {
      __ Move(next, current);
    }
    __ bind(&loop);
    // Terminate at native context.
    __ ldr(temp, FieldMemOperand(next, HeapObject::kMapOffset));
    __ LoadRoot(ip, Heap::kNativeContextMapRootIndex);
    __ cmp(temp, ip);
    __ b(eq, &fast);
    // Check that extension is "the hole".
    __ ldr(temp, ContextMemOperand(next, Context::EXTENSION_INDEX));
    __ JumpIfNotRoot(temp, Heap::kTheHoleValueRootIndex, slow);
    // Load next context in chain.
    __ ldr(next, ContextMemOperand(next, Context::PREVIOUS_INDEX));
    __ b(&loop);
    __ bind(&fast);
  }

  // All extension objects were empty and it is safe to use a normal global
  // load machinery.
  EmitGlobalVariableLoad(proxy, typeof_mode);
}


MemOperand FullCodeGenerator::ContextSlotOperandCheckExtensions(Variable* var,
                                                                Label* slow) {
  DCHECK(var->IsContextSlot());
  Register context = cp;
  Register next = r3;
  Register temp = r4;

  for (Scope* s = scope(); s != var->scope(); s = s->outer_scope()) {
    if (s->num_heap_slots() > 0) {
      if (s->calls_sloppy_eval()) {
        // Check that extension is "the hole".
        __ ldr(temp, ContextMemOperand(context, Context::EXTENSION_INDEX));
        __ JumpIfNotRoot(temp, Heap::kTheHoleValueRootIndex, slow);
      }
      __ ldr(next, ContextMemOperand(context, Context::PREVIOUS_INDEX));
      // Walk the rest of the chain without clobbering cp.
      context = next;
    }
  }
  // Check that last extension is "the hole".
  __ ldr(temp, ContextMemOperand(context, Context::EXTENSION_INDEX));
  __ JumpIfNotRoot(temp, Heap::kTheHoleValueRootIndex, slow);

  // This function is used only for loads, not stores, so it's safe to
  // return a cp-based operand (the write barrier cannot be allowed to
  // destroy the cp register).
1264 return ContextMemOperand(context, var->index());
1265}
1266
1267
1268void FullCodeGenerator::EmitDynamicLookupFastCase(VariableProxy* proxy,
1269 TypeofMode typeof_mode,
1270 Label* slow, Label* done) {
1271 // Generate fast-case code for variables that might be shadowed by
1272 // eval-introduced variables. Eval is used a lot without
1273 // introducing variables. In those cases, we do not want to
1274 // perform a runtime call for all variables in the scope
1275 // containing the eval.
1276 Variable* var = proxy->var();
1277 if (var->mode() == DYNAMIC_GLOBAL) {
1278 EmitLoadGlobalCheckExtensions(proxy, typeof_mode, slow);
1279 __ jmp(done);
1280 } else if (var->mode() == DYNAMIC_LOCAL) {
1281 Variable* local = var->local_if_not_shadowed();
1282 __ ldr(r0, ContextSlotOperandCheckExtensions(local, slow));
Ben Murdochc5610432016-08-08 18:44:38 +01001283 if (local->mode() == LET || local->mode() == CONST) {
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00001284 __ CompareRoot(r0, Heap::kTheHoleValueRootIndex);
Ben Murdochc5610432016-08-08 18:44:38 +01001285 __ b(ne, done);
1286 __ mov(r0, Operand(var->name()));
1287 __ push(r0);
1288 __ CallRuntime(Runtime::kThrowReferenceError);
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00001289 }
1290 __ jmp(done);
1291 }
1292}
1293
1294
1295void FullCodeGenerator::EmitGlobalVariableLoad(VariableProxy* proxy,
1296 TypeofMode typeof_mode) {
Ben Murdoch61f157c2016-09-16 13:49:30 +01001297#ifdef DEBUG
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00001298 Variable* var = proxy->var();
1299 DCHECK(var->IsUnallocatedOrGlobalSlot() ||
1300 (var->IsLookupSlot() && var->mode() == DYNAMIC_GLOBAL));
Ben Murdoch61f157c2016-09-16 13:49:30 +01001301#endif
1302 __ mov(LoadGlobalDescriptor::SlotRegister(),
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00001303 Operand(SmiFromSlot(proxy->VariableFeedbackSlot())));
Ben Murdoch61f157c2016-09-16 13:49:30 +01001304 CallLoadGlobalIC(typeof_mode);
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00001305}
1306
1307
1308void FullCodeGenerator::EmitVariableLoad(VariableProxy* proxy,
1309 TypeofMode typeof_mode) {
1310 // Record position before possible IC call.
1311 SetExpressionPosition(proxy);
Ben Murdochc5610432016-08-08 18:44:38 +01001312 PrepareForBailoutForId(proxy->BeforeId(), BailoutState::NO_REGISTERS);
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00001313 Variable* var = proxy->var();
1314
1315 // Three cases: global variables, lookup variables, and all other types of
1316 // variables.
1317 switch (var->location()) {
1318 case VariableLocation::GLOBAL:
1319 case VariableLocation::UNALLOCATED: {
1320 Comment cmnt(masm_, "[ Global variable");
1321 EmitGlobalVariableLoad(proxy, typeof_mode);
1322 context()->Plug(r0);
1323 break;
1324 }
1325
1326 case VariableLocation::PARAMETER:
1327 case VariableLocation::LOCAL:
1328 case VariableLocation::CONTEXT: {
1329 DCHECK_EQ(NOT_INSIDE_TYPEOF, typeof_mode);
1330 Comment cmnt(masm_, var->IsContextSlot() ? "[ Context variable"
1331 : "[ Stack variable");
1332 if (NeedsHoleCheckForLoad(proxy)) {
1333 // Let and const need a read barrier.
1334 GetVar(r0, var);
1335 __ CompareRoot(r0, Heap::kTheHoleValueRootIndex);
1336 if (var->mode() == LET || var->mode() == CONST) {
1337 // Throw a reference error when using an uninitialized let/const
1338 // binding in harmony mode.
1339 Label done;
1340 __ b(ne, &done);
1341 __ mov(r0, Operand(var->name()));
1342 __ push(r0);
1343 __ CallRuntime(Runtime::kThrowReferenceError);
1344 __ bind(&done);
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00001345 }
1346 context()->Plug(r0);
1347 break;
1348 }
1349 context()->Plug(var);
1350 break;
1351 }
1352
1353 case VariableLocation::LOOKUP: {
1354 Comment cmnt(masm_, "[ Lookup variable");
1355 Label done, slow;
1356 // Generate code for loading from variables potentially shadowed
1357 // by eval-introduced variables.
1358 EmitDynamicLookupFastCase(proxy, typeof_mode, &slow, &done);
1359 __ bind(&slow);
Ben Murdoch097c5b22016-05-18 11:27:45 +01001360 __ Push(var->name());
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00001361 Runtime::FunctionId function_id =
1362 typeof_mode == NOT_INSIDE_TYPEOF
1363 ? Runtime::kLoadLookupSlot
Ben Murdoch097c5b22016-05-18 11:27:45 +01001364 : Runtime::kLoadLookupSlotInsideTypeof;
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00001365 __ CallRuntime(function_id);
1366 __ bind(&done);
1367 context()->Plug(r0);
1368 }
1369 }
1370}
1371
1372
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00001373void FullCodeGenerator::EmitAccessor(ObjectLiteralProperty* property) {
1374 Expression* expression = (property == NULL) ? NULL : property->value();
1375 if (expression == NULL) {
1376 __ LoadRoot(r1, Heap::kNullValueRootIndex);
Ben Murdoch097c5b22016-05-18 11:27:45 +01001377 PushOperand(r1);
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00001378 } else {
1379 VisitForStackValue(expression);
1380 if (NeedsHomeObject(expression)) {
1381 DCHECK(property->kind() == ObjectLiteral::Property::GETTER ||
1382 property->kind() == ObjectLiteral::Property::SETTER);
1383 int offset = property->kind() == ObjectLiteral::Property::GETTER ? 2 : 3;
1384 EmitSetHomeObject(expression, offset, property->GetSlot());
1385 }
1386 }
1387}


void FullCodeGenerator::VisitObjectLiteral(ObjectLiteral* expr) {
  Comment cmnt(masm_, "[ ObjectLiteral");

  Handle<FixedArray> constant_properties = expr->constant_properties();
  __ ldr(r3, MemOperand(fp, JavaScriptFrameConstants::kFunctionOffset));
  __ mov(r2, Operand(Smi::FromInt(expr->literal_index())));
  __ mov(r1, Operand(constant_properties));
  int flags = expr->ComputeFlags();
  __ mov(r0, Operand(Smi::FromInt(flags)));
  if (MustCreateObjectLiteralWithRuntime(expr)) {
    __ Push(r3, r2, r1, r0);
    __ CallRuntime(Runtime::kCreateObjectLiteral);
  } else {
    FastCloneShallowObjectStub stub(isolate(), expr->properties_count());
    __ CallStub(&stub);
    RestoreContext();
  }
  PrepareForBailoutForId(expr->CreateLiteralId(), BailoutState::TOS_REGISTER);

  // If result_saved is true the result is on top of the stack. If
  // result_saved is false the result is in r0.
  bool result_saved = false;

  AccessorTable accessor_table(zone());
  int property_index = 0;
  for (; property_index < expr->properties()->length(); property_index++) {
    ObjectLiteral::Property* property = expr->properties()->at(property_index);
    if (property->is_computed_name()) break;
    if (property->IsCompileTimeValue()) continue;

    Literal* key = property->key()->AsLiteral();
    Expression* value = property->value();
    if (!result_saved) {
      PushOperand(r0);  // Save result on stack
      result_saved = true;
    }
    switch (property->kind()) {
      case ObjectLiteral::Property::CONSTANT:
        UNREACHABLE();
      case ObjectLiteral::Property::MATERIALIZED_LITERAL:
        DCHECK(!CompileTimeValue::IsCompileTimeValue(property->value()));
        // Fall through.
      case ObjectLiteral::Property::COMPUTED:
        // It is safe to use [[Put]] here because the boilerplate already
        // contains computed properties with an uninitialized value.
        if (key->value()->IsInternalizedString()) {
          if (property->emit_store()) {
            VisitForAccumulatorValue(value);
            DCHECK(StoreDescriptor::ValueRegister().is(r0));
            __ mov(StoreDescriptor::NameRegister(), Operand(key->value()));
            __ ldr(StoreDescriptor::ReceiverRegister(), MemOperand(sp));
            EmitLoadStoreICSlot(property->GetSlot(0));
            CallStoreIC();
            PrepareForBailoutForId(key->id(), BailoutState::NO_REGISTERS);

            if (NeedsHomeObject(value)) {
              EmitSetHomeObjectAccumulator(value, 0, property->GetSlot(1));
            }
          } else {
            VisitForEffect(value);
          }
          break;
        }
        // Duplicate receiver on stack.
        __ ldr(r0, MemOperand(sp));
        PushOperand(r0);
        VisitForStackValue(key);
        VisitForStackValue(value);
        if (property->emit_store()) {
          if (NeedsHomeObject(value)) {
            EmitSetHomeObject(value, 2, property->GetSlot());
          }
          __ mov(r0, Operand(Smi::FromInt(SLOPPY)));  // PropertyAttributes
          PushOperand(r0);
          CallRuntimeWithOperands(Runtime::kSetProperty);
        } else {
          DropOperands(3);
        }
        break;
      case ObjectLiteral::Property::PROTOTYPE:
        // Duplicate receiver on stack.
        __ ldr(r0, MemOperand(sp));
        PushOperand(r0);
        VisitForStackValue(value);
        DCHECK(property->emit_store());
        CallRuntimeWithOperands(Runtime::kInternalSetPrototype);
        PrepareForBailoutForId(expr->GetIdForPropertySet(property_index),
                               BailoutState::NO_REGISTERS);
        break;

      case ObjectLiteral::Property::GETTER:
        if (property->emit_store()) {
          AccessorTable::Iterator it = accessor_table.lookup(key);
          it->second->bailout_id = expr->GetIdForPropertySet(property_index);
          it->second->getter = property;
        }
        break;
      case ObjectLiteral::Property::SETTER:
        if (property->emit_store()) {
          AccessorTable::Iterator it = accessor_table.lookup(key);
          it->second->bailout_id = expr->GetIdForPropertySet(property_index);
          it->second->setter = property;
        }
        break;
    }
  }

  // Emit code to define accessors, using only a single call to the runtime for
  // each pair of corresponding getters and setters.
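  // For example (illustrative): {get x() {...}, set x(v) {...}} yields a
  // single accessor_table entry for "x" and hence a single runtime call
  // below.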
  for (AccessorTable::Iterator it = accessor_table.begin();
       it != accessor_table.end();
       ++it) {
    __ ldr(r0, MemOperand(sp));  // Duplicate receiver.
    PushOperand(r0);
    VisitForStackValue(it->first);
    EmitAccessor(it->second->getter);
    EmitAccessor(it->second->setter);
    __ mov(r0, Operand(Smi::FromInt(NONE)));
    PushOperand(r0);
    CallRuntimeWithOperands(Runtime::kDefineAccessorPropertyUnchecked);
    PrepareForBailoutForId(it->second->bailout_id, BailoutState::NO_REGISTERS);
  }

  // Object literals have two parts. The "static" part on the left contains no
  // computed property names, and so we can compute its map ahead of time; see
  // runtime.cc::CreateObjectLiteralBoilerplate. The second "dynamic" part
  // starts with the first computed property name, and continues with all
  // properties to its right. All the code from above initializes the static
  // component of the object literal, and arranges for the map of the result to
  // reflect the static order in which the keys appear. For the dynamic
  // properties, we compile them into a series of "SetOwnProperty" runtime
  // calls. This will preserve insertion order.
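  // For example (illustrative): in {a: 1, b: 2, [k]: 3, c: 4} the properties
  // 'a' and 'b' form the static part and are baked into the boilerplate map,
  // while [k] and everything after it form the dynamic part and are defined
  // one by one by the loop below.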
  for (; property_index < expr->properties()->length(); property_index++) {
    ObjectLiteral::Property* property = expr->properties()->at(property_index);

    Expression* value = property->value();
    if (!result_saved) {
      PushOperand(r0);  // Save result on the stack
      result_saved = true;
    }

    __ ldr(r0, MemOperand(sp));  // Duplicate receiver.
    PushOperand(r0);

    if (property->kind() == ObjectLiteral::Property::PROTOTYPE) {
      DCHECK(!property->is_computed_name());
      VisitForStackValue(value);
      DCHECK(property->emit_store());
      CallRuntimeWithOperands(Runtime::kInternalSetPrototype);
      PrepareForBailoutForId(expr->GetIdForPropertySet(property_index),
                             BailoutState::NO_REGISTERS);
    } else {
      EmitPropertyKey(property, expr->GetIdForPropertyName(property_index));
      VisitForStackValue(value);
      if (NeedsHomeObject(value)) {
        EmitSetHomeObject(value, 2, property->GetSlot());
      }

      switch (property->kind()) {
        case ObjectLiteral::Property::CONSTANT:
        case ObjectLiteral::Property::MATERIALIZED_LITERAL:
        case ObjectLiteral::Property::COMPUTED:
          if (property->emit_store()) {
            PushOperand(Smi::FromInt(NONE));
            PushOperand(Smi::FromInt(property->NeedsSetFunctionName()));
            CallRuntimeWithOperands(Runtime::kDefineDataPropertyInLiteral);
            PrepareForBailoutForId(expr->GetIdForPropertySet(property_index),
                                   BailoutState::NO_REGISTERS);
          } else {
            DropOperands(3);
          }
          break;

        case ObjectLiteral::Property::PROTOTYPE:
          UNREACHABLE();
          break;

        case ObjectLiteral::Property::GETTER:
          PushOperand(Smi::FromInt(NONE));
          CallRuntimeWithOperands(Runtime::kDefineGetterPropertyUnchecked);
          break;

        case ObjectLiteral::Property::SETTER:
          PushOperand(Smi::FromInt(NONE));
          CallRuntimeWithOperands(Runtime::kDefineSetterPropertyUnchecked);
          break;
      }
    }
  }

  if (result_saved) {
    context()->PlugTOS();
  } else {
    context()->Plug(r0);
  }
}


void FullCodeGenerator::VisitArrayLiteral(ArrayLiteral* expr) {
  Comment cmnt(masm_, "[ ArrayLiteral");

  Handle<FixedArray> constant_elements = expr->constant_elements();
  bool has_fast_elements =
      IsFastObjectElementsKind(expr->constant_elements_kind());
  Handle<FixedArrayBase> constant_elements_values(
      FixedArrayBase::cast(constant_elements->get(1)));

  AllocationSiteMode allocation_site_mode = TRACK_ALLOCATION_SITE;
  if (has_fast_elements && !FLAG_allocation_site_pretenuring) {
    // If the only customer of allocation sites is transitioning, then
    // we can turn it off if we don't have anywhere else to transition to.
    allocation_site_mode = DONT_TRACK_ALLOCATION_SITE;
  }

  __ ldr(r3, MemOperand(fp, JavaScriptFrameConstants::kFunctionOffset));
  __ mov(r2, Operand(Smi::FromInt(expr->literal_index())));
  __ mov(r1, Operand(constant_elements));
  if (MustCreateArrayLiteralWithRuntime(expr)) {
    __ mov(r0, Operand(Smi::FromInt(expr->ComputeFlags())));
    __ Push(r3, r2, r1, r0);
    __ CallRuntime(Runtime::kCreateArrayLiteral);
  } else {
    FastCloneShallowArrayStub stub(isolate(), allocation_site_mode);
    __ CallStub(&stub);
  }
  PrepareForBailoutForId(expr->CreateLiteralId(), BailoutState::TOS_REGISTER);

  bool result_saved = false;  // Is the result saved to the stack?
  ZoneList<Expression*>* subexprs = expr->values();
  int length = subexprs->length();

  // Emit code to evaluate all the non-constant subexpressions and to store
  // them into the newly cloned array.
  int array_index = 0;
  for (; array_index < length; array_index++) {
    Expression* subexpr = subexprs->at(array_index);
    DCHECK(!subexpr->IsSpread());

    // If the subexpression is a literal or a simple materialized literal it
    // is already set in the cloned array.
    if (CompileTimeValue::IsCompileTimeValue(subexpr)) continue;

    if (!result_saved) {
      PushOperand(r0);
      result_saved = true;
    }
    VisitForAccumulatorValue(subexpr);

    __ mov(StoreDescriptor::NameRegister(), Operand(Smi::FromInt(array_index)));
    __ ldr(StoreDescriptor::ReceiverRegister(), MemOperand(sp, 0));
    EmitLoadStoreICSlot(expr->LiteralFeedbackSlot());
    Handle<Code> ic =
        CodeFactory::KeyedStoreIC(isolate(), language_mode()).code();
    CallIC(ic);

    PrepareForBailoutForId(expr->GetIdForElement(array_index),
                           BailoutState::NO_REGISTERS);
  }

  // If the array literal contains spread expressions, it has two parts. The
  // first part is the "static" array with a literal index, which is handled
  // above. The second part starts at the first spread expression (inclusive);
  // its elements get appended to the array one at a time. Note that the
  // number of elements an iterable produces is unknown ahead of time.
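  // For example (illustrative): in [1, 2, ...xs, 3] the elements 1 and 2 are
  // stored through the cloned boilerplate above, while every element from
  // ...xs onwards goes through Runtime::kAppendElement below.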
  if (array_index < length && result_saved) {
    PopOperand(r0);
    result_saved = false;
  }
  for (; array_index < length; array_index++) {
    Expression* subexpr = subexprs->at(array_index);

    PushOperand(r0);
    DCHECK(!subexpr->IsSpread());
    VisitForStackValue(subexpr);
    CallRuntimeWithOperands(Runtime::kAppendElement);

    PrepareForBailoutForId(expr->GetIdForElement(array_index),
                           BailoutState::NO_REGISTERS);
  }

  if (result_saved) {
    context()->PlugTOS();
  } else {
    context()->Plug(r0);
  }
}


void FullCodeGenerator::VisitAssignment(Assignment* expr) {
  DCHECK(expr->target()->IsValidReferenceExpressionOrThis());

  Comment cmnt(masm_, "[ Assignment");

  Property* property = expr->target()->AsProperty();
  LhsKind assign_type = Property::GetAssignType(property);

  // Evaluate LHS expression.
  switch (assign_type) {
    case VARIABLE:
      // Nothing to do here.
      break;
    case NAMED_PROPERTY:
      if (expr->is_compound()) {
        // We need the receiver both on the stack and in the register.
        VisitForStackValue(property->obj());
        __ ldr(LoadDescriptor::ReceiverRegister(), MemOperand(sp, 0));
      } else {
        VisitForStackValue(property->obj());
      }
      break;
    case NAMED_SUPER_PROPERTY:
      VisitForStackValue(
          property->obj()->AsSuperPropertyReference()->this_var());
      VisitForAccumulatorValue(
          property->obj()->AsSuperPropertyReference()->home_object());
      PushOperand(result_register());
      if (expr->is_compound()) {
        const Register scratch = r1;
        __ ldr(scratch, MemOperand(sp, kPointerSize));
        PushOperand(scratch);
        PushOperand(result_register());
      }
      break;
    case KEYED_SUPER_PROPERTY:
      VisitForStackValue(
          property->obj()->AsSuperPropertyReference()->this_var());
      VisitForStackValue(
          property->obj()->AsSuperPropertyReference()->home_object());
      VisitForAccumulatorValue(property->key());
      PushOperand(result_register());
      if (expr->is_compound()) {
        const Register scratch = r1;
        __ ldr(scratch, MemOperand(sp, 2 * kPointerSize));
        PushOperand(scratch);
        __ ldr(scratch, MemOperand(sp, 2 * kPointerSize));
        PushOperand(scratch);
        PushOperand(result_register());
      }
      break;
    case KEYED_PROPERTY:
      if (expr->is_compound()) {
        VisitForStackValue(property->obj());
        VisitForStackValue(property->key());
        __ ldr(LoadDescriptor::ReceiverRegister(),
               MemOperand(sp, 1 * kPointerSize));
        __ ldr(LoadDescriptor::NameRegister(), MemOperand(sp, 0));
      } else {
        VisitForStackValue(property->obj());
        VisitForStackValue(property->key());
      }
      break;
  }

  // For compound assignments we need another deoptimization point after the
  // variable/property load.
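  // For example (illustrative): in 'o.x += 1' the load of o.x below is a
  // deopt point separate from the store emitted at the end of this function.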
  if (expr->is_compound()) {
    { AccumulatorValueContext context(this);
      switch (assign_type) {
        case VARIABLE:
          EmitVariableLoad(expr->target()->AsVariableProxy());
          PrepareForBailout(expr->target(), BailoutState::TOS_REGISTER);
          break;
        case NAMED_PROPERTY:
          EmitNamedPropertyLoad(property);
          PrepareForBailoutForId(property->LoadId(),
                                 BailoutState::TOS_REGISTER);
          break;
        case NAMED_SUPER_PROPERTY:
          EmitNamedSuperPropertyLoad(property);
          PrepareForBailoutForId(property->LoadId(),
                                 BailoutState::TOS_REGISTER);
          break;
        case KEYED_SUPER_PROPERTY:
          EmitKeyedSuperPropertyLoad(property);
          PrepareForBailoutForId(property->LoadId(),
                                 BailoutState::TOS_REGISTER);
          break;
        case KEYED_PROPERTY:
          EmitKeyedPropertyLoad(property);
          PrepareForBailoutForId(property->LoadId(),
                                 BailoutState::TOS_REGISTER);
          break;
      }
    }

    Token::Value op = expr->binary_op();
    PushOperand(r0);  // Left operand goes on the stack.
    VisitForAccumulatorValue(expr->value());

    AccumulatorValueContext context(this);
    if (ShouldInlineSmiCase(op)) {
      EmitInlineSmiBinaryOp(expr->binary_operation(),
                            op,
                            expr->target(),
                            expr->value());
    } else {
      EmitBinaryOp(expr->binary_operation(), op);
    }

    // Deoptimization point in case the binary operation may have side effects.
    PrepareForBailout(expr->binary_operation(), BailoutState::TOS_REGISTER);
  } else {
    VisitForAccumulatorValue(expr->value());
  }

  SetExpressionPosition(expr);

  // Store the value.
  switch (assign_type) {
    case VARIABLE:
      EmitVariableAssignment(expr->target()->AsVariableProxy()->var(),
                             expr->op(), expr->AssignmentSlot());
      PrepareForBailoutForId(expr->AssignmentId(), BailoutState::TOS_REGISTER);
      context()->Plug(r0);
      break;
    case NAMED_PROPERTY:
      EmitNamedPropertyAssignment(expr);
      break;
    case NAMED_SUPER_PROPERTY:
      EmitNamedSuperPropertyStore(property);
      context()->Plug(r0);
      break;
    case KEYED_SUPER_PROPERTY:
      EmitKeyedSuperPropertyStore(property);
      context()->Plug(r0);
      break;
    case KEYED_PROPERTY:
      EmitKeyedPropertyAssignment(expr);
      break;
  }
}


void FullCodeGenerator::VisitYield(Yield* expr) {
  Comment cmnt(masm_, "[ Yield");
  SetExpressionPosition(expr);

  // Evaluate yielded value first; the initial iterator definition depends on
  // this. It stays on the stack while we update the iterator.
  VisitForStackValue(expr->expression());

  Label suspend, continuation, post_runtime, resume, exception;

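  // Control-flow sketch: the code below jumps to 'suspend', records the
  // resume point and the current context in the generator object, and
  // returns to the caller. A later resume re-enters at 'continuation' with
  // r0 holding the generator object; the resume mode then selects normal
  // resumption, a return, or a throw.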
  __ jmp(&suspend);
  __ bind(&continuation);
  // When we arrive here, r0 holds the generator object.
  __ RecordGeneratorContinuation();
  __ ldr(r1, FieldMemOperand(r0, JSGeneratorObject::kResumeModeOffset));
  __ ldr(r0, FieldMemOperand(r0, JSGeneratorObject::kInputOrDebugPosOffset));
  STATIC_ASSERT(JSGeneratorObject::kNext < JSGeneratorObject::kReturn);
  STATIC_ASSERT(JSGeneratorObject::kThrow > JSGeneratorObject::kReturn);
  __ cmp(r1, Operand(Smi::FromInt(JSGeneratorObject::kReturn)));
  __ b(lt, &resume);
  __ Push(result_register());
  __ b(gt, &exception);
  EmitCreateIteratorResult(true);
  EmitUnwindAndReturn();

  __ bind(&exception);
  __ CallRuntime(Runtime::kThrow);

  __ bind(&suspend);
  OperandStackDepthIncrement(1);  // Not popped on this path.
  VisitForAccumulatorValue(expr->generator_object());
  DCHECK(continuation.pos() > 0 && Smi::IsValid(continuation.pos()));
  __ mov(r1, Operand(Smi::FromInt(continuation.pos())));
  __ str(r1, FieldMemOperand(r0, JSGeneratorObject::kContinuationOffset));
  __ str(cp, FieldMemOperand(r0, JSGeneratorObject::kContextOffset));
  __ mov(r1, cp);
  __ RecordWriteField(r0, JSGeneratorObject::kContextOffset, r1, r2,
                      kLRHasBeenSaved, kDontSaveFPRegs);
  __ add(r1, fp, Operand(StandardFrameConstants::kExpressionsOffset));
  __ cmp(sp, r1);
  __ b(eq, &post_runtime);
  __ push(r0);  // generator object
  __ CallRuntime(Runtime::kSuspendJSGeneratorObject, 1);
  RestoreContext();
  __ bind(&post_runtime);
  PopOperand(result_register());
  EmitReturnSequence();

  __ bind(&resume);
  context()->Plug(result_register());
}

void FullCodeGenerator::PushOperands(Register reg1, Register reg2) {
  OperandStackDepthIncrement(2);
  __ Push(reg1, reg2);
}

void FullCodeGenerator::PopOperands(Register reg1, Register reg2) {
  OperandStackDepthDecrement(2);
  __ Pop(reg1, reg2);
}

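// Debug-only consistency check: the statically tracked operand stack depth
// must match the actual frame layout, i.e. fp - sp should equal the fixed
// frame size plus operand_stack_depth_ * kPointerSize.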
void FullCodeGenerator::EmitOperandStackDepthCheck() {
  if (FLAG_debug_code) {
    int expected_diff = StandardFrameConstants::kFixedFrameSizeFromFp +
                        operand_stack_depth_ * kPointerSize;
    __ sub(r0, fp, sp);
    __ cmp(r0, Operand(expected_diff));
    __ Assert(eq, kUnexpectedStackDepth);
  }
}

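// Materializes a JSIteratorResult {value, done} from the value on top of the
// operand stack, attempting inline new-space allocation first and falling
// back to the runtime when allocation fails.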
void FullCodeGenerator::EmitCreateIteratorResult(bool done) {
  Label allocate, done_allocate;

  __ Allocate(JSIteratorResult::kSize, r0, r2, r3, &allocate,
              NO_ALLOCATION_FLAGS);
  __ b(&done_allocate);

  __ bind(&allocate);
  __ Push(Smi::FromInt(JSIteratorResult::kSize));
  __ CallRuntime(Runtime::kAllocateInNewSpace);

  __ bind(&done_allocate);
  __ LoadNativeContextSlot(Context::ITERATOR_RESULT_MAP_INDEX, r1);
  PopOperand(r2);
  __ LoadRoot(r3,
              done ? Heap::kTrueValueRootIndex : Heap::kFalseValueRootIndex);
  __ LoadRoot(r4, Heap::kEmptyFixedArrayRootIndex);
  __ str(r1, FieldMemOperand(r0, HeapObject::kMapOffset));
  __ str(r4, FieldMemOperand(r0, JSObject::kPropertiesOffset));
  __ str(r4, FieldMemOperand(r0, JSObject::kElementsOffset));
  __ str(r2, FieldMemOperand(r0, JSIteratorResult::kValueOffset));
  __ str(r3, FieldMemOperand(r0, JSIteratorResult::kDoneOffset));
}


void FullCodeGenerator::EmitInlineSmiBinaryOp(BinaryOperation* expr,
                                              Token::Value op,
                                              Expression* left_expr,
                                              Expression* right_expr) {
  Label done, smi_case, stub_call;

  Register scratch1 = r2;
  Register scratch2 = r3;

  // Get the arguments.
  Register left = r1;
  Register right = r0;
  PopOperand(left);

  // Perform combined smi check on both operands.
  __ orr(scratch1, left, Operand(right));
  STATIC_ASSERT(kSmiTag == 0);
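  // Smis are tagged with a zero low bit, so the OR of the two values has a
  // zero low bit only if both are smis; a single test covers both operands.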
  JumpPatchSite patch_site(masm_);
  patch_site.EmitJumpIfSmi(scratch1, &smi_case);

  __ bind(&stub_call);
  Handle<Code> code = CodeFactory::BinaryOpIC(isolate(), op).code();
  CallIC(code, expr->BinaryOperationFeedbackId());
  patch_site.EmitPatchInfo();
  __ jmp(&done);

  __ bind(&smi_case);
  // Smi case. This code works the same way as the smi-smi case in the
  // type-recording binary operation stub.
  switch (op) {
    case Token::SAR:
      __ GetLeastBitsFromSmi(scratch1, right, 5);
      __ mov(right, Operand(left, ASR, scratch1));
      __ bic(right, right, Operand(kSmiTagMask));
      break;
    case Token::SHL: {
      __ SmiUntag(scratch1, left);
      __ GetLeastBitsFromSmi(scratch2, right, 5);
      __ mov(scratch1, Operand(scratch1, LSL, scratch2));
      __ TrySmiTag(right, scratch1, &stub_call);
      break;
    }
    case Token::SHR: {
      __ SmiUntag(scratch1, left);
      __ GetLeastBitsFromSmi(scratch2, right, 5);
      __ mov(scratch1, Operand(scratch1, LSR, scratch2));
      __ tst(scratch1, Operand(0xc0000000));
      __ b(ne, &stub_call);
      __ SmiTag(right, scratch1);
      break;
    }
    case Token::ADD:
      __ add(scratch1, left, Operand(right), SetCC);
      __ b(vs, &stub_call);
      __ mov(right, scratch1);
      break;
    case Token::SUB:
      __ sub(scratch1, left, Operand(right), SetCC);
      __ b(vs, &stub_call);
      __ mov(right, scratch1);
      break;
    case Token::MUL: {
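      // 32x32->64-bit multiply: the product fits in a smi only if the high
      // word is the sign extension of the low word. A zero product needs the
      // sign of (left + right) to distinguish 0 from -0; -0 is not
      // representable as a smi, so that case bails out to the stub.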
      __ SmiUntag(ip, right);
      __ smull(scratch1, scratch2, left, ip);
      __ mov(ip, Operand(scratch1, ASR, 31));
      __ cmp(ip, Operand(scratch2));
      __ b(ne, &stub_call);
      __ cmp(scratch1, Operand::Zero());
      __ mov(right, Operand(scratch1), LeaveCC, ne);
      __ b(ne, &done);
      __ add(scratch2, right, Operand(left), SetCC);
      __ mov(right, Operand(Smi::FromInt(0)), LeaveCC, pl);
      __ b(mi, &stub_call);
      break;
    }
    case Token::BIT_OR:
      __ orr(right, left, Operand(right));
      break;
    case Token::BIT_AND:
      __ and_(right, left, Operand(right));
      break;
    case Token::BIT_XOR:
      __ eor(right, left, Operand(right));
      break;
    default:
      UNREACHABLE();
  }

  __ bind(&done);
  context()->Plug(r0);
}


void FullCodeGenerator::EmitClassDefineProperties(ClassLiteral* lit) {
  for (int i = 0; i < lit->properties()->length(); i++) {
    ObjectLiteral::Property* property = lit->properties()->at(i);
    Expression* value = property->value();

    Register scratch = r1;
    if (property->is_static()) {
      __ ldr(scratch, MemOperand(sp, kPointerSize));  // constructor
    } else {
      __ ldr(scratch, MemOperand(sp, 0));  // prototype
    }
    PushOperand(scratch);
    EmitPropertyKey(property, lit->GetIdForProperty(i));

    // The static "prototype" property is read-only. We handle the
    // non-computed property name case in the parser. Since this is the only
    // case where we need to check for an own read-only property, we special
    // case it here so we do not need to do the check for every property.
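    // For example (illustrative): 'class C { static [k]() {} }' must throw
    // a TypeError at runtime if k evaluates to "prototype".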
    if (property->is_static() && property->is_computed_name()) {
      __ CallRuntime(Runtime::kThrowIfStaticPrototype);
      __ push(r0);
    }

    VisitForStackValue(value);
    if (NeedsHomeObject(value)) {
      EmitSetHomeObject(value, 2, property->GetSlot());
    }

    switch (property->kind()) {
      case ObjectLiteral::Property::CONSTANT:
      case ObjectLiteral::Property::MATERIALIZED_LITERAL:
      case ObjectLiteral::Property::PROTOTYPE:
        UNREACHABLE();
      case ObjectLiteral::Property::COMPUTED:
        PushOperand(Smi::FromInt(DONT_ENUM));
        PushOperand(Smi::FromInt(property->NeedsSetFunctionName()));
        CallRuntimeWithOperands(Runtime::kDefineDataPropertyInLiteral);
        break;

      case ObjectLiteral::Property::GETTER:
        PushOperand(Smi::FromInt(DONT_ENUM));
        CallRuntimeWithOperands(Runtime::kDefineGetterPropertyUnchecked);
        break;

      case ObjectLiteral::Property::SETTER:
        PushOperand(Smi::FromInt(DONT_ENUM));
        CallRuntimeWithOperands(Runtime::kDefineSetterPropertyUnchecked);
        break;

      default:
        UNREACHABLE();
    }
  }
}


void FullCodeGenerator::EmitBinaryOp(BinaryOperation* expr, Token::Value op) {
  PopOperand(r1);
  Handle<Code> code = CodeFactory::BinaryOpIC(isolate(), op).code();
  JumpPatchSite patch_site(masm_);  // unbound, signals no inlined smi code.
  CallIC(code, expr->BinaryOperationFeedbackId());
  patch_site.EmitPatchInfo();
  context()->Plug(r0);
}


void FullCodeGenerator::EmitAssignment(Expression* expr,
                                       FeedbackVectorSlot slot) {
  DCHECK(expr->IsValidReferenceExpressionOrThis());

  Property* prop = expr->AsProperty();
  LhsKind assign_type = Property::GetAssignType(prop);

  switch (assign_type) {
    case VARIABLE: {
      Variable* var = expr->AsVariableProxy()->var();
      EffectContext context(this);
      EmitVariableAssignment(var, Token::ASSIGN, slot);
      break;
    }
    case NAMED_PROPERTY: {
      PushOperand(r0);  // Preserve value.
      VisitForAccumulatorValue(prop->obj());
      __ Move(StoreDescriptor::ReceiverRegister(), r0);
      PopOperand(StoreDescriptor::ValueRegister());  // Restore value.
      __ mov(StoreDescriptor::NameRegister(),
             Operand(prop->key()->AsLiteral()->value()));
      EmitLoadStoreICSlot(slot);
      CallStoreIC();
      break;
    }
    case NAMED_SUPER_PROPERTY: {
      PushOperand(r0);
      VisitForStackValue(prop->obj()->AsSuperPropertyReference()->this_var());
      VisitForAccumulatorValue(
          prop->obj()->AsSuperPropertyReference()->home_object());
      // stack: value, this; r0: home_object
      Register scratch = r2;
      Register scratch2 = r3;
      __ mov(scratch, result_register());             // home_object
      __ ldr(r0, MemOperand(sp, kPointerSize));       // value
      __ ldr(scratch2, MemOperand(sp, 0));            // this
      __ str(scratch2, MemOperand(sp, kPointerSize));  // this
      __ str(scratch, MemOperand(sp, 0));             // home_object
      // stack: this, home_object; r0: value
      EmitNamedSuperPropertyStore(prop);
      break;
    }
    case KEYED_SUPER_PROPERTY: {
      PushOperand(r0);
      VisitForStackValue(prop->obj()->AsSuperPropertyReference()->this_var());
      VisitForStackValue(
          prop->obj()->AsSuperPropertyReference()->home_object());
      VisitForAccumulatorValue(prop->key());
      Register scratch = r2;
      Register scratch2 = r3;
      __ ldr(scratch2, MemOperand(sp, 2 * kPointerSize));  // value
      // stack: value, this, home_object; r0: key, r3: value
      __ ldr(scratch, MemOperand(sp, kPointerSize));  // this
      __ str(scratch, MemOperand(sp, 2 * kPointerSize));
      __ ldr(scratch, MemOperand(sp, 0));  // home_object
      __ str(scratch, MemOperand(sp, kPointerSize));
      __ str(r0, MemOperand(sp, 0));
      __ Move(r0, scratch2);
      // stack: this, home_object, key; r0: value.
      EmitKeyedSuperPropertyStore(prop);
      break;
    }
    case KEYED_PROPERTY: {
      PushOperand(r0);  // Preserve value.
      VisitForStackValue(prop->obj());
      VisitForAccumulatorValue(prop->key());
      __ Move(StoreDescriptor::NameRegister(), r0);
      PopOperands(StoreDescriptor::ValueRegister(),
                  StoreDescriptor::ReceiverRegister());
      EmitLoadStoreICSlot(slot);
      Handle<Code> ic =
          CodeFactory::KeyedStoreIC(isolate(), language_mode()).code();
      CallIC(ic);
      break;
    }
  }
  context()->Plug(r0);
}


void FullCodeGenerator::EmitStoreToStackLocalOrContextSlot(
    Variable* var, MemOperand location) {
  __ str(result_register(), location);
  if (var->IsContextSlot()) {
    // RecordWrite may destroy all its register arguments.
    __ mov(r3, result_register());
    int offset = Context::SlotOffset(var->index());
    __ RecordWriteContextSlot(
        r1, offset, r3, r2, kLRHasBeenSaved, kDontSaveFPRegs);
  }
}


void FullCodeGenerator::EmitVariableAssignment(Variable* var, Token::Value op,
                                               FeedbackVectorSlot slot) {
  if (var->IsUnallocated()) {
    // Global var, const, or let.
    __ mov(StoreDescriptor::NameRegister(), Operand(var->name()));
    __ LoadGlobalObject(StoreDescriptor::ReceiverRegister());
    EmitLoadStoreICSlot(slot);
    CallStoreIC();

  } else if (var->mode() == LET && op != Token::INIT) {
    // Non-initializing assignment to a let variable needs a hole check:
    // assigning before initialization throws a ReferenceError (temporal
    // dead zone).
    DCHECK(!var->IsLookupSlot());
    DCHECK(var->IsStackAllocated() || var->IsContextSlot());
    Label assign;
    MemOperand location = VarOperand(var, r1);
    __ ldr(r3, location);
    __ CompareRoot(r3, Heap::kTheHoleValueRootIndex);
    __ b(ne, &assign);
    __ mov(r3, Operand(var->name()));
    __ push(r3);
    __ CallRuntime(Runtime::kThrowReferenceError);
    // Perform the assignment.
    __ bind(&assign);
    EmitStoreToStackLocalOrContextSlot(var, location);

  } else if (var->mode() == CONST && op != Token::INIT) {
    // Non-initializing assignment to a const variable always fails:
    // uninitialized bindings throw a ReferenceError, initialized ones a
    // const assignment error.
    DCHECK(!var->IsLookupSlot());
    DCHECK(var->IsStackAllocated() || var->IsContextSlot());
    Label const_error;
    MemOperand location = VarOperand(var, r1);
    __ ldr(r3, location);
    __ CompareRoot(r3, Heap::kTheHoleValueRootIndex);
    __ b(ne, &const_error);
    __ mov(r3, Operand(var->name()));
    __ push(r3);
    __ CallRuntime(Runtime::kThrowReferenceError);
    __ bind(&const_error);
    __ CallRuntime(Runtime::kThrowConstAssignError);

  } else if (var->is_this() && var->mode() == CONST && op == Token::INIT) {
    // Initializing assignment to const {this} needs a write barrier.
    DCHECK(var->IsStackAllocated() || var->IsContextSlot());
    Label uninitialized_this;
    MemOperand location = VarOperand(var, r1);
    __ ldr(r3, location);
    __ CompareRoot(r3, Heap::kTheHoleValueRootIndex);
    __ b(eq, &uninitialized_this);
    __ mov(r0, Operand(var->name()));
    __ Push(r0);
    __ CallRuntime(Runtime::kThrowReferenceError);
    __ bind(&uninitialized_this);
    EmitStoreToStackLocalOrContextSlot(var, location);

  } else if (!var->is_const_mode() || op == Token::INIT) {
    if (var->IsLookupSlot()) {
      // Assignment to var.
      __ Push(var->name());
      __ Push(r0);
      __ CallRuntime(is_strict(language_mode())
                         ? Runtime::kStoreLookupSlot_Strict
                         : Runtime::kStoreLookupSlot_Sloppy);
    } else {
      // Assignment to var or initializing assignment to let/const in harmony
      // mode.
      DCHECK((var->IsStackAllocated() || var->IsContextSlot()));
      MemOperand location = VarOperand(var, r1);
      if (FLAG_debug_code && var->mode() == LET && op == Token::INIT) {
        // Check for an uninitialized let binding.
        __ ldr(r2, location);
        __ CompareRoot(r2, Heap::kTheHoleValueRootIndex);
        __ Check(eq, kLetBindingReInitialization);
      }
      EmitStoreToStackLocalOrContextSlot(var, location);
    }

  } else {
    DCHECK(var->mode() == CONST_LEGACY && op != Token::INIT);
    if (is_strict(language_mode())) {
      __ CallRuntime(Runtime::kThrowConstAssignError);
    }
    // Silently ignore the store in sloppy mode.
  }
}


void FullCodeGenerator::EmitNamedPropertyAssignment(Assignment* expr) {
  // Assignment to a property, using a named store IC.
  Property* prop = expr->target()->AsProperty();
  DCHECK(prop != NULL);
  DCHECK(prop->key()->IsLiteral());

  __ mov(StoreDescriptor::NameRegister(),
         Operand(prop->key()->AsLiteral()->value()));
  PopOperand(StoreDescriptor::ReceiverRegister());
  EmitLoadStoreICSlot(expr->AssignmentSlot());
  CallStoreIC();

  PrepareForBailoutForId(expr->AssignmentId(), BailoutState::TOS_REGISTER);
  context()->Plug(r0);
}


void FullCodeGenerator::EmitNamedSuperPropertyStore(Property* prop) {
  // Assignment to named property of super.
  // r0 : value
  // stack : receiver ('this'), home_object
  DCHECK(prop != NULL);
  Literal* key = prop->key()->AsLiteral();
  DCHECK(key != NULL);

  PushOperand(key->value());
  PushOperand(r0);
  CallRuntimeWithOperands(is_strict(language_mode())
                              ? Runtime::kStoreToSuper_Strict
                              : Runtime::kStoreToSuper_Sloppy);
}


void FullCodeGenerator::EmitKeyedSuperPropertyStore(Property* prop) {
  // Assignment to keyed property of super.
  // r0 : value
  // stack : receiver ('this'), home_object, key
  DCHECK(prop != NULL);

  PushOperand(r0);
  CallRuntimeWithOperands(is_strict(language_mode())
                              ? Runtime::kStoreKeyedToSuper_Strict
                              : Runtime::kStoreKeyedToSuper_Sloppy);
}


void FullCodeGenerator::EmitKeyedPropertyAssignment(Assignment* expr) {
  // Assignment to a property, using a keyed store IC.
  PopOperands(StoreDescriptor::ReceiverRegister(),
              StoreDescriptor::NameRegister());
  DCHECK(StoreDescriptor::ValueRegister().is(r0));

  Handle<Code> ic =
      CodeFactory::KeyedStoreIC(isolate(), language_mode()).code();
  EmitLoadStoreICSlot(expr->AssignmentSlot());
  CallIC(ic);

  PrepareForBailoutForId(expr->AssignmentId(), BailoutState::TOS_REGISTER);
  context()->Plug(r0);
}


void FullCodeGenerator::CallIC(Handle<Code> code,
                               TypeFeedbackId ast_id) {
  ic_total_count_++;
  // All calls must have a predictable size in full-codegen code to ensure that
  // the debugger can patch them correctly.
  __ Call(code, RelocInfo::CODE_TARGET, ast_id, al,
          NEVER_INLINE_TARGET_ADDRESS);
}


// Code common for calls using the IC.
void FullCodeGenerator::EmitCallWithLoadIC(Call* expr) {
  Expression* callee = expr->expression();

  // Get the target function.
  ConvertReceiverMode convert_mode;
  if (callee->IsVariableProxy()) {
    { StackValueContext context(this);
      EmitVariableLoad(callee->AsVariableProxy());
      PrepareForBailout(callee, BailoutState::NO_REGISTERS);
    }
    // Push undefined as receiver. This is patched in the method prologue if it
    // is a sloppy mode method.
    __ LoadRoot(ip, Heap::kUndefinedValueRootIndex);
    PushOperand(ip);
    convert_mode = ConvertReceiverMode::kNullOrUndefined;
  } else {
    // Load the function from the receiver.
    DCHECK(callee->IsProperty());
    DCHECK(!callee->AsProperty()->IsSuperAccess());
    __ ldr(LoadDescriptor::ReceiverRegister(), MemOperand(sp, 0));
    EmitNamedPropertyLoad(callee->AsProperty());
    PrepareForBailoutForId(callee->AsProperty()->LoadId(),
                           BailoutState::TOS_REGISTER);
    // Push the target function under the receiver.
    __ ldr(ip, MemOperand(sp, 0));
    PushOperand(ip);
    __ str(r0, MemOperand(sp, kPointerSize));
    convert_mode = ConvertReceiverMode::kNotNullOrUndefined;
  }

  EmitCall(expr, convert_mode);
}


void FullCodeGenerator::EmitSuperCallWithLoadIC(Call* expr) {
  Expression* callee = expr->expression();
  DCHECK(callee->IsProperty());
  Property* prop = callee->AsProperty();
  DCHECK(prop->IsSuperAccess());
  SetExpressionPosition(prop);

  Literal* key = prop->key()->AsLiteral();
  DCHECK(!key->value()->IsSmi());
  // Load the function from the receiver.
  const Register scratch = r1;
  SuperPropertyReference* super_ref = prop->obj()->AsSuperPropertyReference();
  VisitForStackValue(super_ref->home_object());
  VisitForAccumulatorValue(super_ref->this_var());
  PushOperand(r0);
  PushOperand(r0);
  __ ldr(scratch, MemOperand(sp, kPointerSize * 2));
  PushOperand(scratch);
  PushOperand(key->value());

  // Stack here:
  //  - home_object
  //  - this (receiver)
  //  - this (receiver) <-- LoadFromSuper will pop here and below.
  //  - home_object
  //  - key
  CallRuntimeWithOperands(Runtime::kLoadFromSuper);
  PrepareForBailoutForId(prop->LoadId(), BailoutState::TOS_REGISTER);

  // Replace home_object with target function.
  __ str(r0, MemOperand(sp, kPointerSize));

  // Stack here:
  // - target function
  // - this (receiver)
  EmitCall(expr);
}


// Code common for calls using the IC.
void FullCodeGenerator::EmitKeyedCallWithLoadIC(Call* expr,
                                                Expression* key) {
  // Load the key.
  VisitForAccumulatorValue(key);

  Expression* callee = expr->expression();

  // Load the function from the receiver.
  DCHECK(callee->IsProperty());
  __ ldr(LoadDescriptor::ReceiverRegister(), MemOperand(sp, 0));
  __ Move(LoadDescriptor::NameRegister(), r0);
  EmitKeyedPropertyLoad(callee->AsProperty());
  PrepareForBailoutForId(callee->AsProperty()->LoadId(),
                         BailoutState::TOS_REGISTER);

  // Push the target function under the receiver.
  __ ldr(ip, MemOperand(sp, 0));
  PushOperand(ip);
  __ str(r0, MemOperand(sp, kPointerSize));

  EmitCall(expr, ConvertReceiverMode::kNotNullOrUndefined);
}


void FullCodeGenerator::EmitKeyedSuperCallWithLoadIC(Call* expr) {
  Expression* callee = expr->expression();
  DCHECK(callee->IsProperty());
  Property* prop = callee->AsProperty();
  DCHECK(prop->IsSuperAccess());

  SetExpressionPosition(prop);
  // Load the function from the receiver.
  const Register scratch = r1;
  SuperPropertyReference* super_ref = prop->obj()->AsSuperPropertyReference();
  VisitForStackValue(super_ref->home_object());
  VisitForAccumulatorValue(super_ref->this_var());
  PushOperand(r0);
  PushOperand(r0);
  __ ldr(scratch, MemOperand(sp, kPointerSize * 2));
  PushOperand(scratch);
  VisitForStackValue(prop->key());

  // Stack here:
  //  - home_object
  //  - this (receiver)
  //  - this (receiver) <-- LoadKeyedFromSuper will pop here and below.
  //  - home_object
  //  - key
  CallRuntimeWithOperands(Runtime::kLoadKeyedFromSuper);
  PrepareForBailoutForId(prop->LoadId(), BailoutState::TOS_REGISTER);

  // Replace home_object with target function.
  __ str(r0, MemOperand(sp, kPointerSize));

  // Stack here:
  // - target function
  // - this (receiver)
  EmitCall(expr);
}


void FullCodeGenerator::EmitCall(Call* expr, ConvertReceiverMode mode) {
  // Load the arguments.
  ZoneList<Expression*>* args = expr->arguments();
  int arg_count = args->length();
  for (int i = 0; i < arg_count; i++) {
    VisitForStackValue(args->at(i));
  }

  PrepareForBailoutForId(expr->CallId(), BailoutState::NO_REGISTERS);
  SetCallPosition(expr, expr->tail_call_mode());
  if (expr->tail_call_mode() == TailCallMode::kAllow) {
    if (FLAG_trace) {
      __ CallRuntime(Runtime::kTraceTailCall);
    }
    // Update profiling counters before the tail call since we will
    // not return to this function.
    EmitProfilingCounterHandlingForReturnSequence(true);
  }
  Handle<Code> ic =
      CodeFactory::CallIC(isolate(), arg_count, mode, expr->tail_call_mode())
          .code();
  __ mov(r3, Operand(SmiFromSlot(expr->CallFeedbackICSlot())));
  __ ldr(r1, MemOperand(sp, (arg_count + 1) * kPointerSize));
  // Don't assign a type feedback id to the IC, since type feedback is provided
  // by the vector above.
  CallIC(ic);
  OperandStackDepthDecrement(arg_count + 1);

  RecordJSReturnSite(expr);
  RestoreContext();
  context()->DropAndPlug(1, r0);
}

void FullCodeGenerator::EmitResolvePossiblyDirectEval(Call* expr) {
  int arg_count = expr->arguments()->length();
  // r4: copy of the first argument or undefined if it doesn't exist.
  if (arg_count > 0) {
    __ ldr(r4, MemOperand(sp, arg_count * kPointerSize));
  } else {
    __ LoadRoot(r4, Heap::kUndefinedValueRootIndex);
  }

  // r3: the closure of the enclosing function (the frame's function slot).
  __ ldr(r3, MemOperand(fp, JavaScriptFrameConstants::kFunctionOffset));

  // r2: language mode.
  __ mov(r2, Operand(Smi::FromInt(language_mode())));

  // r1: the start position of the scope the call resides in.
  __ mov(r1, Operand(Smi::FromInt(scope()->start_position())));

  // r0: the source position of the eval call.
  __ mov(r0, Operand(Smi::FromInt(expr->position())));

  // Do the runtime call.
  __ Push(r4, r3, r2, r1, r0);
  __ CallRuntime(Runtime::kResolvePossiblyDirectEval);
}


// See http://www.ecma-international.org/ecma-262/6.0/#sec-function-calls.
void FullCodeGenerator::PushCalleeAndWithBaseObject(Call* expr) {
  VariableProxy* callee = expr->expression()->AsVariableProxy();
  if (callee->var()->IsLookupSlot()) {
    Label slow, done;
    SetExpressionPosition(callee);
    // Generate code for loading from variables potentially shadowed
    // by eval-introduced variables.
    EmitDynamicLookupFastCase(callee, NOT_INSIDE_TYPEOF, &slow, &done);

    __ bind(&slow);
    // Call the runtime to find the function to call (returned in r0)
    // and the object holding it (returned in r1).
    __ Push(callee->name());
    __ CallRuntime(Runtime::kLoadLookupSlotForCall);
    PushOperands(r0, r1);  // Function, receiver.
    PrepareForBailoutForId(expr->LookupId(), BailoutState::NO_REGISTERS);

    // If fast case code has been generated, emit code to push the
    // function and receiver and have the slow path jump around this
    // code.
    if (done.is_linked()) {
      Label call;
      __ b(&call);
      __ bind(&done);
      // Push function.
      __ push(r0);
      // The receiver is implicitly the global receiver. Indicate this
      // by passing undefined to the call function stub.
      __ LoadRoot(r1, Heap::kUndefinedValueRootIndex);
      __ push(r1);
      __ bind(&call);
    }
  } else {
    VisitForStackValue(callee);
    // refEnv.WithBaseObject()
    __ LoadRoot(r2, Heap::kUndefinedValueRootIndex);
    PushOperand(r2);  // Reserved receiver slot.
  }
}


void FullCodeGenerator::EmitPossiblyEvalCall(Call* expr) {
  // In a call to eval, we first call
  // Runtime_ResolvePossiblyDirectEval to resolve the function we need
  // to call. Then we call the resolved function using the given arguments.
  ZoneList<Expression*>* args = expr->arguments();
  int arg_count = args->length();

  PushCalleeAndWithBaseObject(expr);

  // Push the arguments.
  for (int i = 0; i < arg_count; i++) {
    VisitForStackValue(args->at(i));
  }

  // Push a copy of the function (found below the arguments) and
  // resolve eval.
  __ ldr(r1, MemOperand(sp, (arg_count + 1) * kPointerSize));
  __ push(r1);
  EmitResolvePossiblyDirectEval(expr);

  // Touch up the stack with the resolved function.
  __ str(r0, MemOperand(sp, (arg_count + 1) * kPointerSize));

  PrepareForBailoutForId(expr->EvalId(), BailoutState::NO_REGISTERS);

  // Record source position for debugger.
  SetCallPosition(expr);
  __ ldr(r1, MemOperand(sp, (arg_count + 1) * kPointerSize));
  __ mov(r0, Operand(arg_count));
  __ Call(isolate()->builtins()->Call(ConvertReceiverMode::kAny,
                                      expr->tail_call_mode()),
          RelocInfo::CODE_TARGET);
  OperandStackDepthDecrement(arg_count + 1);
  RecordJSReturnSite(expr);
  RestoreContext();
  context()->DropAndPlug(1, r0);
}


void FullCodeGenerator::VisitCallNew(CallNew* expr) {
  Comment cmnt(masm_, "[ CallNew");
  // According to ECMA-262, section 11.2.2, page 44, the function
  // expression in new calls must be evaluated before the
  // arguments.

  // Push constructor on the stack. If it's not a function it's used as
  // receiver for CALL_NON_FUNCTION, otherwise the value on the stack is
  // ignored.
  DCHECK(!expr->expression()->IsSuperPropertyReference());
  VisitForStackValue(expr->expression());

  // Push the arguments ("left-to-right") on the stack.
  ZoneList<Expression*>* args = expr->arguments();
  int arg_count = args->length();
  for (int i = 0; i < arg_count; i++) {
    VisitForStackValue(args->at(i));
  }

  // Call the construct call builtin that handles allocation and
  // constructor invocation.
  SetConstructCallPosition(expr);

  // Load function and argument count into r1 and r0.
  __ mov(r0, Operand(arg_count));
  __ ldr(r1, MemOperand(sp, arg_count * kPointerSize));

  // Record call targets in unoptimized code.
  __ EmitLoadTypeFeedbackVector(r2);
  __ mov(r3, Operand(SmiFromSlot(expr->CallNewFeedbackSlot())));

  CallConstructStub stub(isolate());
  __ Call(stub.GetCode(), RelocInfo::CODE_TARGET);
  OperandStackDepthDecrement(arg_count + 1);
  PrepareForBailoutForId(expr->ReturnId(), BailoutState::TOS_REGISTER);
  RestoreContext();
  context()->Plug(r0);
}


void FullCodeGenerator::EmitSuperConstructorCall(Call* expr) {
  SuperCallReference* super_call_ref =
      expr->expression()->AsSuperCallReference();
  DCHECK_NOT_NULL(super_call_ref);

  // Push the super constructor target on the stack (may be null,
  // but the Construct builtin can deal with that properly).
  VisitForAccumulatorValue(super_call_ref->this_function_var());
  __ AssertFunction(result_register());
  __ ldr(result_register(),
         FieldMemOperand(result_register(), HeapObject::kMapOffset));
  __ ldr(result_register(),
         FieldMemOperand(result_register(), Map::kPrototypeOffset));
  PushOperand(result_register());

  // Push the arguments ("left-to-right") on the stack.
  ZoneList<Expression*>* args = expr->arguments();
  int arg_count = args->length();
  for (int i = 0; i < arg_count; i++) {
    VisitForStackValue(args->at(i));
  }

  // Call the construct call builtin that handles allocation and
  // constructor invocation.
  SetConstructCallPosition(expr);

  // Load new target into r3.
  VisitForAccumulatorValue(super_call_ref->new_target_var());
  __ mov(r3, result_register());

  // Load function and argument count into r1 and r0.
  __ mov(r0, Operand(arg_count));
  __ ldr(r1, MemOperand(sp, arg_count * kPointerSize));

  __ Call(isolate()->builtins()->Construct(), RelocInfo::CODE_TARGET);
  OperandStackDepthDecrement(arg_count + 1);

  RecordJSReturnSite(expr);
  RestoreContext();
  context()->Plug(r0);
}


void FullCodeGenerator::EmitIsSmi(CallRuntime* expr) {
  ZoneList<Expression*>* args = expr->arguments();
  DCHECK(args->length() == 1);

  VisitForAccumulatorValue(args->at(0));

  Label materialize_true, materialize_false;
  Label* if_true = NULL;
  Label* if_false = NULL;
  Label* fall_through = NULL;
  context()->PrepareTest(&materialize_true, &materialize_false,
                         &if_true, &if_false, &fall_through);

  PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
  __ SmiTst(r0);
  Split(eq, if_true, if_false, fall_through);

  context()->Plug(if_true, if_false);
}
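
// Note: this and the following Emit* type predicates share one pattern:
// PrepareTest picks branch targets appropriate to the enclosing expression
// context, a single comparison sets the condition flags, and Split emits the
// conditional branch, letting one target fall through where possible.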


void FullCodeGenerator::EmitIsJSReceiver(CallRuntime* expr) {
  ZoneList<Expression*>* args = expr->arguments();
  DCHECK(args->length() == 1);

  VisitForAccumulatorValue(args->at(0));

  Label materialize_true, materialize_false;
  Label* if_true = NULL;
  Label* if_false = NULL;
  Label* fall_through = NULL;
  context()->PrepareTest(&materialize_true, &materialize_false,
                         &if_true, &if_false, &fall_through);

  __ JumpIfSmi(r0, if_false);
  __ CompareObjectType(r0, r1, r1, FIRST_JS_RECEIVER_TYPE);
  PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
  Split(ge, if_true, if_false, fall_through);

  context()->Plug(if_true, if_false);
}


void FullCodeGenerator::EmitIsArray(CallRuntime* expr) {
  ZoneList<Expression*>* args = expr->arguments();
  DCHECK(args->length() == 1);

  VisitForAccumulatorValue(args->at(0));

  Label materialize_true, materialize_false;
  Label* if_true = NULL;
  Label* if_false = NULL;
  Label* fall_through = NULL;
  context()->PrepareTest(&materialize_true, &materialize_false,
                         &if_true, &if_false, &fall_through);

  __ JumpIfSmi(r0, if_false);
  __ CompareObjectType(r0, r1, r1, JS_ARRAY_TYPE);
  PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
  Split(eq, if_true, if_false, fall_through);

  context()->Plug(if_true, if_false);
}


void FullCodeGenerator::EmitIsTypedArray(CallRuntime* expr) {
  ZoneList<Expression*>* args = expr->arguments();
  DCHECK(args->length() == 1);

  VisitForAccumulatorValue(args->at(0));

  Label materialize_true, materialize_false;
  Label* if_true = NULL;
  Label* if_false = NULL;
  Label* fall_through = NULL;
  context()->PrepareTest(&materialize_true, &materialize_false, &if_true,
                         &if_false, &fall_through);

  __ JumpIfSmi(r0, if_false);
  __ CompareObjectType(r0, r1, r1, JS_TYPED_ARRAY_TYPE);
  PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
  Split(eq, if_true, if_false, fall_through);

  context()->Plug(if_true, if_false);
}


void FullCodeGenerator::EmitIsRegExp(CallRuntime* expr) {
  ZoneList<Expression*>* args = expr->arguments();
  DCHECK(args->length() == 1);

  VisitForAccumulatorValue(args->at(0));

  Label materialize_true, materialize_false;
  Label* if_true = NULL;
  Label* if_false = NULL;
  Label* fall_through = NULL;
  context()->PrepareTest(&materialize_true, &materialize_false,
                         &if_true, &if_false, &fall_through);

  __ JumpIfSmi(r0, if_false);
  __ CompareObjectType(r0, r1, r1, JS_REGEXP_TYPE);
  PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
  Split(eq, if_true, if_false, fall_through);

  context()->Plug(if_true, if_false);
}


void FullCodeGenerator::EmitIsJSProxy(CallRuntime* expr) {
  ZoneList<Expression*>* args = expr->arguments();
  DCHECK(args->length() == 1);

  VisitForAccumulatorValue(args->at(0));

  Label materialize_true, materialize_false;
  Label* if_true = NULL;
  Label* if_false = NULL;
  Label* fall_through = NULL;
  context()->PrepareTest(&materialize_true, &materialize_false, &if_true,
                         &if_false, &fall_through);

  __ JumpIfSmi(r0, if_false);
  __ CompareObjectType(r0, r1, r1, JS_PROXY_TYPE);
  PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
  Split(eq, if_true, if_false, fall_through);

  context()->Plug(if_true, if_false);
}
2819
2820
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00002821void FullCodeGenerator::EmitClassOf(CallRuntime* expr) {
2822 ZoneList<Expression*>* args = expr->arguments();
2823 DCHECK(args->length() == 1);
2824 Label done, null, function, non_function_constructor;
2825
2826 VisitForAccumulatorValue(args->at(0));
2827
2828 // If the object is not a JSReceiver, we return null.
2829 __ JumpIfSmi(r0, &null);
2830 STATIC_ASSERT(LAST_JS_RECEIVER_TYPE == LAST_TYPE);
2831 __ CompareObjectType(r0, r0, r1, FIRST_JS_RECEIVER_TYPE);
2832 // Map is now in r0.
2833 __ b(lt, &null);
2834
Ben Murdochda12d292016-06-02 14:46:10 +01002835 // Return 'Function' for JSFunction and JSBoundFunction objects.
2836 __ cmp(r1, Operand(FIRST_FUNCTION_TYPE));
2837 STATIC_ASSERT(LAST_FUNCTION_TYPE == LAST_TYPE);
2838 __ b(hs, &function);
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00002839
2840 // Check if the constructor in the map is a JS function.
2841 Register instance_type = r2;
2842 __ GetMapConstructor(r0, r0, r1, instance_type);
2843 __ cmp(instance_type, Operand(JS_FUNCTION_TYPE));
2844 __ b(ne, &non_function_constructor);
2845
2846 // r0 now contains the constructor function. Grab the
2847 // instance class name from there.
2848 __ ldr(r0, FieldMemOperand(r0, JSFunction::kSharedFunctionInfoOffset));
2849 __ ldr(r0, FieldMemOperand(r0, SharedFunctionInfo::kInstanceClassNameOffset));
2850 __ b(&done);
2851
2852 // Functions have class 'Function'.
2853 __ bind(&function);
2854 __ LoadRoot(r0, Heap::kFunction_stringRootIndex);
2855 __ jmp(&done);
2856
2857 // Objects with a non-function constructor have class 'Object'.
2858 __ bind(&non_function_constructor);
2859 __ LoadRoot(r0, Heap::kObject_stringRootIndex);
2860 __ jmp(&done);
2861
2862 // Non-JS objects have class null.
2863 __ bind(&null);
2864 __ LoadRoot(r0, Heap::kNullValueRootIndex);
2865
2866 // All done.
2867 __ bind(&done);
2868
2869 context()->Plug(r0);
2870}
2871
2872
2873void FullCodeGenerator::EmitValueOf(CallRuntime* expr) {
2874 ZoneList<Expression*>* args = expr->arguments();
2875 DCHECK(args->length() == 1);
2876 VisitForAccumulatorValue(args->at(0)); // Load the object.
2877
2878 Label done;
2879 // If the object is a smi return the object.
2880 __ JumpIfSmi(r0, &done);
2881 // If the object is not a value type, return the object.
2882 __ CompareObjectType(r0, r1, r1, JS_VALUE_TYPE);
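  // The load below is predicated on eq, so r0 is only replaced by the wrapped
  // value when the object really is a JSValue.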
  __ ldr(r0, FieldMemOperand(r0, JSValue::kValueOffset), eq);

  __ bind(&done);
  context()->Plug(r0);
}


void FullCodeGenerator::EmitStringCharFromCode(CallRuntime* expr) {
  ZoneList<Expression*>* args = expr->arguments();
  DCHECK(args->length() == 1);
  VisitForAccumulatorValue(args->at(0));

  Label done;
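  // The generator consumes the char code in r0 and materializes the
  // one-character string in r1, which is why r1 is plugged below.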
  StringCharFromCodeGenerator generator(r0, r1);
  generator.GenerateFast(masm_);
  __ jmp(&done);

  NopRuntimeCallHelper call_helper;
  generator.GenerateSlow(masm_, call_helper);

  __ bind(&done);
  context()->Plug(r1);
}


void FullCodeGenerator::EmitStringCharCodeAt(CallRuntime* expr) {
  ZoneList<Expression*>* args = expr->arguments();
  DCHECK(args->length() == 2);
  VisitForStackValue(args->at(0));
  VisitForAccumulatorValue(args->at(1));

  Register object = r1;
  Register index = r0;
  Register result = r3;

  PopOperand(object);

  Label need_conversion;
  Label index_out_of_range;
  Label done;
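  // Two of the generator's slow paths share the need_conversion label;
  // index_out_of_range is taken only when the index is a number but lies
  // outside the string.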
  StringCharCodeAtGenerator generator(object, index, result, &need_conversion,
                                      &need_conversion, &index_out_of_range);
  generator.GenerateFast(masm_);
  __ jmp(&done);

  __ bind(&index_out_of_range);
  // When the index is out of range, the spec requires us to return
  // NaN.
  __ LoadRoot(result, Heap::kNanValueRootIndex);
  __ jmp(&done);

  __ bind(&need_conversion);
  // Load the undefined value into the result register, which will
  // trigger conversion.
  __ LoadRoot(result, Heap::kUndefinedValueRootIndex);
  __ jmp(&done);

  NopRuntimeCallHelper call_helper;
  generator.GenerateSlow(masm_, NOT_PART_OF_IC_HANDLER, call_helper);

  __ bind(&done);
  context()->Plug(result);
}


void FullCodeGenerator::EmitCall(CallRuntime* expr) {
  ZoneList<Expression*>* args = expr->arguments();
  DCHECK_LE(2, args->length());
  // Push target, receiver and arguments onto the stack.
  for (Expression* const arg : *args) {
    VisitForStackValue(arg);
  }
  PrepareForBailoutForId(expr->CallId(), BailoutState::NO_REGISTERS);
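  // The stack now holds the target, the receiver, and the actual arguments,
  // in that order, so the call argument count excludes the first two slots.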
  // Move target to r1.
  int const argc = args->length() - 2;
  __ ldr(r1, MemOperand(sp, (argc + 1) * kPointerSize));
  // Call the target.
  __ mov(r0, Operand(argc));
  __ Call(isolate()->builtins()->Call(), RelocInfo::CODE_TARGET);
  OperandStackDepthDecrement(argc + 1);
  RestoreContext();
  // Discard the function left on TOS.
  context()->DropAndPlug(1, r0);
}


void FullCodeGenerator::EmitHasCachedArrayIndex(CallRuntime* expr) {
  ZoneList<Expression*>* args = expr->arguments();
  VisitForAccumulatorValue(args->at(0));

  Label materialize_true, materialize_false;
  Label* if_true = NULL;
  Label* if_false = NULL;
  Label* fall_through = NULL;
  context()->PrepareTest(&materialize_true, &materialize_false,
                         &if_true, &if_false, &fall_through);

  __ ldr(r0, FieldMemOperand(r0, String::kHashFieldOffset));
  __ tst(r0, Operand(String::kContainsCachedArrayIndexMask));
  PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
  Split(eq, if_true, if_false, fall_through);

  context()->Plug(if_true, if_false);
}


void FullCodeGenerator::EmitGetCachedArrayIndex(CallRuntime* expr) {
  ZoneList<Expression*>* args = expr->arguments();
  DCHECK(args->length() == 1);
  VisitForAccumulatorValue(args->at(0));

  __ AssertString(r0);

  __ ldr(r0, FieldMemOperand(r0, String::kHashFieldOffset));
  __ IndexFromHash(r0, r0);

  context()->Plug(r0);
}


void FullCodeGenerator::EmitGetSuperConstructor(CallRuntime* expr) {
  ZoneList<Expression*>* args = expr->arguments();
  DCHECK_EQ(1, args->length());
  VisitForAccumulatorValue(args->at(0));
  __ AssertFunction(r0);
  __ ldr(r0, FieldMemOperand(r0, HeapObject::kMapOffset));
  __ ldr(r0, FieldMemOperand(r0, Map::kPrototypeOffset));
  context()->Plug(r0);
}

void FullCodeGenerator::EmitDebugIsActive(CallRuntime* expr) {
  DCHECK(expr->arguments()->length() == 0);
  ExternalReference debug_is_active =
      ExternalReference::debug_is_active_address(isolate());
  __ mov(ip, Operand(debug_is_active));
  __ ldrb(r0, MemOperand(ip));
  __ SmiTag(r0);
  context()->Plug(r0);
}


void FullCodeGenerator::EmitCreateIterResultObject(CallRuntime* expr) {
  ZoneList<Expression*>* args = expr->arguments();
  DCHECK_EQ(2, args->length());
  VisitForStackValue(args->at(0));
  VisitForStackValue(args->at(1));

  Label runtime, done;

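  // Fast path: allocate the JSIteratorResult inline and fill in its five
  // fields (map, properties, elements, value, done); fall back to the
  // runtime call on allocation failure.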
  __ Allocate(JSIteratorResult::kSize, r0, r2, r3, &runtime,
              NO_ALLOCATION_FLAGS);
  __ LoadNativeContextSlot(Context::ITERATOR_RESULT_MAP_INDEX, r1);
  __ pop(r3);
  __ pop(r2);
  __ LoadRoot(r4, Heap::kEmptyFixedArrayRootIndex);
  __ str(r1, FieldMemOperand(r0, HeapObject::kMapOffset));
  __ str(r4, FieldMemOperand(r0, JSObject::kPropertiesOffset));
  __ str(r4, FieldMemOperand(r0, JSObject::kElementsOffset));
  __ str(r2, FieldMemOperand(r0, JSIteratorResult::kValueOffset));
  __ str(r3, FieldMemOperand(r0, JSIteratorResult::kDoneOffset));
  STATIC_ASSERT(JSIteratorResult::kSize == 5 * kPointerSize);
  __ b(&done);

  __ bind(&runtime);
  CallRuntimeWithOperands(Runtime::kCreateIterResultObject);

  __ bind(&done);
  context()->Plug(r0);
}


void FullCodeGenerator::EmitLoadJSRuntimeFunction(CallRuntime* expr) {
  // Push function.
  __ LoadNativeContextSlot(expr->context_index(), r0);
  PushOperand(r0);

  // Push undefined as the receiver.
  __ LoadRoot(r0, Heap::kUndefinedValueRootIndex);
  PushOperand(r0);
}


void FullCodeGenerator::EmitCallJSRuntimeFunction(CallRuntime* expr) {
  ZoneList<Expression*>* args = expr->arguments();
  int arg_count = args->length();

  SetCallPosition(expr);
  __ ldr(r1, MemOperand(sp, (arg_count + 1) * kPointerSize));
  __ mov(r0, Operand(arg_count));
  __ Call(isolate()->builtins()->Call(ConvertReceiverMode::kNullOrUndefined),
          RelocInfo::CODE_TARGET);
  OperandStackDepthDecrement(arg_count + 1);
  RestoreContext();
}


void FullCodeGenerator::VisitUnaryOperation(UnaryOperation* expr) {
  switch (expr->op()) {
    case Token::DELETE: {
      Comment cmnt(masm_, "[ UnaryOperation (DELETE)");
      Property* property = expr->expression()->AsProperty();
      VariableProxy* proxy = expr->expression()->AsVariableProxy();

      if (property != NULL) {
        VisitForStackValue(property->obj());
        VisitForStackValue(property->key());
        CallRuntimeWithOperands(is_strict(language_mode())
                                    ? Runtime::kDeleteProperty_Strict
                                    : Runtime::kDeleteProperty_Sloppy);
        context()->Plug(r0);
      } else if (proxy != NULL) {
        Variable* var = proxy->var();
        // Delete of an unqualified identifier is disallowed in strict mode but
        // "delete this" is allowed.
        bool is_this = var->HasThisName(isolate());
        DCHECK(is_sloppy(language_mode()) || is_this);
        if (var->IsUnallocatedOrGlobalSlot()) {
          __ LoadGlobalObject(r2);
          __ mov(r1, Operand(var->name()));
          __ Push(r2, r1);
          __ CallRuntime(Runtime::kDeleteProperty_Sloppy);
          context()->Plug(r0);
        } else if (var->IsStackAllocated() || var->IsContextSlot()) {
          // Result of deleting non-global, non-dynamic variables is false.
          // The subexpression does not have side effects.
          context()->Plug(is_this);
        } else {
          // Non-global variable. Call the runtime to try to delete from the
          // context where the variable was introduced.
          __ Push(var->name());
          __ CallRuntime(Runtime::kDeleteLookupSlot);
          context()->Plug(r0);
        }
      } else {
        // Result of deleting non-property, non-variable reference is true.
        // The subexpression may have side effects.
        VisitForEffect(expr->expression());
        context()->Plug(true);
      }
      break;
    }

    case Token::VOID: {
      Comment cmnt(masm_, "[ UnaryOperation (VOID)");
      VisitForEffect(expr->expression());
      context()->Plug(Heap::kUndefinedValueRootIndex);
      break;
    }

    case Token::NOT: {
      Comment cmnt(masm_, "[ UnaryOperation (NOT)");
      if (context()->IsEffect()) {
        // Unary NOT has no side effects so it's only necessary to visit the
        // subexpression. Match the optimizing compiler by not branching.
        VisitForEffect(expr->expression());
      } else if (context()->IsTest()) {
        const TestContext* test = TestContext::cast(context());
        // The labels are swapped for the recursive call.
        VisitForControl(expr->expression(),
                        test->false_label(),
                        test->true_label(),
                        test->fall_through());
        context()->Plug(test->true_label(), test->false_label());
      } else {
        // We handle value contexts explicitly rather than simply visiting
        // for control and plugging the control flow into the context,
        // because we need to prepare a pair of extra administrative AST ids
        // for the optimizing compiler.
        DCHECK(context()->IsAccumulatorValue() || context()->IsStackValue());
        Label materialize_true, materialize_false, done;
        VisitForControl(expr->expression(),
                        &materialize_false,
                        &materialize_true,
                        &materialize_true);
        if (!context()->IsAccumulatorValue()) OperandStackDepthIncrement(1);
        __ bind(&materialize_true);
        PrepareForBailoutForId(expr->MaterializeTrueId(),
                               BailoutState::NO_REGISTERS);
        __ LoadRoot(r0, Heap::kTrueValueRootIndex);
        if (context()->IsStackValue()) __ push(r0);
        __ jmp(&done);
        __ bind(&materialize_false);
        PrepareForBailoutForId(expr->MaterializeFalseId(),
                               BailoutState::NO_REGISTERS);
        __ LoadRoot(r0, Heap::kFalseValueRootIndex);
        if (context()->IsStackValue()) __ push(r0);
        __ bind(&done);
      }
      break;
    }

    case Token::TYPEOF: {
      Comment cmnt(masm_, "[ UnaryOperation (TYPEOF)");
      {
        AccumulatorValueContext context(this);
        VisitForTypeofValue(expr->expression());
      }
      __ mov(r3, r0);
      TypeofStub typeof_stub(isolate());
      __ CallStub(&typeof_stub);
      context()->Plug(r0);
      break;
    }

    default:
      UNREACHABLE();
  }
}


void FullCodeGenerator::VisitCountOperation(CountOperation* expr) {
  DCHECK(expr->expression()->IsValidReferenceExpressionOrThis());

  Comment cmnt(masm_, "[ CountOperation");

  Property* prop = expr->expression()->AsProperty();
  LhsKind assign_type = Property::GetAssignType(prop);

  // Evaluate expression and get value.
  if (assign_type == VARIABLE) {
    DCHECK(expr->expression()->AsVariableProxy()->var() != NULL);
    AccumulatorValueContext context(this);
    EmitVariableLoad(expr->expression()->AsVariableProxy());
  } else {
    // Reserve space for result of postfix operation.
    if (expr->is_postfix() && !context()->IsEffect()) {
      __ mov(ip, Operand(Smi::FromInt(0)));
      PushOperand(ip);
    }
    switch (assign_type) {
      case NAMED_PROPERTY: {
        // Put the object both on the stack and in the register.
        VisitForStackValue(prop->obj());
        __ ldr(LoadDescriptor::ReceiverRegister(), MemOperand(sp, 0));
        EmitNamedPropertyLoad(prop);
        break;
      }

      case NAMED_SUPER_PROPERTY: {
        VisitForStackValue(prop->obj()->AsSuperPropertyReference()->this_var());
        VisitForAccumulatorValue(
            prop->obj()->AsSuperPropertyReference()->home_object());
        PushOperand(result_register());
        const Register scratch = r1;
        __ ldr(scratch, MemOperand(sp, kPointerSize));
        PushOperand(scratch);
        PushOperand(result_register());
        EmitNamedSuperPropertyLoad(prop);
        break;
      }

      case KEYED_SUPER_PROPERTY: {
        VisitForStackValue(prop->obj()->AsSuperPropertyReference()->this_var());
        VisitForStackValue(
            prop->obj()->AsSuperPropertyReference()->home_object());
        VisitForAccumulatorValue(prop->key());
        PushOperand(result_register());
        const Register scratch = r1;
        __ ldr(scratch, MemOperand(sp, 2 * kPointerSize));
        PushOperand(scratch);
        __ ldr(scratch, MemOperand(sp, 2 * kPointerSize));
        PushOperand(scratch);
        PushOperand(result_register());
        EmitKeyedSuperPropertyLoad(prop);
        break;
      }

      case KEYED_PROPERTY: {
        VisitForStackValue(prop->obj());
        VisitForStackValue(prop->key());
        __ ldr(LoadDescriptor::ReceiverRegister(),
               MemOperand(sp, 1 * kPointerSize));
        __ ldr(LoadDescriptor::NameRegister(), MemOperand(sp, 0));
        EmitKeyedPropertyLoad(prop);
        break;
      }

      case VARIABLE:
        UNREACHABLE();
    }
  }

  // We need a second deoptimization point after loading the value
  // in case evaluating the property load may have a side effect.
  if (assign_type == VARIABLE) {
    PrepareForBailout(expr->expression(), BailoutState::TOS_REGISTER);
  } else {
    PrepareForBailoutForId(prop->LoadId(), BailoutState::TOS_REGISTER);
  }

  // Inline smi case if we are in a loop.
  Label stub_call, done;
  JumpPatchSite patch_site(masm_);

  int count_value = expr->op() == Token::INC ? 1 : -1;
  if (ShouldInlineSmiCase(expr->op())) {
    Label slow;
    patch_site.EmitJumpIfNotSmi(r0, &slow);

    // Save result for postfix expressions.
    if (expr->is_postfix()) {
      if (!context()->IsEffect()) {
        // Save the result on the stack. If we have a named or keyed property
        // we store the result under the receiver that is currently on top
        // of the stack.
        switch (assign_type) {
          case VARIABLE:
            __ push(r0);
            break;
          case NAMED_PROPERTY:
            __ str(r0, MemOperand(sp, kPointerSize));
            break;
          case NAMED_SUPER_PROPERTY:
            __ str(r0, MemOperand(sp, 2 * kPointerSize));
            break;
          case KEYED_PROPERTY:
            __ str(r0, MemOperand(sp, 2 * kPointerSize));
            break;
          case KEYED_SUPER_PROPERTY:
            __ str(r0, MemOperand(sp, 3 * kPointerSize));
            break;
        }
      }
    }

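    // Inline smi increment/decrement: SetCC updates the flags, and vc
    // (no overflow) means the result is still a valid smi.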
    __ add(r0, r0, Operand(Smi::FromInt(count_value)), SetCC);
    __ b(vc, &done);
    // Call stub. Undo operation first.
    __ sub(r0, r0, Operand(Smi::FromInt(count_value)));
    __ jmp(&stub_call);
    __ bind(&slow);
  }

  // Convert old value into a number.
  __ Call(isolate()->builtins()->ToNumber(), RelocInfo::CODE_TARGET);
  PrepareForBailoutForId(expr->ToNumberId(), BailoutState::TOS_REGISTER);

  // Save result for postfix expressions.
  if (expr->is_postfix()) {
    if (!context()->IsEffect()) {
      // Save the result on the stack. If we have a named or keyed property
      // we store the result under the receiver that is currently on top
      // of the stack.
      switch (assign_type) {
        case VARIABLE:
          PushOperand(r0);
          break;
        case NAMED_PROPERTY:
          __ str(r0, MemOperand(sp, kPointerSize));
          break;
        case NAMED_SUPER_PROPERTY:
          __ str(r0, MemOperand(sp, 2 * kPointerSize));
          break;
        case KEYED_PROPERTY:
          __ str(r0, MemOperand(sp, 2 * kPointerSize));
          break;
        case KEYED_SUPER_PROPERTY:
          __ str(r0, MemOperand(sp, 3 * kPointerSize));
          break;
      }
    }
  }

  __ bind(&stub_call);
  __ mov(r1, r0);
  __ mov(r0, Operand(Smi::FromInt(count_value)));

  SetExpressionPosition(expr);

  Handle<Code> code = CodeFactory::BinaryOpIC(isolate(), Token::ADD).code();
  CallIC(code, expr->CountBinOpFeedbackId());
  patch_site.EmitPatchInfo();
  __ bind(&done);

  // Store the value returned in r0.
  switch (assign_type) {
    case VARIABLE:
      if (expr->is_postfix()) {
        { EffectContext context(this);
          EmitVariableAssignment(expr->expression()->AsVariableProxy()->var(),
                                 Token::ASSIGN, expr->CountSlot());
          PrepareForBailoutForId(expr->AssignmentId(),
                                 BailoutState::TOS_REGISTER);
          context.Plug(r0);
        }
        // For all contexts except EffectContext we have the result on
        // top of the stack.
        if (!context()->IsEffect()) {
          context()->PlugTOS();
        }
      } else {
        EmitVariableAssignment(expr->expression()->AsVariableProxy()->var(),
                               Token::ASSIGN, expr->CountSlot());
        PrepareForBailoutForId(expr->AssignmentId(),
                               BailoutState::TOS_REGISTER);
        context()->Plug(r0);
      }
      break;
    case NAMED_PROPERTY: {
      __ mov(StoreDescriptor::NameRegister(),
             Operand(prop->key()->AsLiteral()->value()));
      PopOperand(StoreDescriptor::ReceiverRegister());
      EmitLoadStoreICSlot(expr->CountSlot());
      CallStoreIC();
      PrepareForBailoutForId(expr->AssignmentId(), BailoutState::TOS_REGISTER);
      if (expr->is_postfix()) {
        if (!context()->IsEffect()) {
          context()->PlugTOS();
        }
      } else {
        context()->Plug(r0);
      }
      break;
    }
    case NAMED_SUPER_PROPERTY: {
      EmitNamedSuperPropertyStore(prop);
      if (expr->is_postfix()) {
        if (!context()->IsEffect()) {
          context()->PlugTOS();
        }
      } else {
        context()->Plug(r0);
      }
      break;
    }
    case KEYED_SUPER_PROPERTY: {
      EmitKeyedSuperPropertyStore(prop);
      if (expr->is_postfix()) {
        if (!context()->IsEffect()) {
          context()->PlugTOS();
        }
      } else {
        context()->Plug(r0);
      }
      break;
    }
    case KEYED_PROPERTY: {
      PopOperands(StoreDescriptor::ReceiverRegister(),
                  StoreDescriptor::NameRegister());
      Handle<Code> ic =
          CodeFactory::KeyedStoreIC(isolate(), language_mode()).code();
      EmitLoadStoreICSlot(expr->CountSlot());
      CallIC(ic);
      PrepareForBailoutForId(expr->AssignmentId(), BailoutState::TOS_REGISTER);
      if (expr->is_postfix()) {
        if (!context()->IsEffect()) {
          context()->PlugTOS();
        }
      } else {
        context()->Plug(r0);
      }
      break;
    }
  }
}


void FullCodeGenerator::EmitLiteralCompareTypeof(Expression* expr,
                                                 Expression* sub_expr,
                                                 Handle<String> check) {
  Label materialize_true, materialize_false;
  Label* if_true = NULL;
  Label* if_false = NULL;
  Label* fall_through = NULL;
  context()->PrepareTest(&materialize_true, &materialize_false,
                         &if_true, &if_false, &fall_through);

  { AccumulatorValueContext context(this);
    VisitForTypeofValue(sub_expr);
  }
  PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);

  Factory* factory = isolate()->factory();
  if (String::Equals(check, factory->number_string())) {
    __ JumpIfSmi(r0, if_true);
    __ ldr(r0, FieldMemOperand(r0, HeapObject::kMapOffset));
    __ LoadRoot(ip, Heap::kHeapNumberMapRootIndex);
    __ cmp(r0, ip);
    Split(eq, if_true, if_false, fall_through);
  } else if (String::Equals(check, factory->string_string())) {
    __ JumpIfSmi(r0, if_false);
    __ CompareObjectType(r0, r0, r1, FIRST_NONSTRING_TYPE);
    Split(lt, if_true, if_false, fall_through);
  } else if (String::Equals(check, factory->symbol_string())) {
    __ JumpIfSmi(r0, if_false);
    __ CompareObjectType(r0, r0, r1, SYMBOL_TYPE);
    Split(eq, if_true, if_false, fall_through);
  } else if (String::Equals(check, factory->boolean_string())) {
    __ CompareRoot(r0, Heap::kTrueValueRootIndex);
    __ b(eq, if_true);
    __ CompareRoot(r0, Heap::kFalseValueRootIndex);
    Split(eq, if_true, if_false, fall_through);
  } else if (String::Equals(check, factory->undefined_string())) {
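    // Rule out null explicitly, so the undetectable-map check below cannot
    // misclassify it: typeof null is "object", not "undefined".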
    __ CompareRoot(r0, Heap::kNullValueRootIndex);
    __ b(eq, if_false);
    __ JumpIfSmi(r0, if_false);
    // Check for undetectable objects => true.
    __ ldr(r0, FieldMemOperand(r0, HeapObject::kMapOffset));
    __ ldrb(r1, FieldMemOperand(r0, Map::kBitFieldOffset));
    __ tst(r1, Operand(1 << Map::kIsUndetectable));
    Split(ne, if_true, if_false, fall_through);

  } else if (String::Equals(check, factory->function_string())) {
    __ JumpIfSmi(r0, if_false);
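    // typeof is "function" only for maps that are callable and not
    // undetectable, hence the combined bit-field test below.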
    __ ldr(r0, FieldMemOperand(r0, HeapObject::kMapOffset));
    __ ldrb(r1, FieldMemOperand(r0, Map::kBitFieldOffset));
    __ and_(r1, r1,
            Operand((1 << Map::kIsCallable) | (1 << Map::kIsUndetectable)));
    __ cmp(r1, Operand(1 << Map::kIsCallable));
    Split(eq, if_true, if_false, fall_through);
  } else if (String::Equals(check, factory->object_string())) {
    __ JumpIfSmi(r0, if_false);
    __ CompareRoot(r0, Heap::kNullValueRootIndex);
    __ b(eq, if_true);
    STATIC_ASSERT(LAST_JS_RECEIVER_TYPE == LAST_TYPE);
    __ CompareObjectType(r0, r0, r1, FIRST_JS_RECEIVER_TYPE);
    __ b(lt, if_false);
    // Check for callable or undetectable objects => false.
    __ ldrb(r1, FieldMemOperand(r0, Map::kBitFieldOffset));
    __ tst(r1, Operand((1 << Map::kIsCallable) | (1 << Map::kIsUndetectable)));
    Split(eq, if_true, if_false, fall_through);
// clang-format off
#define SIMD128_TYPE(TYPE, Type, type, lane_count, lane_type)   \
  } else if (String::Equals(check, factory->type##_string())) { \
    __ JumpIfSmi(r0, if_false);                                 \
    __ ldr(r0, FieldMemOperand(r0, HeapObject::kMapOffset));    \
    __ CompareRoot(r0, Heap::k##Type##MapRootIndex);            \
    Split(eq, if_true, if_false, fall_through);
  SIMD128_TYPES(SIMD128_TYPE)
#undef SIMD128_TYPE
  // clang-format on
  } else {
    if (if_false != fall_through) __ jmp(if_false);
  }
  context()->Plug(if_true, if_false);
}


void FullCodeGenerator::VisitCompareOperation(CompareOperation* expr) {
  Comment cmnt(masm_, "[ CompareOperation");

  // First we try a fast inlined version of the compare when one of
  // the operands is a literal.
  if (TryLiteralCompare(expr)) return;

  // Always perform the comparison for its control flow. Pack the result
  // into the expression's context after the comparison is performed.
  Label materialize_true, materialize_false;
  Label* if_true = NULL;
  Label* if_false = NULL;
  Label* fall_through = NULL;
  context()->PrepareTest(&materialize_true, &materialize_false,
                         &if_true, &if_false, &fall_through);

  Token::Value op = expr->op();
  VisitForStackValue(expr->left());
  switch (op) {
    case Token::IN:
      VisitForStackValue(expr->right());
      SetExpressionPosition(expr);
      EmitHasProperty();
      PrepareForBailoutBeforeSplit(expr, false, NULL, NULL);
      __ CompareRoot(r0, Heap::kTrueValueRootIndex);
      Split(eq, if_true, if_false, fall_through);
      break;

    case Token::INSTANCEOF: {
      VisitForAccumulatorValue(expr->right());
      SetExpressionPosition(expr);
      PopOperand(r1);
      InstanceOfStub stub(isolate());
      __ CallStub(&stub);
      PrepareForBailoutBeforeSplit(expr, false, NULL, NULL);
      __ CompareRoot(r0, Heap::kTrueValueRootIndex);
      Split(eq, if_true, if_false, fall_through);
      break;
    }

    default: {
      VisitForAccumulatorValue(expr->right());
      SetExpressionPosition(expr);
      Condition cond = CompareIC::ComputeCondition(op);
      PopOperand(r1);

      bool inline_smi_code = ShouldInlineSmiCase(op);
      JumpPatchSite patch_site(masm_);
      if (inline_smi_code) {
        Label slow_case;
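        // OR the two operands: the result has a zero smi tag only if both
        // operands are smis, so a single check covers both registers.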
        __ orr(r2, r0, Operand(r1));
        patch_site.EmitJumpIfNotSmi(r2, &slow_case);
        __ cmp(r1, r0);
        Split(cond, if_true, if_false, NULL);
        __ bind(&slow_case);
      }

      Handle<Code> ic = CodeFactory::CompareIC(isolate(), op).code();
      CallIC(ic, expr->CompareOperationFeedbackId());
      patch_site.EmitPatchInfo();
      PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
      __ cmp(r0, Operand::Zero());
      Split(cond, if_true, if_false, fall_through);
    }
  }

  // Convert the result of the comparison into one expected for this
  // expression's context.
  context()->Plug(if_true, if_false);
}


void FullCodeGenerator::EmitLiteralCompareNil(CompareOperation* expr,
                                              Expression* sub_expr,
                                              NilValue nil) {
  Label materialize_true, materialize_false;
  Label* if_true = NULL;
  Label* if_false = NULL;
  Label* fall_through = NULL;
  context()->PrepareTest(&materialize_true, &materialize_false,
                         &if_true, &if_false, &fall_through);

  VisitForAccumulatorValue(sub_expr);
  PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
  if (expr->op() == Token::EQ_STRICT) {
    Heap::RootListIndex nil_value = nil == kNullValue ?
        Heap::kNullValueRootIndex :
        Heap::kUndefinedValueRootIndex;
    __ LoadRoot(r1, nil_value);
    __ cmp(r0, r1);
    Split(eq, if_true, if_false, fall_through);
  } else {
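    // Sloppy equality against null/undefined holds exactly for undetectable
    // objects, so test the map's undetectable bit.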
    __ JumpIfSmi(r0, if_false);
    __ ldr(r0, FieldMemOperand(r0, HeapObject::kMapOffset));
    __ ldrb(r1, FieldMemOperand(r0, Map::kBitFieldOffset));
    __ tst(r1, Operand(1 << Map::kIsUndetectable));
    Split(ne, if_true, if_false, fall_through);
  }
  context()->Plug(if_true, if_false);
}


Register FullCodeGenerator::result_register() {
  return r0;
}


Register FullCodeGenerator::context_register() {
  return cp;
}

void FullCodeGenerator::LoadFromFrameField(int frame_offset, Register value) {
  DCHECK_EQ(POINTER_SIZE_ALIGN(frame_offset), frame_offset);
  __ ldr(value, MemOperand(fp, frame_offset));
}

void FullCodeGenerator::StoreToFrameField(int frame_offset, Register value) {
  DCHECK_EQ(POINTER_SIZE_ALIGN(frame_offset), frame_offset);
  __ str(value, MemOperand(fp, frame_offset));
}


void FullCodeGenerator::LoadContextField(Register dst, int context_index) {
  __ ldr(dst, ContextMemOperand(cp, context_index));
}


void FullCodeGenerator::PushFunctionArgumentForContextAllocation() {
  Scope* closure_scope = scope()->ClosureScope();
  if (closure_scope->is_script_scope() ||
      closure_scope->is_module_scope()) {
    // Contexts nested in the native context have a canonical empty function
    // as their closure, not the anonymous closure containing the global
    // code.
    __ LoadNativeContextSlot(Context::CLOSURE_INDEX, ip);
  } else if (closure_scope->is_eval_scope()) {
    // Contexts created by a call to eval have the same closure as the
    // context calling eval, not the anonymous closure containing the eval
    // code. Fetch it from the context.
    __ ldr(ip, ContextMemOperand(cp, Context::CLOSURE_INDEX));
  } else {
    DCHECK(closure_scope->is_function_scope());
    __ ldr(ip, MemOperand(fp, JavaScriptFrameConstants::kFunctionOffset));
  }
  PushOperand(ip);
}


// ----------------------------------------------------------------------------
// Non-local control flow support.

void FullCodeGenerator::EnterFinallyBlock() {
  DCHECK(!result_register().is(r1));
  // Store pending message while executing finally block.
  ExternalReference pending_message_obj =
      ExternalReference::address_of_pending_message_obj(isolate());
  __ mov(ip, Operand(pending_message_obj));
  __ ldr(r1, MemOperand(ip));
  PushOperand(r1);

  ClearPendingMessage();
}


void FullCodeGenerator::ExitFinallyBlock() {
  DCHECK(!result_register().is(r1));
  // Restore pending message from stack.
  PopOperand(r1);
  ExternalReference pending_message_obj =
      ExternalReference::address_of_pending_message_obj(isolate());
  __ mov(ip, Operand(pending_message_obj));
  __ str(r1, MemOperand(ip));
}


void FullCodeGenerator::ClearPendingMessage() {
  DCHECK(!result_register().is(r1));
  ExternalReference pending_message_obj =
      ExternalReference::address_of_pending_message_obj(isolate());
  __ LoadRoot(r1, Heap::kTheHoleValueRootIndex);
  __ mov(ip, Operand(pending_message_obj));
  __ str(r1, MemOperand(ip));
}


void FullCodeGenerator::DeferredCommands::EmitCommands() {
  DCHECK(!result_register().is(r1));
  __ Pop(result_register());  // Restore the accumulator.
  __ Pop(r1);                 // Get the token.
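  // Dispatch on the token pushed when the try-finally was entered: each
  // deferred command fires only when r1 matches its token.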
  for (DeferredCommand cmd : commands_) {
    Label skip;
    __ cmp(r1, Operand(Smi::FromInt(cmd.token)));
    __ b(ne, &skip);
    switch (cmd.command) {
      case kReturn:
        codegen_->EmitUnwindAndReturn();
        break;
      case kThrow:
        __ Push(result_register());
        __ CallRuntime(Runtime::kReThrow);
        break;
      case kContinue:
        codegen_->EmitContinue(cmd.target);
        break;
      case kBreak:
        codegen_->EmitBreak(cmd.target);
        break;
    }
    __ bind(&skip);
  }
}

#undef __


static Address GetInterruptImmediateLoadAddress(Address pc) {
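  // |pc| is the back-edge call's return address, i.e. it points just past the
  // <blx ip> (see the DCHECK in GetBackEdgeState), so in the simplest case
  // the load of the call target sits two instructions before it.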
  Address load_address = pc - 2 * Assembler::kInstrSize;
  if (!FLAG_enable_embedded_constant_pool) {
    DCHECK(Assembler::IsLdrPcImmediateOffset(Memory::int32_at(load_address)));
  } else if (Assembler::IsLdrPpRegOffset(Memory::int32_at(load_address))) {
    // This is an extended constant pool lookup.
    if (CpuFeatures::IsSupported(ARMv7)) {
      load_address -= 2 * Assembler::kInstrSize;
      DCHECK(Assembler::IsMovW(Memory::int32_at(load_address)));
      DCHECK(Assembler::IsMovT(
          Memory::int32_at(load_address + Assembler::kInstrSize)));
    } else {
      load_address -= 4 * Assembler::kInstrSize;
      DCHECK(Assembler::IsMovImmed(Memory::int32_at(load_address)));
      DCHECK(Assembler::IsOrrImmed(
          Memory::int32_at(load_address + Assembler::kInstrSize)));
      DCHECK(Assembler::IsOrrImmed(
          Memory::int32_at(load_address + 2 * Assembler::kInstrSize)));
      DCHECK(Assembler::IsOrrImmed(
          Memory::int32_at(load_address + 3 * Assembler::kInstrSize)));
    }
  } else if (CpuFeatures::IsSupported(ARMv7) &&
             Assembler::IsMovT(Memory::int32_at(load_address))) {
    // This is a movw / movt immediate load.
    load_address -= Assembler::kInstrSize;
    DCHECK(Assembler::IsMovW(Memory::int32_at(load_address)));
  } else if (!CpuFeatures::IsSupported(ARMv7) &&
             Assembler::IsOrrImmed(Memory::int32_at(load_address))) {
    // This is a mov / orr immediate load.
    load_address -= 3 * Assembler::kInstrSize;
    DCHECK(Assembler::IsMovImmed(Memory::int32_at(load_address)));
    DCHECK(Assembler::IsOrrImmed(
        Memory::int32_at(load_address + Assembler::kInstrSize)));
    DCHECK(Assembler::IsOrrImmed(
        Memory::int32_at(load_address + 2 * Assembler::kInstrSize)));
  } else {
    // This is a small constant pool lookup.
    DCHECK(Assembler::IsLdrPpImmediateOffset(Memory::int32_at(load_address)));
  }
  return load_address;
}


void BackEdgeTable::PatchAt(Code* unoptimized_code,
                            Address pc,
                            BackEdgeState target_state,
                            Code* replacement_code) {
  Address pc_immediate_load_address = GetInterruptImmediateLoadAddress(pc);
  Address branch_address = pc_immediate_load_address - Assembler::kInstrSize;
  Isolate* isolate = unoptimized_code->GetIsolate();
  CodePatcher patcher(isolate, branch_address, 1);
  switch (target_state) {
    case INTERRUPT:
    {
      // <decrement profiling counter>
      // bpl ok
      // ; load interrupt stub address into ip - either of (for ARMv7):
      // ; <small cp load>       |  <extended cp load>  |  <immediate load>
      // ldr ip, [pc/pp, #imm]   |  movw ip, #imm       |  movw ip, #imm
      //                         |  movt ip, #imm       |  movw ip, #imm
      //                         |  ldr  ip, [pp, ip]
      // ; or (for ARMv6):
      // ; <small cp load>       |  <extended cp load>  |  <immediate load>
      // ldr ip, [pc/pp, #imm]   |  mov ip, #imm        |  mov ip, #imm
      //                         |  orr ip, ip, #imm    |  orr ip, ip, #imm
      //                         |  orr ip, ip, #imm    |  orr ip, ip, #imm
      //                         |  orr ip, ip, #imm    |  orr ip, ip, #imm
      // blx ip
      // <reset profiling counter>
      // ok-label

      // Calculate branch offset to the ok-label - this is the difference
      // between the branch address and |pc| (which points at <blx ip>) plus
      // the length of the profile counter reset sequence.
      int branch_offset = pc - Instruction::kPCReadOffset - branch_address +
                          kProfileCounterResetSequenceLength;
      patcher.masm()->b(branch_offset, pl);
      break;
    }
    case ON_STACK_REPLACEMENT:
      // <decrement profiling counter>
      // mov r0, r0 (NOP)
      // ; load on-stack replacement address into ip - either of (for ARMv7):
      // ; <small cp load>       |  <extended cp load>  |  <immediate load>
      // ldr ip, [pc/pp, #imm]   |  movw ip, #imm       |  movw ip, #imm
      //                         |  movt ip, #imm       |  movw ip, #imm
      //                         |  ldr  ip, [pp, ip]
      // ; or (for ARMv6):
      // ; <small cp load>       |  <extended cp load>  |  <immediate load>
      // ldr ip, [pc/pp, #imm]   |  mov ip, #imm        |  mov ip, #imm
      //                         |  orr ip, ip, #imm    |  orr ip, ip, #imm
      //                         |  orr ip, ip, #imm    |  orr ip, ip, #imm
      //                         |  orr ip, ip, #imm    |  orr ip, ip, #imm
      // blx ip
      // <reset profiling counter>
      // ok-label
      patcher.masm()->nop();
      break;
  }

  // Replace the call address.
  Assembler::set_target_address_at(isolate, pc_immediate_load_address,
                                   unoptimized_code, replacement_code->entry());

  unoptimized_code->GetHeap()->incremental_marking()->RecordCodeTargetPatch(
      unoptimized_code, pc_immediate_load_address, replacement_code);
}


BackEdgeTable::BackEdgeState BackEdgeTable::GetBackEdgeState(
    Isolate* isolate,
    Code* unoptimized_code,
    Address pc) {
  DCHECK(Assembler::IsBlxIp(Memory::int32_at(pc - Assembler::kInstrSize)));

  Address pc_immediate_load_address = GetInterruptImmediateLoadAddress(pc);
  Address branch_address = pc_immediate_load_address - Assembler::kInstrSize;
#ifdef DEBUG
  Address interrupt_address = Assembler::target_address_at(
      pc_immediate_load_address, unoptimized_code);
#endif

  if (Assembler::IsBranch(Assembler::instr_at(branch_address))) {
    DCHECK(interrupt_address ==
           isolate->builtins()->InterruptCheck()->entry());
    return INTERRUPT;
  }

  DCHECK(Assembler::IsNop(Assembler::instr_at(branch_address)));

  DCHECK(interrupt_address ==
         isolate->builtins()->OnStackReplacement()->entry());
  return ON_STACK_REPLACEMENT;
}


}  // namespace internal
}  // namespace v8

#endif  // V8_TARGET_ARCH_ARM