// Copyright 2012 the V8 project authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.

#if V8_TARGET_ARCH_ARM

#include "src/ast/scopes.h"
#include "src/code-factory.h"
#include "src/code-stubs.h"
#include "src/codegen.h"
#include "src/debug/debug.h"
#include "src/full-codegen/full-codegen.h"
#include "src/ic/ic.h"
#include "src/parsing/parser.h"

#include "src/arm/code-stubs-arm.h"
#include "src/arm/macro-assembler-arm.h"

namespace v8 {
namespace internal {

#define __ ACCESS_MASM(masm())

// A patch site is a location in the code which it is possible to patch. This
// class has a number of methods to emit the code which is patchable and the
// method EmitPatchInfo to record a marker back to the patchable code. This
// marker is a cmp rx, #yyy instruction, where x * 0x00000fff + yyy (yyy being
// the raw 12 bit immediate value) is the delta from the pc to the first
// instruction of the patchable code.
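//
// Illustration (hypothetical numbers, not taken from any real compilation):
// with kOff12Mask == 0xfff, EmitPatchInfo() below encodes delta_to_patch_site
// as a register code plus a 12-bit immediate, e.g.
//   delta =  100  ->  cmp r0, #100   (0 * 0xfff + 100)
//   delta = 4200  ->  cmp r1, #105   (1 * 0xfff + 105)
// so the patching code can recover the delta by inverting the computation.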
class JumpPatchSite BASE_EMBEDDED {
 public:
  explicit JumpPatchSite(MacroAssembler* masm) : masm_(masm) {
#ifdef DEBUG
    info_emitted_ = false;
#endif
  }

  ~JumpPatchSite() {
    DCHECK(patch_site_.is_bound() == info_emitted_);
  }

  // When initially emitting this code, ensure that a jump is always generated
  // to skip the inlined smi code.
  void EmitJumpIfNotSmi(Register reg, Label* target) {
    DCHECK(!patch_site_.is_bound() && !info_emitted_);
    Assembler::BlockConstPoolScope block_const_pool(masm_);
    __ bind(&patch_site_);
    __ cmp(reg, Operand(reg));
    __ b(eq, target);  // Always taken before patched.
  }

  // When initially emitting this code, ensure that a jump is never generated
  // to skip the inlined smi code.
  void EmitJumpIfSmi(Register reg, Label* target) {
    DCHECK(!patch_site_.is_bound() && !info_emitted_);
    Assembler::BlockConstPoolScope block_const_pool(masm_);
    __ bind(&patch_site_);
    __ cmp(reg, Operand(reg));
    __ b(ne, target);  // Never taken before patched.
  }

  void EmitPatchInfo() {
    // Block literal pool emission whilst recording patch site information.
    Assembler::BlockConstPoolScope block_const_pool(masm_);
    if (patch_site_.is_bound()) {
      int delta_to_patch_site = masm_->InstructionsGeneratedSince(&patch_site_);
      Register reg;
      reg.set_code(delta_to_patch_site / kOff12Mask);
      __ cmp_raw_immediate(reg, delta_to_patch_site % kOff12Mask);
#ifdef DEBUG
      info_emitted_ = true;
#endif
    } else {
      __ nop();  // Signals no inlined code.
    }
  }

 private:
  MacroAssembler* masm() { return masm_; }
  MacroAssembler* masm_;
  Label patch_site_;
#ifdef DEBUG
  bool info_emitted_;
#endif
};


// Generate code for a JS function. On entry to the function the receiver
// and arguments have been pushed on the stack left to right. The actual
// argument count matches the formal parameter count expected by the
// function.
//
// The live registers are:
//   o r1: the JS function object being called (i.e., ourselves)
//   o r3: the new target value
//   o cp: our context
//   o pp: our caller's constant pool pointer (if enabled)
//   o fp: our caller's frame pointer
//   o sp: stack pointer
//   o lr: return address
//
// The function builds a JS frame. Please see JavaScriptFrameConstants in
// frames-arm.h for its layout.
void FullCodeGenerator::Generate() {
  CompilationInfo* info = info_;
  profiling_counter_ = isolate()->factory()->NewCell(
      Handle<Smi>(Smi::FromInt(FLAG_interrupt_budget), isolate()));
  SetFunctionPosition(literal());
  Comment cmnt(masm_, "[ function compiled by full code generator");

  ProfileEntryHookStub::MaybeCallEntryHook(masm_);

  if (FLAG_debug_code && info->ExpectsJSReceiverAsReceiver()) {
    int receiver_offset = info->scope()->num_parameters() * kPointerSize;
    __ ldr(r2, MemOperand(sp, receiver_offset));
    __ AssertNotSmi(r2);
    __ CompareObjectType(r2, r2, no_reg, FIRST_JS_RECEIVER_TYPE);
    __ Assert(ge, kSloppyFunctionExpectsJSReceiverReceiver);
  }

  // Open a frame scope to indicate that there is a frame on the stack. The
  // MANUAL indicates that the scope shouldn't actually generate code to set up
  // the frame (that is done below).
  FrameScope frame_scope(masm_, StackFrame::MANUAL);

  info->set_prologue_offset(masm_->pc_offset());
  __ Prologue(info->GeneratePreagedPrologue());

  { Comment cmnt(masm_, "[ Allocate locals");
    int locals_count = info->scope()->num_stack_slots();
    // Generators allocate locals, if any, in context slots.
    DCHECK(!IsGeneratorFunction(info->literal()->kind()) || locals_count == 0);
    OperandStackDepthIncrement(locals_count);
    if (locals_count > 0) {
      if (locals_count >= 128) {
        Label ok;
        __ sub(r9, sp, Operand(locals_count * kPointerSize));
        __ LoadRoot(r2, Heap::kRealStackLimitRootIndex);
        __ cmp(r9, Operand(r2));
        __ b(hs, &ok);
        __ CallRuntime(Runtime::kThrowStackOverflow);
        __ bind(&ok);
      }
      __ LoadRoot(r9, Heap::kUndefinedValueRootIndex);
      int kMaxPushes = FLAG_optimize_for_size ? 4 : 32;
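      // Illustrative arithmetic (hypothetical count, not from a real
      // function): with locals_count == 70 and kMaxPushes == 32, the loop
      // below runs 70 / 32 == 2 iterations of 32 pushes each, and the
      // trailing loop emits the remaining 70 % 32 == 6 pushes.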
      if (locals_count >= kMaxPushes) {
        int loop_iterations = locals_count / kMaxPushes;
        __ mov(r2, Operand(loop_iterations));
        Label loop_header;
        __ bind(&loop_header);
        // Do pushes.
        for (int i = 0; i < kMaxPushes; i++) {
          __ push(r9);
        }
        // Continue loop if not done.
        __ sub(r2, r2, Operand(1), SetCC);
        __ b(&loop_header, ne);
      }
      int remaining = locals_count % kMaxPushes;
      // Emit the remaining pushes.
      for (int i = 0; i < remaining; i++) {
        __ push(r9);
      }
    }
  }

  bool function_in_register_r1 = true;

  // Possibly allocate a local context.
  if (info->scope()->num_heap_slots() > 0) {
    // Argument to NewContext is the function, which is still in r1.
    Comment cmnt(masm_, "[ Allocate context");
    bool need_write_barrier = true;
    int slots = info->scope()->num_heap_slots() - Context::MIN_CONTEXT_SLOTS;
    if (info->scope()->is_script_scope()) {
      __ push(r1);
      __ Push(info->scope()->GetScopeInfo(info->isolate()));
      __ CallRuntime(Runtime::kNewScriptContext);
      PrepareForBailoutForId(BailoutId::ScriptContext(), TOS_REG);
      // The new target value is not used, clobbering is safe.
      DCHECK_NULL(info->scope()->new_target_var());
    } else {
      if (info->scope()->new_target_var() != nullptr) {
        __ push(r3);  // Preserve new target.
      }
      if (slots <= FastNewContextStub::kMaximumSlots) {
        FastNewContextStub stub(isolate(), slots);
        __ CallStub(&stub);
        // Result of FastNewContextStub is always in new space.
        need_write_barrier = false;
      } else {
        __ push(r1);
        __ CallRuntime(Runtime::kNewFunctionContext);
      }
      if (info->scope()->new_target_var() != nullptr) {
        __ pop(r3);  // Restore new target.
      }
    }
    function_in_register_r1 = false;
    // Context is returned in r0. It replaces the context passed to us.
    // It's saved in the stack and kept live in cp.
    __ mov(cp, r0);
    __ str(r0, MemOperand(fp, StandardFrameConstants::kContextOffset));
    // Copy any necessary parameters into the context.
    int num_parameters = info->scope()->num_parameters();
    int first_parameter = info->scope()->has_this_declaration() ? -1 : 0;
    for (int i = first_parameter; i < num_parameters; i++) {
      Variable* var = (i == -1) ? scope()->receiver() : scope()->parameter(i);
      if (var->IsContextSlot()) {
        int parameter_offset = StandardFrameConstants::kCallerSPOffset +
                               (num_parameters - 1 - i) * kPointerSize;
        // Load parameter from stack.
        __ ldr(r0, MemOperand(fp, parameter_offset));
        // Store it in the context.
        MemOperand target = ContextMemOperand(cp, var->index());
        __ str(r0, target);

        // Update the write barrier.
        if (need_write_barrier) {
          __ RecordWriteContextSlot(cp, target.offset(), r0, r2,
                                    kLRHasBeenSaved, kDontSaveFPRegs);
        } else if (FLAG_debug_code) {
          Label done;
          __ JumpIfInNewSpace(cp, r0, &done);
          __ Abort(kExpectedNewSpaceObject);
          __ bind(&done);
        }
      }
    }
  }

  // The registers holding this function and the new target are both trashed
  // if we bail out here. But since that can only happen when the new target
  // is not used and we allocate a context, the value of
  // |function_in_register_r1| is still correct.
  PrepareForBailoutForId(BailoutId::FunctionContext(), NO_REGISTERS);

  // Possibly set up a local binding to the this function which is used in
  // derived constructors with super calls.
  Variable* this_function_var = scope()->this_function_var();
  if (this_function_var != nullptr) {
    Comment cmnt(masm_, "[ This function");
    if (!function_in_register_r1) {
      __ ldr(r1, MemOperand(fp, JavaScriptFrameConstants::kFunctionOffset));
      // The write barrier clobbers the register again, so keep it marked as
      // such.
    }
    SetVar(this_function_var, r1, r0, r2);
  }

  // Possibly set up a local binding to the new target value.
  Variable* new_target_var = scope()->new_target_var();
  if (new_target_var != nullptr) {
    Comment cmnt(masm_, "[ new.target");
    SetVar(new_target_var, r3, r0, r2);
  }

  // Possibly allocate RestParameters
  int rest_index;
  Variable* rest_param = scope()->rest_parameter(&rest_index);
  if (rest_param) {
    Comment cmnt(masm_, "[ Allocate rest parameter array");
    if (!function_in_register_r1) {
      __ ldr(r1, MemOperand(fp, JavaScriptFrameConstants::kFunctionOffset));
    }
    FastNewRestParameterStub stub(isolate());
    __ CallStub(&stub);
    function_in_register_r1 = false;
    SetVar(rest_param, r0, r1, r2);
  }

  Variable* arguments = scope()->arguments();
  if (arguments != NULL) {
    // Function uses arguments object.
    Comment cmnt(masm_, "[ Allocate arguments object");
    if (!function_in_register_r1) {
      // Load this again, if it's used by the local context below.
      __ ldr(r1, MemOperand(fp, JavaScriptFrameConstants::kFunctionOffset));
    }
    if (is_strict(language_mode()) || !has_simple_parameters()) {
      FastNewStrictArgumentsStub stub(isolate());
      __ CallStub(&stub);
    } else if (literal()->has_duplicate_parameters()) {
      __ Push(r1);
      __ CallRuntime(Runtime::kNewSloppyArguments_Generic);
    } else {
      FastNewSloppyArgumentsStub stub(isolate());
      __ CallStub(&stub);
    }

    SetVar(arguments, r0, r1, r2);
  }

  if (FLAG_trace) {
    __ CallRuntime(Runtime::kTraceEnter);
  }

  // Visit the declarations and body.
  PrepareForBailoutForId(BailoutId::FunctionEntry(), NO_REGISTERS);
  {
    Comment cmnt(masm_, "[ Declarations");
    VisitDeclarations(scope()->declarations());
  }

  // Assert that the declarations do not use ICs. Otherwise the debugger
  // won't be able to redirect a PC at an IC to the correct IC in newly
  // recompiled code.
  DCHECK_EQ(0, ic_total_count_);

  {
    Comment cmnt(masm_, "[ Stack check");
    PrepareForBailoutForId(BailoutId::Declarations(), NO_REGISTERS);
    Label ok;
    __ LoadRoot(ip, Heap::kStackLimitRootIndex);
    __ cmp(sp, Operand(ip));
    __ b(hs, &ok);
    Handle<Code> stack_check = isolate()->builtins()->StackCheck();
    PredictableCodeSizeScope predictable(masm_);
    predictable.ExpectSize(
        masm_->CallSize(stack_check, RelocInfo::CODE_TARGET));
    __ Call(stack_check, RelocInfo::CODE_TARGET);
    __ bind(&ok);
  }

  {
    Comment cmnt(masm_, "[ Body");
    DCHECK(loop_depth() == 0);
    VisitStatements(literal()->body());
    DCHECK(loop_depth() == 0);
  }

  // Always emit a 'return undefined' in case control fell off the end of
  // the body.
  { Comment cmnt(masm_, "[ return <undefined>;");
    __ LoadRoot(r0, Heap::kUndefinedValueRootIndex);
  }
  EmitReturnSequence();

  // Force emit the constant pool, so it doesn't get emitted in the middle
  // of the back edge table.
  masm()->CheckConstPool(true, false);
}


void FullCodeGenerator::ClearAccumulator() {
  __ mov(r0, Operand(Smi::FromInt(0)));
}


void FullCodeGenerator::EmitProfilingCounterDecrement(int delta) {
  __ mov(r2, Operand(profiling_counter_));
  __ ldr(r3, FieldMemOperand(r2, Cell::kValueOffset));
  __ sub(r3, r3, Operand(Smi::FromInt(delta)), SetCC);
  __ str(r3, FieldMemOperand(r2, Cell::kValueOffset));
}


#ifdef CAN_USE_ARMV7_INSTRUCTIONS
static const int kProfileCounterResetSequenceLength = 5 * Assembler::kInstrSize;
#else
static const int kProfileCounterResetSequenceLength = 7 * Assembler::kInstrSize;
#endif


void FullCodeGenerator::EmitProfilingCounterReset() {
  Assembler::BlockConstPoolScope block_const_pool(masm_);
  PredictableCodeSizeScope predictable_code_size_scope(
      masm_, kProfileCounterResetSequenceLength);
  Label start;
  __ bind(&start);
  int reset_value = FLAG_interrupt_budget;
  __ mov(r2, Operand(profiling_counter_));
  // The mov instruction above can take from 1 to 3 instructions (on ARMv7) or
  // from 1 to 5 instructions (on ARMv6), depending on whether an extended
  // constant pool is used - insert nops to compensate.
  int expected_instr_count =
      (kProfileCounterResetSequenceLength / Assembler::kInstrSize) - 2;
  DCHECK(masm_->InstructionsGeneratedSince(&start) <= expected_instr_count);
  while (masm_->InstructionsGeneratedSince(&start) != expected_instr_count) {
    __ nop();
  }
  __ mov(r3, Operand(Smi::FromInt(reset_value)));
  __ str(r3, FieldMemOperand(r2, Cell::kValueOffset));
}


void FullCodeGenerator::EmitBackEdgeBookkeeping(IterationStatement* stmt,
                                                Label* back_edge_target) {
  Comment cmnt(masm_, "[ Back edge bookkeeping");
  // Block literal pools whilst emitting back edge code.
  Assembler::BlockConstPoolScope block_const_pool(masm_);
  Label ok;

  DCHECK(back_edge_target->is_bound());
  int distance = masm_->SizeOfCodeGeneratedSince(back_edge_target);
  int weight = Min(kMaxBackEdgeWeight,
                   Max(1, distance / kCodeSizeMultiplier));
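  // The weight scales the interrupt-budget decrement with the amount of code
  // in the loop body. Worked example with hypothetical numbers (the actual
  // kCodeSizeMultiplier and kMaxBackEdgeWeight constants are declared on
  // FullCodeGenerator): a 1200-byte back edge distance and a multiplier of
  // 150 would give weight = Min(kMaxBackEdgeWeight, Max(1, 1200 / 150)) = 8,
  // assuming kMaxBackEdgeWeight >= 8.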
  EmitProfilingCounterDecrement(weight);
  __ b(pl, &ok);
  __ Call(isolate()->builtins()->InterruptCheck(), RelocInfo::CODE_TARGET);

  // Record a mapping of this PC offset to the OSR id. This is used to find
  // the AST id from the unoptimized code in order to use it as a key into
  // the deoptimization input data found in the optimized code.
  RecordBackEdge(stmt->OsrEntryId());

  EmitProfilingCounterReset();

  __ bind(&ok);
  PrepareForBailoutForId(stmt->EntryId(), NO_REGISTERS);
  // Record a mapping of the OSR id to this PC. This is used if the OSR
  // entry becomes the target of a bailout. We don't expect it to be, but
  // we want it to work if it is.
  PrepareForBailoutForId(stmt->OsrEntryId(), NO_REGISTERS);
}

void FullCodeGenerator::EmitProfilingCounterHandlingForReturnSequence(
    bool is_tail_call) {
  // Pretend that the exit is a backwards jump to the entry.
  int weight = 1;
  if (info_->ShouldSelfOptimize()) {
    weight = FLAG_interrupt_budget / FLAG_self_opt_count;
  } else {
    int distance = masm_->pc_offset();
    weight = Min(kMaxBackEdgeWeight, Max(1, distance / kCodeSizeMultiplier));
  }
  EmitProfilingCounterDecrement(weight);
  Label ok;
  __ b(pl, &ok);
  // Don't need to save result register if we are going to do a tail call.
  if (!is_tail_call) {
    __ push(r0);
  }
  __ Call(isolate()->builtins()->InterruptCheck(), RelocInfo::CODE_TARGET);
  if (!is_tail_call) {
    __ pop(r0);
  }
  EmitProfilingCounterReset();
  __ bind(&ok);
}

void FullCodeGenerator::EmitReturnSequence() {
  Comment cmnt(masm_, "[ Return sequence");
  if (return_label_.is_bound()) {
    __ b(&return_label_);
  } else {
    __ bind(&return_label_);
    if (FLAG_trace) {
      // Push the return value on the stack as the parameter.
      // Runtime::TraceExit returns its parameter in r0.
      __ push(r0);
      __ CallRuntime(Runtime::kTraceExit);
    }
    EmitProfilingCounterHandlingForReturnSequence(false);

    // Make sure that the constant pool is not emitted inside of the return
    // sequence.
    { Assembler::BlockConstPoolScope block_const_pool(masm_);
      int32_t arg_count = info_->scope()->num_parameters() + 1;
      int32_t sp_delta = arg_count * kPointerSize;
      SetReturnPosition(literal());
      // TODO(svenpanne) The code below is sometimes 4 words, sometimes 5!
      PredictableCodeSizeScope predictable(masm_, -1);
      __ LeaveFrame(StackFrame::JAVA_SCRIPT);
      { ConstantPoolUnavailableScope constant_pool_unavailable(masm_);
        __ add(sp, sp, Operand(sp_delta));
        __ Jump(lr);
      }
    }
  }
}


void FullCodeGenerator::StackValueContext::Plug(Variable* var) const {
  DCHECK(var->IsStackAllocated() || var->IsContextSlot());
  codegen()->GetVar(result_register(), var);
  codegen()->PushOperand(result_register());
}


void FullCodeGenerator::EffectContext::Plug(Heap::RootListIndex index) const {
}


void FullCodeGenerator::AccumulatorValueContext::Plug(
    Heap::RootListIndex index) const {
  __ LoadRoot(result_register(), index);
}


void FullCodeGenerator::StackValueContext::Plug(
    Heap::RootListIndex index) const {
  __ LoadRoot(result_register(), index);
  codegen()->PushOperand(result_register());
}


void FullCodeGenerator::TestContext::Plug(Heap::RootListIndex index) const {
  codegen()->PrepareForBailoutBeforeSplit(condition(),
                                          true,
                                          true_label_,
                                          false_label_);
  if (index == Heap::kUndefinedValueRootIndex ||
      index == Heap::kNullValueRootIndex ||
      index == Heap::kFalseValueRootIndex) {
    if (false_label_ != fall_through_) __ b(false_label_);
  } else if (index == Heap::kTrueValueRootIndex) {
    if (true_label_ != fall_through_) __ b(true_label_);
  } else {
    __ LoadRoot(result_register(), index);
    codegen()->DoTest(this);
  }
}


void FullCodeGenerator::EffectContext::Plug(Handle<Object> lit) const {
}


void FullCodeGenerator::AccumulatorValueContext::Plug(
    Handle<Object> lit) const {
  __ mov(result_register(), Operand(lit));
}


void FullCodeGenerator::StackValueContext::Plug(Handle<Object> lit) const {
  // Immediates cannot be pushed directly.
  __ mov(result_register(), Operand(lit));
  codegen()->PushOperand(result_register());
}


void FullCodeGenerator::TestContext::Plug(Handle<Object> lit) const {
  codegen()->PrepareForBailoutBeforeSplit(condition(),
                                          true,
                                          true_label_,
                                          false_label_);
  DCHECK(lit->IsNull() || lit->IsUndefined() || !lit->IsUndetectable());
  if (lit->IsUndefined() || lit->IsNull() || lit->IsFalse()) {
    if (false_label_ != fall_through_) __ b(false_label_);
  } else if (lit->IsTrue() || lit->IsJSObject()) {
    if (true_label_ != fall_through_) __ b(true_label_);
  } else if (lit->IsString()) {
    if (String::cast(*lit)->length() == 0) {
      if (false_label_ != fall_through_) __ b(false_label_);
    } else {
      if (true_label_ != fall_through_) __ b(true_label_);
    }
  } else if (lit->IsSmi()) {
    if (Smi::cast(*lit)->value() == 0) {
      if (false_label_ != fall_through_) __ b(false_label_);
    } else {
      if (true_label_ != fall_through_) __ b(true_label_);
    }
  } else {
    // For simplicity we always test the accumulator register.
    __ mov(result_register(), Operand(lit));
    codegen()->DoTest(this);
  }
}


void FullCodeGenerator::StackValueContext::DropAndPlug(int count,
                                                       Register reg) const {
  DCHECK(count > 0);
  if (count > 1) codegen()->DropOperands(count - 1);
  __ str(reg, MemOperand(sp, 0));
}


void FullCodeGenerator::EffectContext::Plug(Label* materialize_true,
                                            Label* materialize_false) const {
  DCHECK(materialize_true == materialize_false);
  __ bind(materialize_true);
}


void FullCodeGenerator::AccumulatorValueContext::Plug(
    Label* materialize_true,
    Label* materialize_false) const {
  Label done;
  __ bind(materialize_true);
  __ LoadRoot(result_register(), Heap::kTrueValueRootIndex);
  __ jmp(&done);
  __ bind(materialize_false);
  __ LoadRoot(result_register(), Heap::kFalseValueRootIndex);
  __ bind(&done);
}


void FullCodeGenerator::StackValueContext::Plug(
    Label* materialize_true,
    Label* materialize_false) const {
  Label done;
  __ bind(materialize_true);
  __ LoadRoot(ip, Heap::kTrueValueRootIndex);
  __ jmp(&done);
  __ bind(materialize_false);
  __ LoadRoot(ip, Heap::kFalseValueRootIndex);
  __ bind(&done);
  codegen()->PushOperand(ip);
}


void FullCodeGenerator::TestContext::Plug(Label* materialize_true,
                                          Label* materialize_false) const {
  DCHECK(materialize_true == true_label_);
  DCHECK(materialize_false == false_label_);
}


void FullCodeGenerator::AccumulatorValueContext::Plug(bool flag) const {
  Heap::RootListIndex value_root_index =
      flag ? Heap::kTrueValueRootIndex : Heap::kFalseValueRootIndex;
  __ LoadRoot(result_register(), value_root_index);
}


void FullCodeGenerator::StackValueContext::Plug(bool flag) const {
  Heap::RootListIndex value_root_index =
      flag ? Heap::kTrueValueRootIndex : Heap::kFalseValueRootIndex;
  __ LoadRoot(ip, value_root_index);
  codegen()->PushOperand(ip);
}


void FullCodeGenerator::TestContext::Plug(bool flag) const {
  codegen()->PrepareForBailoutBeforeSplit(condition(),
                                          true,
                                          true_label_,
                                          false_label_);
  if (flag) {
    if (true_label_ != fall_through_) __ b(true_label_);
  } else {
    if (false_label_ != fall_through_) __ b(false_label_);
  }
}


void FullCodeGenerator::DoTest(Expression* condition,
                               Label* if_true,
                               Label* if_false,
                               Label* fall_through) {
  Handle<Code> ic = ToBooleanICStub::GetUninitialized(isolate());
  CallIC(ic, condition->test_id());
  __ CompareRoot(result_register(), Heap::kTrueValueRootIndex);
  Split(eq, if_true, if_false, fall_through);
}


void FullCodeGenerator::Split(Condition cond,
                              Label* if_true,
                              Label* if_false,
                              Label* fall_through) {
  if (if_false == fall_through) {
    __ b(cond, if_true);
  } else if (if_true == fall_through) {
    __ b(NegateCondition(cond), if_false);
  } else {
    __ b(cond, if_true);
    __ b(if_false);
  }
}
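// Note on Split(): only the branches that are actually needed are emitted.
// For example, when the false label is also the fall-through target, a single
// conditional branch to if_true suffices; both a conditional and an
// unconditional branch are emitted only when neither label falls through.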


MemOperand FullCodeGenerator::StackOperand(Variable* var) {
  DCHECK(var->IsStackAllocated());
  // Offset is negative because higher indexes are at lower addresses.
  int offset = -var->index() * kPointerSize;
  // Adjust by a (parameter or local) base offset.
  if (var->IsParameter()) {
    offset += (info_->scope()->num_parameters() + 1) * kPointerSize;
  } else {
    offset += JavaScriptFrameConstants::kLocal0Offset;
  }
  return MemOperand(fp, offset);
}


MemOperand FullCodeGenerator::VarOperand(Variable* var, Register scratch) {
  DCHECK(var->IsContextSlot() || var->IsStackAllocated());
  if (var->IsContextSlot()) {
    int context_chain_length = scope()->ContextChainLength(var->scope());
    __ LoadContext(scratch, context_chain_length);
    return ContextMemOperand(scratch, var->index());
  } else {
    return StackOperand(var);
  }
}


void FullCodeGenerator::GetVar(Register dest, Variable* var) {
  // Use destination as scratch.
  MemOperand location = VarOperand(var, dest);
  __ ldr(dest, location);
}


void FullCodeGenerator::SetVar(Variable* var,
                               Register src,
                               Register scratch0,
                               Register scratch1) {
  DCHECK(var->IsContextSlot() || var->IsStackAllocated());
  DCHECK(!scratch0.is(src));
  DCHECK(!scratch0.is(scratch1));
  DCHECK(!scratch1.is(src));
  MemOperand location = VarOperand(var, scratch0);
  __ str(src, location);

  // Emit the write barrier code if the location is in the heap.
  if (var->IsContextSlot()) {
    __ RecordWriteContextSlot(scratch0,
                              location.offset(),
                              src,
                              scratch1,
                              kLRHasBeenSaved,
                              kDontSaveFPRegs);
  }
}


void FullCodeGenerator::PrepareForBailoutBeforeSplit(Expression* expr,
                                                     bool should_normalize,
                                                     Label* if_true,
                                                     Label* if_false) {
  // Only prepare for bailouts before splits if we're in a test
  // context. Otherwise, we let the Visit function deal with the
  // preparation to avoid preparing with the same AST id twice.
  if (!context()->IsTest()) return;

  Label skip;
  if (should_normalize) __ b(&skip);
  PrepareForBailout(expr, TOS_REG);
  if (should_normalize) {
    __ LoadRoot(ip, Heap::kTrueValueRootIndex);
    __ cmp(r0, ip);
    Split(eq, if_true, if_false, NULL);
    __ bind(&skip);
  }
}


void FullCodeGenerator::EmitDebugCheckDeclarationContext(Variable* variable) {
  // The variable in the declaration always resides in the current function
  // context.
  DCHECK_EQ(0, scope()->ContextChainLength(variable->scope()));
  if (FLAG_debug_code) {
    // Check that we're not inside a with or catch context.
    __ ldr(r1, FieldMemOperand(cp, HeapObject::kMapOffset));
    __ CompareRoot(r1, Heap::kWithContextMapRootIndex);
    __ Check(ne, kDeclarationInWithContext);
    __ CompareRoot(r1, Heap::kCatchContextMapRootIndex);
    __ Check(ne, kDeclarationInCatchContext);
  }
}


void FullCodeGenerator::VisitVariableDeclaration(
    VariableDeclaration* declaration) {
  // If it was not possible to allocate the variable at compile time, we
  // need to "declare" it at runtime to make sure it actually exists in the
  // local context.
  VariableProxy* proxy = declaration->proxy();
  VariableMode mode = declaration->mode();
  Variable* variable = proxy->var();
  bool hole_init = mode == LET || mode == CONST || mode == CONST_LEGACY;
  switch (variable->location()) {
    case VariableLocation::GLOBAL:
    case VariableLocation::UNALLOCATED:
      globals_->Add(variable->name(), zone());
      globals_->Add(variable->binding_needs_init()
                        ? isolate()->factory()->the_hole_value()
                        : isolate()->factory()->undefined_value(),
                    zone());
      break;

    case VariableLocation::PARAMETER:
    case VariableLocation::LOCAL:
      if (hole_init) {
        Comment cmnt(masm_, "[ VariableDeclaration");
        __ LoadRoot(r0, Heap::kTheHoleValueRootIndex);
        __ str(r0, StackOperand(variable));
      }
      break;

    case VariableLocation::CONTEXT:
      if (hole_init) {
        Comment cmnt(masm_, "[ VariableDeclaration");
        EmitDebugCheckDeclarationContext(variable);
        __ LoadRoot(r0, Heap::kTheHoleValueRootIndex);
        __ str(r0, ContextMemOperand(cp, variable->index()));
        // No write barrier since the_hole_value is in old space.
        PrepareForBailoutForId(proxy->id(), NO_REGISTERS);
      }
      break;

    case VariableLocation::LOOKUP: {
      Comment cmnt(masm_, "[ VariableDeclaration");
      __ mov(r2, Operand(variable->name()));
      // Declaration nodes are always introduced in one of four modes.
      DCHECK(IsDeclaredVariableMode(mode));
      // Push initial value, if any.
      // Note: For variables we must not push an initial value (such as
      // 'undefined') because we may have a (legal) redeclaration and we
      // must not destroy the current value.
      if (hole_init) {
        __ LoadRoot(r0, Heap::kTheHoleValueRootIndex);
      } else {
        __ mov(r0, Operand(Smi::FromInt(0)));  // Indicates no initial value.
      }
      __ Push(r2, r0);
      __ Push(Smi::FromInt(variable->DeclarationPropertyAttributes()));
      __ CallRuntime(Runtime::kDeclareLookupSlot);
      break;
    }
  }
}


void FullCodeGenerator::VisitFunctionDeclaration(
    FunctionDeclaration* declaration) {
  VariableProxy* proxy = declaration->proxy();
  Variable* variable = proxy->var();
  switch (variable->location()) {
    case VariableLocation::GLOBAL:
    case VariableLocation::UNALLOCATED: {
      globals_->Add(variable->name(), zone());
      Handle<SharedFunctionInfo> function =
          Compiler::GetSharedFunctionInfo(declaration->fun(), script(), info_);
      // Check for stack-overflow exception.
      if (function.is_null()) return SetStackOverflow();
      globals_->Add(function, zone());
      break;
    }

    case VariableLocation::PARAMETER:
    case VariableLocation::LOCAL: {
      Comment cmnt(masm_, "[ FunctionDeclaration");
      VisitForAccumulatorValue(declaration->fun());
      __ str(result_register(), StackOperand(variable));
      break;
    }

    case VariableLocation::CONTEXT: {
      Comment cmnt(masm_, "[ FunctionDeclaration");
      EmitDebugCheckDeclarationContext(variable);
      VisitForAccumulatorValue(declaration->fun());
      __ str(result_register(), ContextMemOperand(cp, variable->index()));
      int offset = Context::SlotOffset(variable->index());
      // We know that we have written a function, which is not a smi.
      __ RecordWriteContextSlot(cp,
                                offset,
                                result_register(),
                                r2,
                                kLRHasBeenSaved,
                                kDontSaveFPRegs,
                                EMIT_REMEMBERED_SET,
                                OMIT_SMI_CHECK);
      PrepareForBailoutForId(proxy->id(), NO_REGISTERS);
      break;
    }

    case VariableLocation::LOOKUP: {
      Comment cmnt(masm_, "[ FunctionDeclaration");
      __ mov(r2, Operand(variable->name()));
      PushOperand(r2);
      // Push initial value for function declaration.
      VisitForStackValue(declaration->fun());
      PushOperand(Smi::FromInt(variable->DeclarationPropertyAttributes()));
      CallRuntimeWithOperands(Runtime::kDeclareLookupSlot);
      break;
    }
  }
}


void FullCodeGenerator::DeclareGlobals(Handle<FixedArray> pairs) {
  // Call the runtime to declare the globals.
  __ mov(r1, Operand(pairs));
  __ mov(r0, Operand(Smi::FromInt(DeclareGlobalsFlags())));
  __ Push(r1, r0);
  __ CallRuntime(Runtime::kDeclareGlobals);
  // Return value is ignored.
}


void FullCodeGenerator::DeclareModules(Handle<FixedArray> descriptions) {
  // Call the runtime to declare the modules.
  __ Push(descriptions);
  __ CallRuntime(Runtime::kDeclareModules);
  // Return value is ignored.
}


void FullCodeGenerator::VisitSwitchStatement(SwitchStatement* stmt) {
  Comment cmnt(masm_, "[ SwitchStatement");
  Breakable nested_statement(this, stmt);
  SetStatementPosition(stmt);

  // Keep the switch value on the stack until a case matches.
  VisitForStackValue(stmt->tag());
  PrepareForBailoutForId(stmt->EntryId(), NO_REGISTERS);

  ZoneList<CaseClause*>* clauses = stmt->cases();
  CaseClause* default_clause = NULL;  // Can occur anywhere in the list.

  Label next_test;  // Recycled for each test.
  // Compile all the tests with branches to their bodies.
  for (int i = 0; i < clauses->length(); i++) {
    CaseClause* clause = clauses->at(i);
    clause->body_target()->Unuse();

    // The default is not a test, but remember it as final fall through.
    if (clause->is_default()) {
      default_clause = clause;
      continue;
    }

    Comment cmnt(masm_, "[ Case comparison");
    __ bind(&next_test);
    next_test.Unuse();

    // Compile the label expression.
    VisitForAccumulatorValue(clause->label());

    // Perform the comparison as if via '==='.
    __ ldr(r1, MemOperand(sp, 0));  // Switch value.
    bool inline_smi_code = ShouldInlineSmiCase(Token::EQ_STRICT);
    JumpPatchSite patch_site(masm_);
    if (inline_smi_code) {
      Label slow_case;
      __ orr(r2, r1, r0);
      patch_site.EmitJumpIfNotSmi(r2, &slow_case);

      __ cmp(r1, r0);
      __ b(ne, &next_test);
      __ Drop(1);  // Switch value is no longer needed.
      __ b(clause->body_target());
      __ bind(&slow_case);
    }

    // Record position before stub call for type feedback.
    SetExpressionPosition(clause);
    Handle<Code> ic =
        CodeFactory::CompareIC(isolate(), Token::EQ_STRICT).code();
    CallIC(ic, clause->CompareId());
    patch_site.EmitPatchInfo();

    Label skip;
    __ b(&skip);
    PrepareForBailout(clause, TOS_REG);
    __ LoadRoot(ip, Heap::kTrueValueRootIndex);
    __ cmp(r0, ip);
    __ b(ne, &next_test);
    __ Drop(1);
    __ jmp(clause->body_target());
    __ bind(&skip);

    __ cmp(r0, Operand::Zero());
    __ b(ne, &next_test);
    __ Drop(1);  // Switch value is no longer needed.
    __ b(clause->body_target());
  }

  // Discard the test value and jump to the default if present, otherwise to
  // the end of the statement.
  __ bind(&next_test);
  DropOperands(1);  // Switch value is no longer needed.
  if (default_clause == NULL) {
    __ b(nested_statement.break_label());
  } else {
    __ b(default_clause->body_target());
  }

  // Compile all the case bodies.
  for (int i = 0; i < clauses->length(); i++) {
    Comment cmnt(masm_, "[ Case body");
    CaseClause* clause = clauses->at(i);
    __ bind(clause->body_target());
    PrepareForBailoutForId(clause->EntryId(), NO_REGISTERS);
    VisitStatements(clause->statements());
  }

  __ bind(nested_statement.break_label());
  PrepareForBailoutForId(stmt->ExitId(), NO_REGISTERS);
}


void FullCodeGenerator::VisitForInStatement(ForInStatement* stmt) {
  Comment cmnt(masm_, "[ ForInStatement");
  SetStatementPosition(stmt, SKIP_BREAK);

  FeedbackVectorSlot slot = stmt->ForInFeedbackSlot();

  // Get the object to enumerate over.
  SetExpressionAsStatementPosition(stmt->enumerable());
  VisitForAccumulatorValue(stmt->enumerable());
  OperandStackDepthIncrement(5);
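  // The five operand stack slots reserved here hold, from the bottom up: the
  // enumerable object, the map (or a Smi(1) sentinel on the slow, fixed-array
  // path), the enum cache (or the fixed array of keys), the cache length, and
  // the current index. They are pushed further down and dropped again at
  // loop_statement.break_label().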

  Label loop, exit;
  Iteration loop_statement(this, stmt);
  increment_loop_depth();

  // If the object is null or undefined, skip over the loop, otherwise convert
  // it to a JS receiver. See ECMA-262 version 5, section 12.6.4.
  Label convert, done_convert;
  __ JumpIfSmi(r0, &convert);
  __ CompareObjectType(r0, r1, r1, FIRST_JS_RECEIVER_TYPE);
  __ b(ge, &done_convert);
  __ CompareRoot(r0, Heap::kNullValueRootIndex);
  __ b(eq, &exit);
  __ CompareRoot(r0, Heap::kUndefinedValueRootIndex);
  __ b(eq, &exit);
  __ bind(&convert);
  ToObjectStub stub(isolate());
  __ CallStub(&stub);
  __ bind(&done_convert);
  PrepareForBailoutForId(stmt->ToObjectId(), TOS_REG);
  __ push(r0);

  // Check cache validity in generated code. This is a fast case for
  // the JSObject::IsSimpleEnum cache validity checks. If we cannot
  // guarantee cache validity, call the runtime system to check cache
  // validity or get the property names in a fixed array.
  // Note: Proxies never have an enum cache, so will always take the
  // slow path.
  Label call_runtime;
  __ CheckEnumCache(&call_runtime);

  // The enum cache is valid. Load the map of the object being
  // iterated over and use the cache for the iteration.
  Label use_cache;
  __ ldr(r0, FieldMemOperand(r0, HeapObject::kMapOffset));
  __ b(&use_cache);

  // Get the set of properties to enumerate.
  __ bind(&call_runtime);
  __ push(r0);  // Duplicate the enumerable object on the stack.
  __ CallRuntime(Runtime::kForInEnumerate);
  PrepareForBailoutForId(stmt->EnumId(), TOS_REG);

  // If we got a map from the runtime call, we can do a fast
  // modification check. Otherwise, we got a fixed array, and we have
  // to do a slow check.
  Label fixed_array;
  __ ldr(r2, FieldMemOperand(r0, HeapObject::kMapOffset));
  __ LoadRoot(ip, Heap::kMetaMapRootIndex);
  __ cmp(r2, ip);
  __ b(ne, &fixed_array);

  // We got a map in register r0. Get the enumeration cache from it.
  Label no_descriptors;
  __ bind(&use_cache);

  __ EnumLength(r1, r0);
  __ cmp(r1, Operand(Smi::FromInt(0)));
  __ b(eq, &no_descriptors);

  __ LoadInstanceDescriptors(r0, r2);
  __ ldr(r2, FieldMemOperand(r2, DescriptorArray::kEnumCacheOffset));
  __ ldr(r2, FieldMemOperand(r2, DescriptorArray::kEnumCacheBridgeCacheOffset));

  // Set up the four remaining stack slots.
  __ push(r0);  // Map.
  __ mov(r0, Operand(Smi::FromInt(0)));
  // Push enumeration cache, enumeration cache length (as smi) and zero.
  __ Push(r2, r1, r0);
  __ jmp(&loop);

  __ bind(&no_descriptors);
  __ Drop(1);
  __ jmp(&exit);

  // We got a fixed array in register r0. Iterate through that.
  __ bind(&fixed_array);

  __ mov(r1, Operand(Smi::FromInt(1)));  // Smi(1) indicates slow check
  __ Push(r1, r0);  // Smi and array
  __ ldr(r1, FieldMemOperand(r0, FixedArray::kLengthOffset));
  __ Push(r1);  // Fixed array length (as smi).
  PrepareForBailoutForId(stmt->PrepareId(), NO_REGISTERS);
  __ mov(r0, Operand(Smi::FromInt(0)));
  __ Push(r0);  // Initial index.

  // Generate code for doing the condition check.
  __ bind(&loop);
  SetExpressionAsStatementPosition(stmt->each());

  // Load the current count to r0, load the length to r1.
  __ Ldrd(r0, r1, MemOperand(sp, 0 * kPointerSize));
  __ cmp(r0, r1);  // Compare to the array length.
  __ b(hs, loop_statement.break_label());

  // Get the current entry of the array into register r3.
  __ ldr(r2, MemOperand(sp, 2 * kPointerSize));
  __ add(r2, r2, Operand(FixedArray::kHeaderSize - kHeapObjectTag));
  __ ldr(r3, MemOperand::PointerAddressFromSmiKey(r2, r0));

  // Get the expected map from the stack or a smi in the
  // permanent slow case into register r2.
  __ ldr(r2, MemOperand(sp, 3 * kPointerSize));

  // Check if the expected map still matches that of the enumerable.
  // If not, we may have to filter the key.
  Label update_each;
  __ ldr(r1, MemOperand(sp, 4 * kPointerSize));
  __ ldr(r4, FieldMemOperand(r1, HeapObject::kMapOffset));
  __ cmp(r4, Operand(r2));
  __ b(eq, &update_each);

  // We need to filter the key, record slow-path here.
  int const vector_index = SmiFromSlot(slot)->value();
  __ EmitLoadTypeFeedbackVector(r0);
  __ mov(r2, Operand(TypeFeedbackVector::MegamorphicSentinel(isolate())));
  __ str(r2, FieldMemOperand(r0, FixedArray::OffsetOfElementAt(vector_index)));

  // Convert the entry to a string or (smi) 0 if it isn't a property
  // any more. If the property has been removed while iterating, we
  // just skip it.
  __ push(r1);  // Enumerable.
  __ push(r3);  // Current entry.
  __ CallRuntime(Runtime::kForInFilter);
  PrepareForBailoutForId(stmt->FilterId(), TOS_REG);
  __ mov(r3, Operand(r0));
  __ LoadRoot(ip, Heap::kUndefinedValueRootIndex);
  __ cmp(r0, ip);
  __ b(eq, loop_statement.continue_label());

  // Update the 'each' property or variable from the possibly filtered
  // entry in register r3.
  __ bind(&update_each);
  __ mov(result_register(), r3);
  // Perform the assignment as if via '='.
  { EffectContext context(this);
    EmitAssignment(stmt->each(), stmt->EachFeedbackSlot());
    PrepareForBailoutForId(stmt->AssignmentId(), NO_REGISTERS);
  }

  // Both Crankshaft and Turbofan expect BodyId to be right before stmt->body().
  PrepareForBailoutForId(stmt->BodyId(), NO_REGISTERS);
  // Generate code for the body of the loop.
  Visit(stmt->body());

  // Generate code for going to the next element by incrementing
  // the index (smi) stored on top of the stack.
  __ bind(loop_statement.continue_label());
  __ pop(r0);
  __ add(r0, r0, Operand(Smi::FromInt(1)));
  __ push(r0);

  EmitBackEdgeBookkeeping(stmt, &loop);
  __ b(&loop);

  // Remove the pointers stored on the stack.
  __ bind(loop_statement.break_label());
  DropOperands(5);

  // Exit and decrement the loop depth.
  PrepareForBailoutForId(stmt->ExitId(), NO_REGISTERS);
  __ bind(&exit);
  decrement_loop_depth();
}


void FullCodeGenerator::EmitSetHomeObject(Expression* initializer, int offset,
                                          FeedbackVectorSlot slot) {
  DCHECK(NeedsHomeObject(initializer));
  __ ldr(StoreDescriptor::ReceiverRegister(), MemOperand(sp));
  __ mov(StoreDescriptor::NameRegister(),
         Operand(isolate()->factory()->home_object_symbol()));
  __ ldr(StoreDescriptor::ValueRegister(),
         MemOperand(sp, offset * kPointerSize));
  EmitLoadStoreICSlot(slot);
  CallStoreIC();
}


void FullCodeGenerator::EmitSetHomeObjectAccumulator(Expression* initializer,
                                                     int offset,
                                                     FeedbackVectorSlot slot) {
  DCHECK(NeedsHomeObject(initializer));
  __ Move(StoreDescriptor::ReceiverRegister(), r0);
  __ mov(StoreDescriptor::NameRegister(),
         Operand(isolate()->factory()->home_object_symbol()));
  __ ldr(StoreDescriptor::ValueRegister(),
         MemOperand(sp, offset * kPointerSize));
  EmitLoadStoreICSlot(slot);
  CallStoreIC();
}


void FullCodeGenerator::EmitLoadGlobalCheckExtensions(VariableProxy* proxy,
                                                      TypeofMode typeof_mode,
                                                      Label* slow) {
  Register current = cp;
  Register next = r1;
  Register temp = r2;

  Scope* s = scope();
  while (s != NULL) {
    if (s->num_heap_slots() > 0) {
      if (s->calls_sloppy_eval()) {
        // Check that extension is "the hole".
        __ ldr(temp, ContextMemOperand(current, Context::EXTENSION_INDEX));
        __ JumpIfNotRoot(temp, Heap::kTheHoleValueRootIndex, slow);
      }
      // Load next context in chain.
      __ ldr(next, ContextMemOperand(current, Context::PREVIOUS_INDEX));
      // Walk the rest of the chain without clobbering cp.
      current = next;
    }
    // If no outer scope calls eval, we do not need to check more
    // context extensions.
    if (!s->outer_scope_calls_sloppy_eval() || s->is_eval_scope()) break;
    s = s->outer_scope();
  }

  if (s->is_eval_scope()) {
    Label loop, fast;
    if (!current.is(next)) {
      __ Move(next, current);
    }
    __ bind(&loop);
    // Terminate at native context.
    __ ldr(temp, FieldMemOperand(next, HeapObject::kMapOffset));
    __ LoadRoot(ip, Heap::kNativeContextMapRootIndex);
    __ cmp(temp, ip);
    __ b(eq, &fast);
    // Check that extension is "the hole".
    __ ldr(temp, ContextMemOperand(next, Context::EXTENSION_INDEX));
    __ JumpIfNotRoot(temp, Heap::kTheHoleValueRootIndex, slow);
    // Load next context in chain.
    __ ldr(next, ContextMemOperand(next, Context::PREVIOUS_INDEX));
    __ b(&loop);
    __ bind(&fast);
  }

  // All extension objects were empty and it is safe to use the normal global
  // load machinery.
  EmitGlobalVariableLoad(proxy, typeof_mode);
}


MemOperand FullCodeGenerator::ContextSlotOperandCheckExtensions(Variable* var,
                                                                Label* slow) {
  DCHECK(var->IsContextSlot());
  Register context = cp;
  Register next = r3;
  Register temp = r4;

  for (Scope* s = scope(); s != var->scope(); s = s->outer_scope()) {
    if (s->num_heap_slots() > 0) {
      if (s->calls_sloppy_eval()) {
        // Check that extension is "the hole".
        __ ldr(temp, ContextMemOperand(context, Context::EXTENSION_INDEX));
        __ JumpIfNotRoot(temp, Heap::kTheHoleValueRootIndex, slow);
      }
      __ ldr(next, ContextMemOperand(context, Context::PREVIOUS_INDEX));
      // Walk the rest of the chain without clobbering cp.
      context = next;
    }
  }
  // Check that last extension is "the hole".
  __ ldr(temp, ContextMemOperand(context, Context::EXTENSION_INDEX));
  __ JumpIfNotRoot(temp, Heap::kTheHoleValueRootIndex, slow);

  // This function is used only for loads, not stores, so it's safe to
  // return a cp-based operand (the write barrier cannot be allowed to
  // destroy the cp register).
  return ContextMemOperand(context, var->index());
}


void FullCodeGenerator::EmitDynamicLookupFastCase(VariableProxy* proxy,
                                                  TypeofMode typeof_mode,
                                                  Label* slow, Label* done) {
  // Generate fast-case code for variables that might be shadowed by
  // eval-introduced variables. Eval is used a lot without
  // introducing variables. In those cases, we do not want to
  // perform a runtime call for all variables in the scope
  // containing the eval.
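  // For illustration (hypothetical, simplified snippet): in
  //   var x = 1;
  //   function f() { eval(s); return x; }
  // the read of x inside f cannot be resolved statically, because eval(s)
  // might have introduced a local x that shadows the global one. The fast
  // cases below first check that no eval-introduced context extension is
  // present and then perform an ordinary global or context load, jumping to
  // |slow| only if such an extension is found.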
1280 Variable* var = proxy->var();
1281 if (var->mode() == DYNAMIC_GLOBAL) {
1282 EmitLoadGlobalCheckExtensions(proxy, typeof_mode, slow);
1283 __ jmp(done);
1284 } else if (var->mode() == DYNAMIC_LOCAL) {
1285 Variable* local = var->local_if_not_shadowed();
1286 __ ldr(r0, ContextSlotOperandCheckExtensions(local, slow));
1287 if (local->mode() == LET || local->mode() == CONST ||
1288 local->mode() == CONST_LEGACY) {
1289 __ CompareRoot(r0, Heap::kTheHoleValueRootIndex);
1290 if (local->mode() == CONST_LEGACY) {
1291 __ LoadRoot(r0, Heap::kUndefinedValueRootIndex, eq);
1292 } else { // LET || CONST
1293 __ b(ne, done);
1294 __ mov(r0, Operand(var->name()));
1295 __ push(r0);
1296 __ CallRuntime(Runtime::kThrowReferenceError);
1297 }
1298 }
1299 __ jmp(done);
1300 }
1301}
1302
1303
1304void FullCodeGenerator::EmitGlobalVariableLoad(VariableProxy* proxy,
1305 TypeofMode typeof_mode) {
1306 Variable* var = proxy->var();
1307 DCHECK(var->IsUnallocatedOrGlobalSlot() ||
1308 (var->IsLookupSlot() && var->mode() == DYNAMIC_GLOBAL));
1309 __ LoadGlobalObject(LoadDescriptor::ReceiverRegister());
1310 __ mov(LoadDescriptor::NameRegister(), Operand(var->name()));
1311 __ mov(LoadDescriptor::SlotRegister(),
1312 Operand(SmiFromSlot(proxy->VariableFeedbackSlot())));
1313 CallLoadIC(typeof_mode);
1314}
1315
1316
1317void FullCodeGenerator::EmitVariableLoad(VariableProxy* proxy,
1318 TypeofMode typeof_mode) {
1319 // Record position before possible IC call.
1320 SetExpressionPosition(proxy);
1321 PrepareForBailoutForId(proxy->BeforeId(), NO_REGISTERS);
1322 Variable* var = proxy->var();
1323
1324 // Three cases: global variables, lookup variables, and all other types of
1325 // variables.
1326 switch (var->location()) {
1327 case VariableLocation::GLOBAL:
1328 case VariableLocation::UNALLOCATED: {
1329 Comment cmnt(masm_, "[ Global variable");
1330 EmitGlobalVariableLoad(proxy, typeof_mode);
1331 context()->Plug(r0);
1332 break;
1333 }
1334
1335 case VariableLocation::PARAMETER:
1336 case VariableLocation::LOCAL:
1337 case VariableLocation::CONTEXT: {
1338 DCHECK_EQ(NOT_INSIDE_TYPEOF, typeof_mode);
1339 Comment cmnt(masm_, var->IsContextSlot() ? "[ Context variable"
1340 : "[ Stack variable");
1341 if (NeedsHoleCheckForLoad(proxy)) {
1342 // Let and const need a read barrier.
1343 GetVar(r0, var);
1344 __ CompareRoot(r0, Heap::kTheHoleValueRootIndex);
1345 if (var->mode() == LET || var->mode() == CONST) {
1346 // Throw a reference error when using an uninitialized let/const
1347 // binding in harmony mode.
1348 Label done;
1349 __ b(ne, &done);
1350 __ mov(r0, Operand(var->name()));
1351 __ push(r0);
1352 __ CallRuntime(Runtime::kThrowReferenceError);
1353 __ bind(&done);
1354 } else {
1355 // Uninitialized legacy const bindings are unholed.
1356 DCHECK(var->mode() == CONST_LEGACY);
1357 __ LoadRoot(r0, Heap::kUndefinedValueRootIndex, eq);
1358 }
1359 context()->Plug(r0);
1360 break;
1361 }
1362 context()->Plug(var);
1363 break;
1364 }
1365
1366 case VariableLocation::LOOKUP: {
1367 Comment cmnt(masm_, "[ Lookup variable");
1368 Label done, slow;
1369 // Generate code for loading from variables potentially shadowed
1370 // by eval-introduced variables.
1371 EmitDynamicLookupFastCase(proxy, typeof_mode, &slow, &done);
1372 __ bind(&slow);
Ben Murdoch097c5b22016-05-18 11:27:45 +01001373 __ Push(var->name());
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00001374 Runtime::FunctionId function_id =
1375 typeof_mode == NOT_INSIDE_TYPEOF
1376 ? Runtime::kLoadLookupSlot
Ben Murdoch097c5b22016-05-18 11:27:45 +01001377 : Runtime::kLoadLookupSlotInsideTypeof;
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00001378 __ CallRuntime(function_id);
1379 __ bind(&done);
1380 context()->Plug(r0);
1381 }
1382 }
1383}
1384
1385
1386void FullCodeGenerator::VisitRegExpLiteral(RegExpLiteral* expr) {
1387 Comment cmnt(masm_, "[ RegExpLiteral");
1388 __ ldr(r3, MemOperand(fp, JavaScriptFrameConstants::kFunctionOffset));
1389 __ mov(r2, Operand(Smi::FromInt(expr->literal_index())));
1390 __ mov(r1, Operand(expr->pattern()));
1391 __ mov(r0, Operand(Smi::FromInt(expr->flags())));
1392 FastCloneRegExpStub stub(isolate());
1393 __ CallStub(&stub);
1394 context()->Plug(r0);
1395}
1396
1397
1398void FullCodeGenerator::EmitAccessor(ObjectLiteralProperty* property) {
1399 Expression* expression = (property == NULL) ? NULL : property->value();
1400 if (expression == NULL) {
1401 __ LoadRoot(r1, Heap::kNullValueRootIndex);
Ben Murdoch097c5b22016-05-18 11:27:45 +01001402 PushOperand(r1);
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00001403 } else {
1404 VisitForStackValue(expression);
1405 if (NeedsHomeObject(expression)) {
1406 DCHECK(property->kind() == ObjectLiteral::Property::GETTER ||
1407 property->kind() == ObjectLiteral::Property::SETTER);
1408 int offset = property->kind() == ObjectLiteral::Property::GETTER ? 2 : 3;
1409 EmitSetHomeObject(expression, offset, property->GetSlot());
1410 }
1411 }
1412}
1413
1414
1415void FullCodeGenerator::VisitObjectLiteral(ObjectLiteral* expr) {
1416 Comment cmnt(masm_, "[ ObjectLiteral");
1417
1418 Handle<FixedArray> constant_properties = expr->constant_properties();
1419 __ ldr(r3, MemOperand(fp, JavaScriptFrameConstants::kFunctionOffset));
1420 __ mov(r2, Operand(Smi::FromInt(expr->literal_index())));
1421 __ mov(r1, Operand(constant_properties));
1422 int flags = expr->ComputeFlags();
1423 __ mov(r0, Operand(Smi::FromInt(flags)));
1424 if (MustCreateObjectLiteralWithRuntime(expr)) {
1425 __ Push(r3, r2, r1, r0);
1426 __ CallRuntime(Runtime::kCreateObjectLiteral);
1427 } else {
1428 FastCloneShallowObjectStub stub(isolate(), expr->properties_count());
1429 __ CallStub(&stub);
1430 }
1431 PrepareForBailoutForId(expr->CreateLiteralId(), TOS_REG);
1432
1433 // If result_saved is true the result is on top of the stack. If
1434 // result_saved is false the result is in r0.
1435 bool result_saved = false;
1436
1437 AccessorTable accessor_table(zone());
1438 int property_index = 0;
1439 for (; property_index < expr->properties()->length(); property_index++) {
1440 ObjectLiteral::Property* property = expr->properties()->at(property_index);
1441 if (property->is_computed_name()) break;
1442 if (property->IsCompileTimeValue()) continue;
1443
1444 Literal* key = property->key()->AsLiteral();
1445 Expression* value = property->value();
1446 if (!result_saved) {
Ben Murdoch097c5b22016-05-18 11:27:45 +01001447 PushOperand(r0); // Save result on stack
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00001448 result_saved = true;
1449 }
1450 switch (property->kind()) {
1451 case ObjectLiteral::Property::CONSTANT:
1452 UNREACHABLE();
1453 case ObjectLiteral::Property::MATERIALIZED_LITERAL:
1454 DCHECK(!CompileTimeValue::IsCompileTimeValue(property->value()));
1455 // Fall through.
1456 case ObjectLiteral::Property::COMPUTED:
1457 // It is safe to use [[Put]] here because the boilerplate already
1458 // contains computed properties with an uninitialized value.
1459 if (key->value()->IsInternalizedString()) {
1460 if (property->emit_store()) {
1461 VisitForAccumulatorValue(value);
1462 DCHECK(StoreDescriptor::ValueRegister().is(r0));
1463 __ mov(StoreDescriptor::NameRegister(), Operand(key->value()));
1464 __ ldr(StoreDescriptor::ReceiverRegister(), MemOperand(sp));
1465 EmitLoadStoreICSlot(property->GetSlot(0));
1466 CallStoreIC();
1467 PrepareForBailoutForId(key->id(), NO_REGISTERS);
1468
1469 if (NeedsHomeObject(value)) {
1470 EmitSetHomeObjectAccumulator(value, 0, property->GetSlot(1));
1471 }
1472 } else {
1473 VisitForEffect(value);
1474 }
1475 break;
1476 }
1477 // Duplicate receiver on stack.
1478 __ ldr(r0, MemOperand(sp));
Ben Murdoch097c5b22016-05-18 11:27:45 +01001479 PushOperand(r0);
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00001480 VisitForStackValue(key);
1481 VisitForStackValue(value);
1482 if (property->emit_store()) {
1483 if (NeedsHomeObject(value)) {
1484 EmitSetHomeObject(value, 2, property->GetSlot());
1485 }
1486 __ mov(r0, Operand(Smi::FromInt(SLOPPY))); // PropertyAttributes
Ben Murdoch097c5b22016-05-18 11:27:45 +01001487 PushOperand(r0);
1488 CallRuntimeWithOperands(Runtime::kSetProperty);
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00001489 } else {
Ben Murdoch097c5b22016-05-18 11:27:45 +01001490 DropOperands(3);
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00001491 }
1492 break;
1493 case ObjectLiteral::Property::PROTOTYPE:
1494 // Duplicate receiver on stack.
1495 __ ldr(r0, MemOperand(sp));
Ben Murdoch097c5b22016-05-18 11:27:45 +01001496 PushOperand(r0);
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00001497 VisitForStackValue(value);
1498 DCHECK(property->emit_store());
Ben Murdoch097c5b22016-05-18 11:27:45 +01001499 CallRuntimeWithOperands(Runtime::kInternalSetPrototype);
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00001500 PrepareForBailoutForId(expr->GetIdForPropertySet(property_index),
1501 NO_REGISTERS);
1502 break;
1503
1504 case ObjectLiteral::Property::GETTER:
1505 if (property->emit_store()) {
1506 accessor_table.lookup(key)->second->getter = property;
1507 }
1508 break;
1509 case ObjectLiteral::Property::SETTER:
1510 if (property->emit_store()) {
1511 accessor_table.lookup(key)->second->setter = property;
1512 }
1513 break;
1514 }
1515 }
1516
1517 // Emit code to define accessors, using only a single call to the runtime for
1518 // each pair of corresponding getters and setters.
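  // For illustration: in a literal such as { get x() { return 1; }, set x(v) {} }
  // both accessors end up in the same accessor_table entry for the key "x", so
  // the loop below issues one Runtime::kDefineAccessorPropertyUnchecked call
  // that installs the getter/setter pair together.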
1519 for (AccessorTable::Iterator it = accessor_table.begin();
1520 it != accessor_table.end();
1521 ++it) {
1522 __ ldr(r0, MemOperand(sp)); // Duplicate receiver.
Ben Murdoch097c5b22016-05-18 11:27:45 +01001523 PushOperand(r0);
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00001524 VisitForStackValue(it->first);
1525 EmitAccessor(it->second->getter);
1526 EmitAccessor(it->second->setter);
1527 __ mov(r0, Operand(Smi::FromInt(NONE)));
Ben Murdoch097c5b22016-05-18 11:27:45 +01001528 PushOperand(r0);
1529 CallRuntimeWithOperands(Runtime::kDefineAccessorPropertyUnchecked);
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00001530 }
1531
1532 // Object literals have two parts. The "static" part on the left contains no
1533 // computed property names, and so we can compute its map ahead of time; see
1534 // runtime.cc::CreateObjectLiteralBoilerplate. The second "dynamic" part
1535 // starts with the first computed property name, and continues with all
1536 // properties to its right. All the code from above initializes the static
1537 // component of the object literal, and arranges for the map of the result to
1538 // reflect the static order in which the keys appear. For the dynamic
1539 // properties, we compile them into a series of "SetOwnProperty" runtime
1540 // calls. This will preserve insertion order.
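  // For illustration: in { a: 1, b: 2, [key()]: 3, c: 4 } the static part
  // (a and b) is covered by the boilerplate handling above, while [key()] and
  // every property after it (including c) is defined one by one by the runtime
  // calls emitted below, preserving insertion order.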
1541 for (; property_index < expr->properties()->length(); property_index++) {
1542 ObjectLiteral::Property* property = expr->properties()->at(property_index);
1543
1544 Expression* value = property->value();
1545 if (!result_saved) {
Ben Murdoch097c5b22016-05-18 11:27:45 +01001546 PushOperand(r0); // Save result on the stack
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00001547 result_saved = true;
1548 }
1549
1550 __ ldr(r0, MemOperand(sp)); // Duplicate receiver.
Ben Murdoch097c5b22016-05-18 11:27:45 +01001551 PushOperand(r0);
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00001552
1553 if (property->kind() == ObjectLiteral::Property::PROTOTYPE) {
1554 DCHECK(!property->is_computed_name());
1555 VisitForStackValue(value);
1556 DCHECK(property->emit_store());
Ben Murdoch097c5b22016-05-18 11:27:45 +01001557 CallRuntimeWithOperands(Runtime::kInternalSetPrototype);
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00001558 PrepareForBailoutForId(expr->GetIdForPropertySet(property_index),
1559 NO_REGISTERS);
1560 } else {
1561 EmitPropertyKey(property, expr->GetIdForPropertyName(property_index));
1562 VisitForStackValue(value);
1563 if (NeedsHomeObject(value)) {
1564 EmitSetHomeObject(value, 2, property->GetSlot());
1565 }
1566
1567 switch (property->kind()) {
1568 case ObjectLiteral::Property::CONSTANT:
1569 case ObjectLiteral::Property::MATERIALIZED_LITERAL:
1570 case ObjectLiteral::Property::COMPUTED:
1571 if (property->emit_store()) {
Ben Murdoch097c5b22016-05-18 11:27:45 +01001572 PushOperand(Smi::FromInt(NONE));
1573 PushOperand(Smi::FromInt(property->NeedsSetFunctionName()));
1574 CallRuntimeWithOperands(Runtime::kDefineDataPropertyInLiteral);
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00001575 } else {
Ben Murdoch097c5b22016-05-18 11:27:45 +01001576 DropOperands(3);
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00001577 }
1578 break;
1579
1580 case ObjectLiteral::Property::PROTOTYPE:
1581 UNREACHABLE();
1582 break;
1583
1584 case ObjectLiteral::Property::GETTER:
Ben Murdoch097c5b22016-05-18 11:27:45 +01001585 PushOperand(Smi::FromInt(NONE));
1586 CallRuntimeWithOperands(Runtime::kDefineGetterPropertyUnchecked);
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00001587 break;
1588
1589 case ObjectLiteral::Property::SETTER:
Ben Murdoch097c5b22016-05-18 11:27:45 +01001590 PushOperand(Smi::FromInt(NONE));
1591 CallRuntimeWithOperands(Runtime::kDefineSetterPropertyUnchecked);
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00001592 break;
1593 }
1594 }
1595 }
1596
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00001597 if (result_saved) {
1598 context()->PlugTOS();
1599 } else {
1600 context()->Plug(r0);
1601 }
1602}
1603
1604
1605void FullCodeGenerator::VisitArrayLiteral(ArrayLiteral* expr) {
1606 Comment cmnt(masm_, "[ ArrayLiteral");
1607
1608 Handle<FixedArray> constant_elements = expr->constant_elements();
1609 bool has_fast_elements =
1610 IsFastObjectElementsKind(expr->constant_elements_kind());
1611 Handle<FixedArrayBase> constant_elements_values(
1612 FixedArrayBase::cast(constant_elements->get(1)));
1613
1614 AllocationSiteMode allocation_site_mode = TRACK_ALLOCATION_SITE;
1615 if (has_fast_elements && !FLAG_allocation_site_pretenuring) {
1616 // If the only consumer of allocation sites is elements-kind transitioning,
1617 // we can turn tracking off when there is nowhere left to transition to.
1618 allocation_site_mode = DONT_TRACK_ALLOCATION_SITE;
1619 }
1620
1621 __ ldr(r3, MemOperand(fp, JavaScriptFrameConstants::kFunctionOffset));
1622 __ mov(r2, Operand(Smi::FromInt(expr->literal_index())));
1623 __ mov(r1, Operand(constant_elements));
1624 if (MustCreateArrayLiteralWithRuntime(expr)) {
1625 __ mov(r0, Operand(Smi::FromInt(expr->ComputeFlags())));
1626 __ Push(r3, r2, r1, r0);
1627 __ CallRuntime(Runtime::kCreateArrayLiteral);
1628 } else {
1629 FastCloneShallowArrayStub stub(isolate(), allocation_site_mode);
1630 __ CallStub(&stub);
1631 }
1632 PrepareForBailoutForId(expr->CreateLiteralId(), TOS_REG);
1633
1634 bool result_saved = false; // Is the result saved to the stack?
1635 ZoneList<Expression*>* subexprs = expr->values();
1636 int length = subexprs->length();
1637
1638 // Emit code to evaluate all the non-constant subexpressions and to store
1639 // them into the newly cloned array.
1640 int array_index = 0;
1641 for (; array_index < length; array_index++) {
1642 Expression* subexpr = subexprs->at(array_index);
Ben Murdoch097c5b22016-05-18 11:27:45 +01001643 DCHECK(!subexpr->IsSpread());
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00001644
1645 // If the subexpression is a literal or a simple materialized literal it
1646 // is already set in the cloned array.
1647 if (CompileTimeValue::IsCompileTimeValue(subexpr)) continue;
1648
1649 if (!result_saved) {
Ben Murdoch097c5b22016-05-18 11:27:45 +01001650 PushOperand(r0);
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00001651 result_saved = true;
1652 }
1653 VisitForAccumulatorValue(subexpr);
1654
1655 __ mov(StoreDescriptor::NameRegister(), Operand(Smi::FromInt(array_index)));
1656 __ ldr(StoreDescriptor::ReceiverRegister(), MemOperand(sp, 0));
1657 EmitLoadStoreICSlot(expr->LiteralFeedbackSlot());
1658 Handle<Code> ic =
1659 CodeFactory::KeyedStoreIC(isolate(), language_mode()).code();
1660 CallIC(ic);
1661
1662 PrepareForBailoutForId(expr->GetIdForElement(array_index), NO_REGISTERS);
1663 }
1664
1665 // In case the array literal contains spread expressions it has two parts. The
1666 // first part is the "static" array with literal indices, which is handled
1667 // above. The second part starts at the first spread expression (inclusive);
1668 // these elements get appended to the array. Note that the number of
1669 // elements an iterable produces is unknown ahead of time.
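  // For illustration: in [x, y, ...iter, z], x and y belong to the first part
  // and are stored through the keyed store IC above, while everything from the
  // spread onwards (including z) falls into the second part and is appended
  // element by element via Runtime::kAppendElement below.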
1670 if (array_index < length && result_saved) {
Ben Murdoch097c5b22016-05-18 11:27:45 +01001671 PopOperand(r0);
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00001672 result_saved = false;
1673 }
1674 for (; array_index < length; array_index++) {
1675 Expression* subexpr = subexprs->at(array_index);
1676
Ben Murdoch097c5b22016-05-18 11:27:45 +01001677 PushOperand(r0);
1678 DCHECK(!subexpr->IsSpread());
1679 VisitForStackValue(subexpr);
1680 CallRuntimeWithOperands(Runtime::kAppendElement);
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00001681
1682 PrepareForBailoutForId(expr->GetIdForElement(array_index), NO_REGISTERS);
1683 }
1684
1685 if (result_saved) {
1686 context()->PlugTOS();
1687 } else {
1688 context()->Plug(r0);
1689 }
1690}
1691
1692
1693void FullCodeGenerator::VisitAssignment(Assignment* expr) {
1694 DCHECK(expr->target()->IsValidReferenceExpressionOrThis());
1695
1696 Comment cmnt(masm_, "[ Assignment");
1697 SetExpressionPosition(expr, INSERT_BREAK);
1698
1699 Property* property = expr->target()->AsProperty();
1700 LhsKind assign_type = Property::GetAssignType(property);
1701
1702 // Evaluate LHS expression.
1703 switch (assign_type) {
1704 case VARIABLE:
1705 // Nothing to do here.
1706 break;
1707 case NAMED_PROPERTY:
1708 if (expr->is_compound()) {
1709 // We need the receiver both on the stack and in the register.
1710 VisitForStackValue(property->obj());
1711 __ ldr(LoadDescriptor::ReceiverRegister(), MemOperand(sp, 0));
1712 } else {
1713 VisitForStackValue(property->obj());
1714 }
1715 break;
1716 case NAMED_SUPER_PROPERTY:
1717 VisitForStackValue(
1718 property->obj()->AsSuperPropertyReference()->this_var());
1719 VisitForAccumulatorValue(
1720 property->obj()->AsSuperPropertyReference()->home_object());
Ben Murdoch097c5b22016-05-18 11:27:45 +01001721 PushOperand(result_register());
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00001722 if (expr->is_compound()) {
1723 const Register scratch = r1;
1724 __ ldr(scratch, MemOperand(sp, kPointerSize));
Ben Murdoch097c5b22016-05-18 11:27:45 +01001725 PushOperand(scratch);
1726 PushOperand(result_register());
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00001727 }
1728 break;
1729 case KEYED_SUPER_PROPERTY:
1730 VisitForStackValue(
1731 property->obj()->AsSuperPropertyReference()->this_var());
1732 VisitForStackValue(
1733 property->obj()->AsSuperPropertyReference()->home_object());
1734 VisitForAccumulatorValue(property->key());
Ben Murdoch097c5b22016-05-18 11:27:45 +01001735 PushOperand(result_register());
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00001736 if (expr->is_compound()) {
1737 const Register scratch = r1;
1738 __ ldr(scratch, MemOperand(sp, 2 * kPointerSize));
Ben Murdoch097c5b22016-05-18 11:27:45 +01001739 PushOperand(scratch);
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00001740 __ ldr(scratch, MemOperand(sp, 2 * kPointerSize));
Ben Murdoch097c5b22016-05-18 11:27:45 +01001741 PushOperand(scratch);
1742 PushOperand(result_register());
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00001743 }
1744 break;
1745 case KEYED_PROPERTY:
1746 if (expr->is_compound()) {
1747 VisitForStackValue(property->obj());
1748 VisitForStackValue(property->key());
1749 __ ldr(LoadDescriptor::ReceiverRegister(),
1750 MemOperand(sp, 1 * kPointerSize));
1751 __ ldr(LoadDescriptor::NameRegister(), MemOperand(sp, 0));
1752 } else {
1753 VisitForStackValue(property->obj());
1754 VisitForStackValue(property->key());
1755 }
1756 break;
1757 }
1758
1759 // For compound assignments we need another deoptimization point after the
1760 // variable/property load.
1761 if (expr->is_compound()) {
1762 { AccumulatorValueContext context(this);
1763 switch (assign_type) {
1764 case VARIABLE:
1765 EmitVariableLoad(expr->target()->AsVariableProxy());
1766 PrepareForBailout(expr->target(), TOS_REG);
1767 break;
1768 case NAMED_PROPERTY:
1769 EmitNamedPropertyLoad(property);
1770 PrepareForBailoutForId(property->LoadId(), TOS_REG);
1771 break;
1772 case NAMED_SUPER_PROPERTY:
1773 EmitNamedSuperPropertyLoad(property);
1774 PrepareForBailoutForId(property->LoadId(), TOS_REG);
1775 break;
1776 case KEYED_SUPER_PROPERTY:
1777 EmitKeyedSuperPropertyLoad(property);
1778 PrepareForBailoutForId(property->LoadId(), TOS_REG);
1779 break;
1780 case KEYED_PROPERTY:
1781 EmitKeyedPropertyLoad(property);
1782 PrepareForBailoutForId(property->LoadId(), TOS_REG);
1783 break;
1784 }
1785 }
1786
1787 Token::Value op = expr->binary_op();
Ben Murdoch097c5b22016-05-18 11:27:45 +01001788 PushOperand(r0); // Left operand goes on the stack.
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00001789 VisitForAccumulatorValue(expr->value());
1790
1791 AccumulatorValueContext context(this);
1792 if (ShouldInlineSmiCase(op)) {
1793 EmitInlineSmiBinaryOp(expr->binary_operation(),
1794 op,
1795 expr->target(),
1796 expr->value());
1797 } else {
1798 EmitBinaryOp(expr->binary_operation(), op);
1799 }
1800
1801 // Deoptimization point in case the binary operation may have side effects.
1802 PrepareForBailout(expr->binary_operation(), TOS_REG);
1803 } else {
1804 VisitForAccumulatorValue(expr->value());
1805 }
1806
1807 SetExpressionPosition(expr);
1808
1809 // Store the value.
1810 switch (assign_type) {
1811 case VARIABLE:
1812 EmitVariableAssignment(expr->target()->AsVariableProxy()->var(),
1813 expr->op(), expr->AssignmentSlot());
1814 PrepareForBailoutForId(expr->AssignmentId(), TOS_REG);
1815 context()->Plug(r0);
1816 break;
1817 case NAMED_PROPERTY:
1818 EmitNamedPropertyAssignment(expr);
1819 break;
1820 case NAMED_SUPER_PROPERTY:
1821 EmitNamedSuperPropertyStore(property);
1822 context()->Plug(r0);
1823 break;
1824 case KEYED_SUPER_PROPERTY:
1825 EmitKeyedSuperPropertyStore(property);
1826 context()->Plug(r0);
1827 break;
1828 case KEYED_PROPERTY:
1829 EmitKeyedPropertyAssignment(expr);
1830 break;
1831 }
1832}
1833
1834
1835void FullCodeGenerator::VisitYield(Yield* expr) {
1836 Comment cmnt(masm_, "[ Yield");
1837 SetExpressionPosition(expr);
1838
1839 // Evaluate yielded value first; the initial iterator definition depends on
1840 // this. It stays on the stack while we update the iterator.
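  // Roughly, for `let x = yield v`: v is evaluated onto the operand stack, the
  // generator records its continuation and suspends through
  // Runtime::kSuspendJSGeneratorObject, and on resumption control re-enters at
  // the continuation label below with the resume mode on top of the stack and
  // the value passed to next()/throw()/return() in the result register.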
1841 VisitForStackValue(expr->expression());
1842
Ben Murdochda12d292016-06-02 14:46:10 +01001843 Label suspend, continuation, post_runtime, resume;
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00001844
Ben Murdochda12d292016-06-02 14:46:10 +01001845 __ jmp(&suspend);
1846 __ bind(&continuation);
1847 // When we arrive here, the stack top is the resume mode and
1848 // result_register() holds the input value (the argument given to the
1849 // respective resume operation).
1850 __ RecordGeneratorContinuation();
1851 __ pop(r1);
1852 __ cmp(r1, Operand(Smi::FromInt(JSGeneratorObject::RETURN)));
1853 __ b(ne, &resume);
1854 __ push(result_register());
1855 EmitCreateIteratorResult(true);
1856 EmitUnwindAndReturn();
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00001857
Ben Murdochda12d292016-06-02 14:46:10 +01001858 __ bind(&suspend);
1859 OperandStackDepthIncrement(1); // Not popped on this path.
1860 VisitForAccumulatorValue(expr->generator_object());
1861 DCHECK(continuation.pos() > 0 && Smi::IsValid(continuation.pos()));
1862 __ mov(r1, Operand(Smi::FromInt(continuation.pos())));
1863 __ str(r1, FieldMemOperand(r0, JSGeneratorObject::kContinuationOffset));
1864 __ str(cp, FieldMemOperand(r0, JSGeneratorObject::kContextOffset));
1865 __ mov(r1, cp);
1866 __ RecordWriteField(r0, JSGeneratorObject::kContextOffset, r1, r2,
1867 kLRHasBeenSaved, kDontSaveFPRegs);
1868 __ add(r1, fp, Operand(StandardFrameConstants::kExpressionsOffset));
1869 __ cmp(sp, r1);
1870 __ b(eq, &post_runtime);
1871 __ push(r0); // generator object
1872 __ CallRuntime(Runtime::kSuspendJSGeneratorObject, 1);
1873 __ ldr(cp, MemOperand(fp, StandardFrameConstants::kContextOffset));
1874 __ bind(&post_runtime);
1875 PopOperand(result_register());
1876 EmitReturnSequence();
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00001877
Ben Murdochda12d292016-06-02 14:46:10 +01001878 __ bind(&resume);
1879 context()->Plug(result_register());
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00001880}
1881
1882
1883void FullCodeGenerator::EmitGeneratorResume(Expression *generator,
1884 Expression *value,
1885 JSGeneratorObject::ResumeMode resume_mode) {
1886 // The value stays in r0, and is ultimately read by the resumed generator, as
1887 // if CallRuntime(Runtime::kSuspendJSGeneratorObject) returned it. Or it
1888 // is read to throw the value when the resumed generator is already closed.
1889 // r1 will hold the generator object until the activation has been resumed.
1890 VisitForStackValue(generator);
1891 VisitForAccumulatorValue(value);
Ben Murdoch097c5b22016-05-18 11:27:45 +01001892 PopOperand(r1);
1893
1894 // Store input value into generator object.
1895 __ str(result_register(),
1896 FieldMemOperand(r1, JSGeneratorObject::kInputOffset));
1897 __ mov(r2, result_register());
1898 __ RecordWriteField(r1, JSGeneratorObject::kInputOffset, r2, r3,
1899 kLRHasBeenSaved, kDontSaveFPRegs);
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00001900
1901 // Load suspended function and context.
1902 __ ldr(cp, FieldMemOperand(r1, JSGeneratorObject::kContextOffset));
1903 __ ldr(r4, FieldMemOperand(r1, JSGeneratorObject::kFunctionOffset));
1904
1905 // Load receiver and store as the first argument.
1906 __ ldr(r2, FieldMemOperand(r1, JSGeneratorObject::kReceiverOffset));
1907 __ push(r2);
1908
Ben Murdochda12d292016-06-02 14:46:10 +01001909 // Push holes for the arguments to the generator function. Since the parser forced
1910 // context allocation for any variables in generators, the actual argument
1911 // values have already been copied into the context and these dummy values
1912 // will never be used.
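  // For example, resuming `function* g(a, b)` pushes two hole values here; the
  // live values of a and b were captured in the generator's context when it was
  // first activated, so the rebuilt frame only needs placeholders matching the
  // formal parameter count loaded from the SharedFunctionInfo below.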
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00001913 __ ldr(r3, FieldMemOperand(r4, JSFunction::kSharedFunctionInfoOffset));
1914 __ ldr(r3,
1915 FieldMemOperand(r3, SharedFunctionInfo::kFormalParameterCountOffset));
1916 __ LoadRoot(r2, Heap::kTheHoleValueRootIndex);
1917 Label push_argument_holes, push_frame;
1918 __ bind(&push_argument_holes);
1919 __ sub(r3, r3, Operand(Smi::FromInt(1)), SetCC);
1920 __ b(mi, &push_frame);
1921 __ push(r2);
1922 __ jmp(&push_argument_holes);
1923
1924 // Enter a new JavaScript frame, and initialize its slots as they were when
1925 // the generator was suspended.
1926 Label resume_frame, done;
1927 __ bind(&push_frame);
1928 __ bl(&resume_frame);
1929 __ jmp(&done);
1930 __ bind(&resume_frame);
1931 // lr = return address.
1932 // fp = caller's frame pointer.
1933 // pp = caller's constant pool (if FLAG_enable_embedded_constant_pool),
1934 // cp = callee's context,
1935 // r4 = callee's JS function.
Ben Murdochda12d292016-06-02 14:46:10 +01001936 __ PushStandardFrame(r4);
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00001937
1938 // Load the operand stack size.
1939 __ ldr(r3, FieldMemOperand(r1, JSGeneratorObject::kOperandStackOffset));
1940 __ ldr(r3, FieldMemOperand(r3, FixedArray::kLengthOffset));
1941 __ SmiUntag(r3);
1942
1943 // If we are sending a value and there is no operand stack, we can jump back
1944 // in directly.
1945 if (resume_mode == JSGeneratorObject::NEXT) {
1946 Label slow_resume;
1947 __ cmp(r3, Operand(0));
1948 __ b(ne, &slow_resume);
1949 __ ldr(r3, FieldMemOperand(r4, JSFunction::kCodeEntryOffset));
1950
1951 { ConstantPoolUnavailableScope constant_pool_unavailable(masm_);
1952 if (FLAG_enable_embedded_constant_pool) {
1953 // Load the new code object's constant pool pointer.
1954 __ LoadConstantPoolPointerRegisterFromCodeTargetAddress(r3);
1955 }
1956
1957 __ ldr(r2, FieldMemOperand(r1, JSGeneratorObject::kContinuationOffset));
1958 __ SmiUntag(r2);
1959 __ add(r3, r3, r2);
1960 __ mov(r2, Operand(Smi::FromInt(JSGeneratorObject::kGeneratorExecuting)));
1961 __ str(r2, FieldMemOperand(r1, JSGeneratorObject::kContinuationOffset));
Ben Murdoch097c5b22016-05-18 11:27:45 +01001962 __ Push(Smi::FromInt(resume_mode)); // Consumed in continuation.
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00001963 __ Jump(r3);
1964 }
1965 __ bind(&slow_resume);
1966 }
1967
1968 // Otherwise, we push holes for the operand stack and call the runtime to fix
1969 // up the stack and the handlers.
1970 Label push_operand_holes, call_resume;
1971 __ bind(&push_operand_holes);
1972 __ sub(r3, r3, Operand(1), SetCC);
1973 __ b(mi, &call_resume);
1974 __ push(r2);
1975 __ b(&push_operand_holes);
1976 __ bind(&call_resume);
Ben Murdoch097c5b22016-05-18 11:27:45 +01001977 __ Push(Smi::FromInt(resume_mode)); // Consumed in continuation.
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00001978 DCHECK(!result_register().is(r1));
1979 __ Push(r1, result_register());
1980 __ Push(Smi::FromInt(resume_mode));
1981 __ CallRuntime(Runtime::kResumeJSGeneratorObject);
1982 // Not reached: the runtime call returns elsewhere.
1983 __ stop("not-reached");
1984
1985 __ bind(&done);
1986 context()->Plug(result_register());
1987}
1988
Ben Murdoch097c5b22016-05-18 11:27:45 +01001989void FullCodeGenerator::PushOperands(Register reg1, Register reg2) {
1990 OperandStackDepthIncrement(2);
1991 __ Push(reg1, reg2);
1992}
1993
1994void FullCodeGenerator::PopOperands(Register reg1, Register reg2) {
1995 OperandStackDepthDecrement(2);
1996 __ Pop(reg1, reg2);
1997}
1998
1999void FullCodeGenerator::EmitOperandStackDepthCheck() {
2000 if (FLAG_debug_code) {
2001 int expected_diff = StandardFrameConstants::kFixedFrameSizeFromFp +
2002 operand_stack_depth_ * kPointerSize;
2003 __ sub(r0, fp, sp);
2004 __ cmp(r0, Operand(expected_diff));
2005 __ Assert(eq, kUnexpectedStackDepth);
2006 }
2007}
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00002008
2009void FullCodeGenerator::EmitCreateIteratorResult(bool done) {
2010 Label allocate, done_allocate;
2011
2012 __ Allocate(JSIteratorResult::kSize, r0, r2, r3, &allocate, TAG_OBJECT);
2013 __ b(&done_allocate);
2014
2015 __ bind(&allocate);
2016 __ Push(Smi::FromInt(JSIteratorResult::kSize));
2017 __ CallRuntime(Runtime::kAllocateInNewSpace);
2018
2019 __ bind(&done_allocate);
2020 __ LoadNativeContextSlot(Context::ITERATOR_RESULT_MAP_INDEX, r1);
Ben Murdochda12d292016-06-02 14:46:10 +01002021 PopOperand(r2);
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00002022 __ LoadRoot(r3,
2023 done ? Heap::kTrueValueRootIndex : Heap::kFalseValueRootIndex);
2024 __ LoadRoot(r4, Heap::kEmptyFixedArrayRootIndex);
2025 __ str(r1, FieldMemOperand(r0, HeapObject::kMapOffset));
2026 __ str(r4, FieldMemOperand(r0, JSObject::kPropertiesOffset));
2027 __ str(r4, FieldMemOperand(r0, JSObject::kElementsOffset));
2028 __ str(r2, FieldMemOperand(r0, JSIteratorResult::kValueOffset));
2029 __ str(r3, FieldMemOperand(r0, JSIteratorResult::kDoneOffset));
2030}
2031
2032
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00002033void FullCodeGenerator::EmitInlineSmiBinaryOp(BinaryOperation* expr,
2034 Token::Value op,
2035 Expression* left_expr,
2036 Expression* right_expr) {
2037 Label done, smi_case, stub_call;
2038
2039 Register scratch1 = r2;
2040 Register scratch2 = r3;
2041
2042 // Get the arguments.
2043 Register left = r1;
2044 Register right = r0;
Ben Murdoch097c5b22016-05-18 11:27:45 +01002045 PopOperand(left);
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00002046
2047 // Perform combined smi check on both operands.
2048 __ orr(scratch1, left, Operand(right));
2049 STATIC_ASSERT(kSmiTag == 0);
2050 JumpPatchSite patch_site(masm_);
2051 patch_site.EmitJumpIfSmi(scratch1, &smi_case);
2052
2053 __ bind(&stub_call);
Ben Murdoch097c5b22016-05-18 11:27:45 +01002054 Handle<Code> code = CodeFactory::BinaryOpIC(isolate(), op).code();
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00002055 CallIC(code, expr->BinaryOperationFeedbackId());
2056 patch_site.EmitPatchInfo();
2057 __ jmp(&done);
2058
2059 __ bind(&smi_case);
2060 // Smi case. This code works the same way as the smi-smi case in the
2061 // type-recording binary operation IC.
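  // Reminder of the 32-bit smi encoding assumed here: the integer value lives
  // in the upper 31 bits with a zero tag in bit 0 (e.g. 5 is encoded as 10).
  // That is why SAR can shift the tagged value directly and just clear the tag
  // bit afterwards, and why ADD/SUB work on tagged values with only an
  // overflow check.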
2062 switch (op) {
2063 case Token::SAR:
2064 __ GetLeastBitsFromSmi(scratch1, right, 5);
2065 __ mov(right, Operand(left, ASR, scratch1));
2066 __ bic(right, right, Operand(kSmiTagMask));
2067 break;
2068 case Token::SHL: {
2069 __ SmiUntag(scratch1, left);
2070 __ GetLeastBitsFromSmi(scratch2, right, 5);
2071 __ mov(scratch1, Operand(scratch1, LSL, scratch2));
2072 __ TrySmiTag(right, scratch1, &stub_call);
2073 break;
2074 }
2075 case Token::SHR: {
2076 __ SmiUntag(scratch1, left);
2077 __ GetLeastBitsFromSmi(scratch2, right, 5);
2078 __ mov(scratch1, Operand(scratch1, LSR, scratch2));
2079 __ tst(scratch1, Operand(0xc0000000));
2080 __ b(ne, &stub_call);
2081 __ SmiTag(right, scratch1);
2082 break;
2083 }
2084 case Token::ADD:
2085 __ add(scratch1, left, Operand(right), SetCC);
2086 __ b(vs, &stub_call);
2087 __ mov(right, scratch1);
2088 break;
2089 case Token::SUB:
2090 __ sub(scratch1, left, Operand(right), SetCC);
2091 __ b(vs, &stub_call);
2092 __ mov(right, scratch1);
2093 break;
2094 case Token::MUL: {
2095 __ SmiUntag(ip, right);
2096 __ smull(scratch1, scratch2, left, ip);
2097 __ mov(ip, Operand(scratch1, ASR, 31));
2098 __ cmp(ip, Operand(scratch2));
2099 __ b(ne, &stub_call);
2100 __ cmp(scratch1, Operand::Zero());
2101 __ mov(right, Operand(scratch1), LeaveCC, ne);
2102 __ b(ne, &done);
2103 __ add(scratch2, right, Operand(left), SetCC);
2104 __ mov(right, Operand(Smi::FromInt(0)), LeaveCC, pl);
2105 __ b(mi, &stub_call);
2106 break;
2107 }
2108 case Token::BIT_OR:
2109 __ orr(right, left, Operand(right));
2110 break;
2111 case Token::BIT_AND:
2112 __ and_(right, left, Operand(right));
2113 break;
2114 case Token::BIT_XOR:
2115 __ eor(right, left, Operand(right));
2116 break;
2117 default:
2118 UNREACHABLE();
2119 }
2120
2121 __ bind(&done);
2122 context()->Plug(r0);
2123}
2124
2125
2126void FullCodeGenerator::EmitClassDefineProperties(ClassLiteral* lit) {
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00002127 for (int i = 0; i < lit->properties()->length(); i++) {
2128 ObjectLiteral::Property* property = lit->properties()->at(i);
2129 Expression* value = property->value();
2130
Ben Murdoch097c5b22016-05-18 11:27:45 +01002131 Register scratch = r1;
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00002132 if (property->is_static()) {
2133 __ ldr(scratch, MemOperand(sp, kPointerSize)); // constructor
2134 } else {
2135 __ ldr(scratch, MemOperand(sp, 0)); // prototype
2136 }
Ben Murdoch097c5b22016-05-18 11:27:45 +01002137 PushOperand(scratch);
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00002138 EmitPropertyKey(property, lit->GetIdForProperty(i));
2139
2140 // The static "prototype" property is read-only. The parser already rejects
2141 // the non-computed property name case, so this is the only place where we
2142 // need to check for an own read-only property; we special-case it here so
2143 // that we do not have to do the check for every property.
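  // For example, `class C { static ["proto" + "type"]() {} }` must fail at
  // runtime, which is what kThrowIfStaticPrototype enforces, whereas the
  // non-computed form `static prototype() {}` is already rejected by the
  // parser.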
2144 if (property->is_static() && property->is_computed_name()) {
2145 __ CallRuntime(Runtime::kThrowIfStaticPrototype);
2146 __ push(r0);
2147 }
2148
2149 VisitForStackValue(value);
2150 if (NeedsHomeObject(value)) {
2151 EmitSetHomeObject(value, 2, property->GetSlot());
2152 }
2153
2154 switch (property->kind()) {
2155 case ObjectLiteral::Property::CONSTANT:
2156 case ObjectLiteral::Property::MATERIALIZED_LITERAL:
2157 case ObjectLiteral::Property::PROTOTYPE:
2158 UNREACHABLE();
2159 case ObjectLiteral::Property::COMPUTED:
Ben Murdoch097c5b22016-05-18 11:27:45 +01002160 PushOperand(Smi::FromInt(DONT_ENUM));
2161 PushOperand(Smi::FromInt(property->NeedsSetFunctionName()));
2162 CallRuntimeWithOperands(Runtime::kDefineDataPropertyInLiteral);
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00002163 break;
2164
2165 case ObjectLiteral::Property::GETTER:
Ben Murdoch097c5b22016-05-18 11:27:45 +01002166 PushOperand(Smi::FromInt(DONT_ENUM));
2167 CallRuntimeWithOperands(Runtime::kDefineGetterPropertyUnchecked);
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00002168 break;
2169
2170 case ObjectLiteral::Property::SETTER:
Ben Murdoch097c5b22016-05-18 11:27:45 +01002171 PushOperand(Smi::FromInt(DONT_ENUM));
2172 CallRuntimeWithOperands(Runtime::kDefineSetterPropertyUnchecked);
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00002173 break;
2174
2175 default:
2176 UNREACHABLE();
2177 }
2178 }
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00002179}
2180
2181
2182void FullCodeGenerator::EmitBinaryOp(BinaryOperation* expr, Token::Value op) {
Ben Murdoch097c5b22016-05-18 11:27:45 +01002183 PopOperand(r1);
2184 Handle<Code> code = CodeFactory::BinaryOpIC(isolate(), op).code();
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00002185 JumpPatchSite patch_site(masm_); // unbound, signals no inlined smi code.
2186 CallIC(code, expr->BinaryOperationFeedbackId());
2187 patch_site.EmitPatchInfo();
2188 context()->Plug(r0);
2189}
2190
2191
2192void FullCodeGenerator::EmitAssignment(Expression* expr,
2193 FeedbackVectorSlot slot) {
2194 DCHECK(expr->IsValidReferenceExpressionOrThis());
2195
2196 Property* prop = expr->AsProperty();
2197 LhsKind assign_type = Property::GetAssignType(prop);
2198
2199 switch (assign_type) {
2200 case VARIABLE: {
2201 Variable* var = expr->AsVariableProxy()->var();
2202 EffectContext context(this);
2203 EmitVariableAssignment(var, Token::ASSIGN, slot);
2204 break;
2205 }
2206 case NAMED_PROPERTY: {
Ben Murdoch097c5b22016-05-18 11:27:45 +01002207 PushOperand(r0); // Preserve value.
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00002208 VisitForAccumulatorValue(prop->obj());
2209 __ Move(StoreDescriptor::ReceiverRegister(), r0);
Ben Murdoch097c5b22016-05-18 11:27:45 +01002210 PopOperand(StoreDescriptor::ValueRegister()); // Restore value.
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00002211 __ mov(StoreDescriptor::NameRegister(),
2212 Operand(prop->key()->AsLiteral()->value()));
2213 EmitLoadStoreICSlot(slot);
2214 CallStoreIC();
2215 break;
2216 }
2217 case NAMED_SUPER_PROPERTY: {
Ben Murdoch097c5b22016-05-18 11:27:45 +01002218 PushOperand(r0);
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00002219 VisitForStackValue(prop->obj()->AsSuperPropertyReference()->this_var());
2220 VisitForAccumulatorValue(
2221 prop->obj()->AsSuperPropertyReference()->home_object());
2222 // stack: value, this; r0: home_object
2223 Register scratch = r2;
2224 Register scratch2 = r3;
2225 __ mov(scratch, result_register()); // home_object
2226 __ ldr(r0, MemOperand(sp, kPointerSize)); // value
2227 __ ldr(scratch2, MemOperand(sp, 0)); // this
2228 __ str(scratch2, MemOperand(sp, kPointerSize)); // this
2229 __ str(scratch, MemOperand(sp, 0)); // home_object
2230 // stack: this, home_object; r0: value
2231 EmitNamedSuperPropertyStore(prop);
2232 break;
2233 }
2234 case KEYED_SUPER_PROPERTY: {
Ben Murdoch097c5b22016-05-18 11:27:45 +01002235 PushOperand(r0);
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00002236 VisitForStackValue(prop->obj()->AsSuperPropertyReference()->this_var());
2237 VisitForStackValue(
2238 prop->obj()->AsSuperPropertyReference()->home_object());
2239 VisitForAccumulatorValue(prop->key());
2240 Register scratch = r2;
2241 Register scratch2 = r3;
2242 __ ldr(scratch2, MemOperand(sp, 2 * kPointerSize)); // value
2243 // stack: value, this, home_object; r0: key, r3: value
2244 __ ldr(scratch, MemOperand(sp, kPointerSize)); // this
2245 __ str(scratch, MemOperand(sp, 2 * kPointerSize));
2246 __ ldr(scratch, MemOperand(sp, 0)); // home_object
2247 __ str(scratch, MemOperand(sp, kPointerSize));
2248 __ str(r0, MemOperand(sp, 0));
2249 __ Move(r0, scratch2);
2250 // stack: this, home_object, key; r0: value.
2251 EmitKeyedSuperPropertyStore(prop);
2252 break;
2253 }
2254 case KEYED_PROPERTY: {
Ben Murdoch097c5b22016-05-18 11:27:45 +01002255 PushOperand(r0); // Preserve value.
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00002256 VisitForStackValue(prop->obj());
2257 VisitForAccumulatorValue(prop->key());
2258 __ Move(StoreDescriptor::NameRegister(), r0);
Ben Murdoch097c5b22016-05-18 11:27:45 +01002259 PopOperands(StoreDescriptor::ValueRegister(),
2260 StoreDescriptor::ReceiverRegister());
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00002261 EmitLoadStoreICSlot(slot);
2262 Handle<Code> ic =
2263 CodeFactory::KeyedStoreIC(isolate(), language_mode()).code();
2264 CallIC(ic);
2265 break;
2266 }
2267 }
2268 context()->Plug(r0);
2269}
2270
2271
2272void FullCodeGenerator::EmitStoreToStackLocalOrContextSlot(
2273 Variable* var, MemOperand location) {
2274 __ str(result_register(), location);
2275 if (var->IsContextSlot()) {
2276 // RecordWrite may destroy all its register arguments.
2277 __ mov(r3, result_register());
2278 int offset = Context::SlotOffset(var->index());
2279 __ RecordWriteContextSlot(
2280 r1, offset, r3, r2, kLRHasBeenSaved, kDontSaveFPRegs);
2281 }
2282}
2283
2284
2285void FullCodeGenerator::EmitVariableAssignment(Variable* var, Token::Value op,
2286 FeedbackVectorSlot slot) {
2287 if (var->IsUnallocated()) {
2288 // Global var, const, or let.
2289 __ mov(StoreDescriptor::NameRegister(), Operand(var->name()));
2290 __ LoadGlobalObject(StoreDescriptor::ReceiverRegister());
2291 EmitLoadStoreICSlot(slot);
2292 CallStoreIC();
2293
2294 } else if (var->mode() == LET && op != Token::INIT) {
2295 // Non-initializing assignment to let variable needs a write barrier.
2296 DCHECK(!var->IsLookupSlot());
2297 DCHECK(var->IsStackAllocated() || var->IsContextSlot());
2298 Label assign;
2299 MemOperand location = VarOperand(var, r1);
2300 __ ldr(r3, location);
2301 __ CompareRoot(r3, Heap::kTheHoleValueRootIndex);
2302 __ b(ne, &assign);
2303 __ mov(r3, Operand(var->name()));
2304 __ push(r3);
2305 __ CallRuntime(Runtime::kThrowReferenceError);
2306 // Perform the assignment.
2307 __ bind(&assign);
2308 EmitStoreToStackLocalOrContextSlot(var, location);
2309
2310 } else if (var->mode() == CONST && op != Token::INIT) {
2311 // Assignment to const variable needs a write barrier.
2312 DCHECK(!var->IsLookupSlot());
2313 DCHECK(var->IsStackAllocated() || var->IsContextSlot());
2314 Label const_error;
2315 MemOperand location = VarOperand(var, r1);
2316 __ ldr(r3, location);
2317 __ CompareRoot(r3, Heap::kTheHoleValueRootIndex);
2318 __ b(ne, &const_error);
2319 __ mov(r3, Operand(var->name()));
2320 __ push(r3);
2321 __ CallRuntime(Runtime::kThrowReferenceError);
2322 __ bind(&const_error);
2323 __ CallRuntime(Runtime::kThrowConstAssignError);
2324
2325 } else if (var->is_this() && var->mode() == CONST && op == Token::INIT) {
2326 // Initializing assignment to const {this} needs a write barrier.
2327 DCHECK(var->IsStackAllocated() || var->IsContextSlot());
2328 Label uninitialized_this;
2329 MemOperand location = VarOperand(var, r1);
2330 __ ldr(r3, location);
2331 __ CompareRoot(r3, Heap::kTheHoleValueRootIndex);
2332 __ b(eq, &uninitialized_this);
2333 __ mov(r0, Operand(var->name()));
2334 __ Push(r0);
2335 __ CallRuntime(Runtime::kThrowReferenceError);
2336 __ bind(&uninitialized_this);
2337 EmitStoreToStackLocalOrContextSlot(var, location);
2338
2339 } else if (!var->is_const_mode() ||
2340 (var->mode() == CONST && op == Token::INIT)) {
2341 if (var->IsLookupSlot()) {
2342 // Assignment to var.
Ben Murdoch097c5b22016-05-18 11:27:45 +01002343 __ Push(var->name());
2344 __ Push(r0);
2345 __ CallRuntime(is_strict(language_mode())
2346 ? Runtime::kStoreLookupSlot_Strict
2347 : Runtime::kStoreLookupSlot_Sloppy);
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00002348 } else {
2349 // Assignment to var or initializing assignment to let/const in harmony
2350 // mode.
2351 DCHECK((var->IsStackAllocated() || var->IsContextSlot()));
2352 MemOperand location = VarOperand(var, r1);
Ben Murdoch097c5b22016-05-18 11:27:45 +01002353 if (FLAG_debug_code && var->mode() == LET && op == Token::INIT) {
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00002354 // Check for an uninitialized let binding.
2355 __ ldr(r2, location);
2356 __ CompareRoot(r2, Heap::kTheHoleValueRootIndex);
2357 __ Check(eq, kLetBindingReInitialization);
2358 }
2359 EmitStoreToStackLocalOrContextSlot(var, location);
2360 }
2361
2362 } else if (var->mode() == CONST_LEGACY && op == Token::INIT) {
2363 // Const initializers need a write barrier.
2364 DCHECK(!var->IsParameter()); // No const parameters.
2365 if (var->IsLookupSlot()) {
2366 __ push(r0);
2367 __ mov(r0, Operand(var->name()));
2368 __ Push(cp, r0); // Context and name.
2369 __ CallRuntime(Runtime::kInitializeLegacyConstLookupSlot);
2370 } else {
2371 DCHECK(var->IsStackAllocated() || var->IsContextSlot());
2372 Label skip;
2373 MemOperand location = VarOperand(var, r1);
2374 __ ldr(r2, location);
2375 __ CompareRoot(r2, Heap::kTheHoleValueRootIndex);
2376 __ b(ne, &skip);
2377 EmitStoreToStackLocalOrContextSlot(var, location);
2378 __ bind(&skip);
2379 }
2380
2381 } else {
2382 DCHECK(var->mode() == CONST_LEGACY && op != Token::INIT);
2383 if (is_strict(language_mode())) {
2384 __ CallRuntime(Runtime::kThrowConstAssignError);
2385 }
2386 // Silently ignore store in sloppy mode.
2387 }
2388}
2389
2390
2391void FullCodeGenerator::EmitNamedPropertyAssignment(Assignment* expr) {
2392 // Assignment to a property, using a named store IC.
2393 Property* prop = expr->target()->AsProperty();
2394 DCHECK(prop != NULL);
2395 DCHECK(prop->key()->IsLiteral());
2396
2397 __ mov(StoreDescriptor::NameRegister(),
2398 Operand(prop->key()->AsLiteral()->value()));
Ben Murdoch097c5b22016-05-18 11:27:45 +01002399 PopOperand(StoreDescriptor::ReceiverRegister());
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00002400 EmitLoadStoreICSlot(expr->AssignmentSlot());
2401 CallStoreIC();
2402
2403 PrepareForBailoutForId(expr->AssignmentId(), TOS_REG);
2404 context()->Plug(r0);
2405}
2406
2407
2408void FullCodeGenerator::EmitNamedSuperPropertyStore(Property* prop) {
2409 // Assignment to named property of super.
2410 // r0 : value
2411 // stack : receiver ('this'), home_object
2412 DCHECK(prop != NULL);
2413 Literal* key = prop->key()->AsLiteral();
2414 DCHECK(key != NULL);
2415
Ben Murdoch097c5b22016-05-18 11:27:45 +01002416 PushOperand(key->value());
2417 PushOperand(r0);
2418 CallRuntimeWithOperands(is_strict(language_mode())
2419 ? Runtime::kStoreToSuper_Strict
2420 : Runtime::kStoreToSuper_Sloppy);
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00002421}
2422
2423
2424void FullCodeGenerator::EmitKeyedSuperPropertyStore(Property* prop) {
2425 // Assignment to keyed property of super.
2426 // r0 : value
2427 // stack : receiver ('this'), home_object, key
2428 DCHECK(prop != NULL);
2429
Ben Murdoch097c5b22016-05-18 11:27:45 +01002430 PushOperand(r0);
2431 CallRuntimeWithOperands(is_strict(language_mode())
2432 ? Runtime::kStoreKeyedToSuper_Strict
2433 : Runtime::kStoreKeyedToSuper_Sloppy);
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00002434}
2435
2436
2437void FullCodeGenerator::EmitKeyedPropertyAssignment(Assignment* expr) {
2438 // Assignment to a property, using a keyed store IC.
Ben Murdoch097c5b22016-05-18 11:27:45 +01002439 PopOperands(StoreDescriptor::ReceiverRegister(),
2440 StoreDescriptor::NameRegister());
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00002441 DCHECK(StoreDescriptor::ValueRegister().is(r0));
2442
2443 Handle<Code> ic =
2444 CodeFactory::KeyedStoreIC(isolate(), language_mode()).code();
2445 EmitLoadStoreICSlot(expr->AssignmentSlot());
2446 CallIC(ic);
2447
2448 PrepareForBailoutForId(expr->AssignmentId(), TOS_REG);
2449 context()->Plug(r0);
2450}
2451
2452
2453void FullCodeGenerator::VisitProperty(Property* expr) {
2454 Comment cmnt(masm_, "[ Property");
2455 SetExpressionPosition(expr);
2456
2457 Expression* key = expr->key();
2458
2459 if (key->IsPropertyName()) {
2460 if (!expr->IsSuperAccess()) {
2461 VisitForAccumulatorValue(expr->obj());
2462 __ Move(LoadDescriptor::ReceiverRegister(), r0);
2463 EmitNamedPropertyLoad(expr);
2464 } else {
2465 VisitForStackValue(expr->obj()->AsSuperPropertyReference()->this_var());
2466 VisitForStackValue(
2467 expr->obj()->AsSuperPropertyReference()->home_object());
2468 EmitNamedSuperPropertyLoad(expr);
2469 }
2470 } else {
2471 if (!expr->IsSuperAccess()) {
2472 VisitForStackValue(expr->obj());
2473 VisitForAccumulatorValue(expr->key());
2474 __ Move(LoadDescriptor::NameRegister(), r0);
Ben Murdoch097c5b22016-05-18 11:27:45 +01002475 PopOperand(LoadDescriptor::ReceiverRegister());
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00002476 EmitKeyedPropertyLoad(expr);
2477 } else {
2478 VisitForStackValue(expr->obj()->AsSuperPropertyReference()->this_var());
2479 VisitForStackValue(
2480 expr->obj()->AsSuperPropertyReference()->home_object());
2481 VisitForStackValue(expr->key());
2482 EmitKeyedSuperPropertyLoad(expr);
2483 }
2484 }
2485 PrepareForBailoutForId(expr->LoadId(), TOS_REG);
2486 context()->Plug(r0);
2487}
2488
2489
2490void FullCodeGenerator::CallIC(Handle<Code> code,
2491 TypeFeedbackId ast_id) {
2492 ic_total_count_++;
2493 // All calls must have a predictable size in full-codegen code to ensure that
2494 // the debugger can patch them correctly.
2495 __ Call(code, RelocInfo::CODE_TARGET, ast_id, al,
2496 NEVER_INLINE_TARGET_ADDRESS);
2497}
2498
2499
2500// Code common for calls using the IC.
2501void FullCodeGenerator::EmitCallWithLoadIC(Call* expr) {
2502 Expression* callee = expr->expression();
2503
2504 // Get the target function.
2505 ConvertReceiverMode convert_mode;
2506 if (callee->IsVariableProxy()) {
2507 { StackValueContext context(this);
2508 EmitVariableLoad(callee->AsVariableProxy());
2509 PrepareForBailout(callee, NO_REGISTERS);
2510 }
2511 // Push undefined as receiver. This is patched in the method prologue if it
2512 // is a sloppy mode method.
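  // That is, for a plain call like f(), undefined is pushed as the receiver
  // here; if the callee turns out to be a sloppy-mode function its prologue
  // swaps in the global proxy, while a strict-mode callee sees undefined
  // unchanged.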
2513 __ LoadRoot(ip, Heap::kUndefinedValueRootIndex);
Ben Murdoch097c5b22016-05-18 11:27:45 +01002514 PushOperand(ip);
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00002515 convert_mode = ConvertReceiverMode::kNullOrUndefined;
2516 } else {
2517 // Load the function from the receiver.
2518 DCHECK(callee->IsProperty());
2519 DCHECK(!callee->AsProperty()->IsSuperAccess());
2520 __ ldr(LoadDescriptor::ReceiverRegister(), MemOperand(sp, 0));
2521 EmitNamedPropertyLoad(callee->AsProperty());
2522 PrepareForBailoutForId(callee->AsProperty()->LoadId(), TOS_REG);
2523 // Push the target function under the receiver.
2524 __ ldr(ip, MemOperand(sp, 0));
Ben Murdoch097c5b22016-05-18 11:27:45 +01002525 PushOperand(ip);
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00002526 __ str(r0, MemOperand(sp, kPointerSize));
2527 convert_mode = ConvertReceiverMode::kNotNullOrUndefined;
2528 }
2529
2530 EmitCall(expr, convert_mode);
2531}
2532
2533
2534void FullCodeGenerator::EmitSuperCallWithLoadIC(Call* expr) {
2535 Expression* callee = expr->expression();
2536 DCHECK(callee->IsProperty());
2537 Property* prop = callee->AsProperty();
2538 DCHECK(prop->IsSuperAccess());
2539 SetExpressionPosition(prop);
2540
2541 Literal* key = prop->key()->AsLiteral();
2542 DCHECK(!key->value()->IsSmi());
2543 // Load the function from the receiver.
2544 const Register scratch = r1;
2545 SuperPropertyReference* super_ref = prop->obj()->AsSuperPropertyReference();
2546 VisitForStackValue(super_ref->home_object());
2547 VisitForAccumulatorValue(super_ref->this_var());
Ben Murdoch097c5b22016-05-18 11:27:45 +01002548 PushOperand(r0);
2549 PushOperand(r0);
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00002550 __ ldr(scratch, MemOperand(sp, kPointerSize * 2));
Ben Murdoch097c5b22016-05-18 11:27:45 +01002551 PushOperand(scratch);
2552 PushOperand(key->value());
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00002553
2554 // Stack here:
2555 // - home_object
2556 // - this (receiver)
2557 // - this (receiver) <-- LoadFromSuper will pop here and below.
2558 // - home_object
2559 // - key
Ben Murdoch097c5b22016-05-18 11:27:45 +01002560 CallRuntimeWithOperands(Runtime::kLoadFromSuper);
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00002561
2562 // Replace home_object with target function.
2563 __ str(r0, MemOperand(sp, kPointerSize));
2564
2565 // Stack here:
2566 // - target function
2567 // - this (receiver)
2568 EmitCall(expr);
2569}
2570
2571
2572// Code common for calls using the IC.
2573void FullCodeGenerator::EmitKeyedCallWithLoadIC(Call* expr,
2574 Expression* key) {
2575 // Load the key.
2576 VisitForAccumulatorValue(key);
2577
2578 Expression* callee = expr->expression();
2579
2580 // Load the function from the receiver.
2581 DCHECK(callee->IsProperty());
2582 __ ldr(LoadDescriptor::ReceiverRegister(), MemOperand(sp, 0));
2583 __ Move(LoadDescriptor::NameRegister(), r0);
2584 EmitKeyedPropertyLoad(callee->AsProperty());
2585 PrepareForBailoutForId(callee->AsProperty()->LoadId(), TOS_REG);
2586
2587 // Push the target function under the receiver.
2588 __ ldr(ip, MemOperand(sp, 0));
Ben Murdoch097c5b22016-05-18 11:27:45 +01002589 PushOperand(ip);
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00002590 __ str(r0, MemOperand(sp, kPointerSize));
2591
2592 EmitCall(expr, ConvertReceiverMode::kNotNullOrUndefined);
2593}
2594
2595
2596void FullCodeGenerator::EmitKeyedSuperCallWithLoadIC(Call* expr) {
2597 Expression* callee = expr->expression();
2598 DCHECK(callee->IsProperty());
2599 Property* prop = callee->AsProperty();
2600 DCHECK(prop->IsSuperAccess());
2601
2602 SetExpressionPosition(prop);
2603 // Load the function from the receiver.
2604 const Register scratch = r1;
2605 SuperPropertyReference* super_ref = prop->obj()->AsSuperPropertyReference();
2606 VisitForStackValue(super_ref->home_object());
2607 VisitForAccumulatorValue(super_ref->this_var());
Ben Murdoch097c5b22016-05-18 11:27:45 +01002608 PushOperand(r0);
2609 PushOperand(r0);
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00002610 __ ldr(scratch, MemOperand(sp, kPointerSize * 2));
Ben Murdoch097c5b22016-05-18 11:27:45 +01002611 PushOperand(scratch);
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00002612 VisitForStackValue(prop->key());
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00002613
2614 // Stack here:
2615 // - home_object
2616 // - this (receiver)
2617 // - this (receiver) <-- LoadKeyedFromSuper will pop here and below.
2618 // - home_object
2619 // - key
Ben Murdoch097c5b22016-05-18 11:27:45 +01002620 CallRuntimeWithOperands(Runtime::kLoadKeyedFromSuper);
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00002621
2622 // Replace home_object with target function.
2623 __ str(r0, MemOperand(sp, kPointerSize));
2624
2625 // Stack here:
2626 // - target function
2627 // - this (receiver)
2628 EmitCall(expr);
2629}
2630
2631
2632void FullCodeGenerator::EmitCall(Call* expr, ConvertReceiverMode mode) {
2633 // Load the arguments.
2634 ZoneList<Expression*>* args = expr->arguments();
2635 int arg_count = args->length();
2636 for (int i = 0; i < arg_count; i++) {
2637 VisitForStackValue(args->at(i));
2638 }
2639
2640 PrepareForBailoutForId(expr->CallId(), NO_REGISTERS);
Ben Murdochda12d292016-06-02 14:46:10 +01002641 SetCallPosition(expr, expr->tail_call_mode());
Ben Murdoch097c5b22016-05-18 11:27:45 +01002642 if (expr->tail_call_mode() == TailCallMode::kAllow) {
2643 if (FLAG_trace) {
2644 __ CallRuntime(Runtime::kTraceTailCall);
2645 }
2646 // Update profiling counters before the tail call since we will
2647 // not return to this function.
2648 EmitProfilingCounterHandlingForReturnSequence(true);
2649 }
2650 Handle<Code> ic =
2651 CodeFactory::CallIC(isolate(), arg_count, mode, expr->tail_call_mode())
2652 .code();
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00002653 __ mov(r3, Operand(SmiFromSlot(expr->CallFeedbackICSlot())));
2654 __ ldr(r1, MemOperand(sp, (arg_count + 1) * kPointerSize));
2655 // Don't assign a type feedback id to the IC, since type feedback is provided
2656 // by the vector above.
2657 CallIC(ic);
Ben Murdoch097c5b22016-05-18 11:27:45 +01002658 OperandStackDepthDecrement(arg_count + 1);
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00002659
2660 RecordJSReturnSite(expr);
2661 // Restore context register.
2662 __ ldr(cp, MemOperand(fp, StandardFrameConstants::kContextOffset));
2663 context()->DropAndPlug(1, r0);
2664}
2665
2666
2667void FullCodeGenerator::EmitResolvePossiblyDirectEval(int arg_count) {
2668 // r4: copy of the first argument or undefined if it doesn't exist.
2669 if (arg_count > 0) {
2670 __ ldr(r4, MemOperand(sp, arg_count * kPointerSize));
2671 } else {
2672 __ LoadRoot(r4, Heap::kUndefinedValueRootIndex);
2673 }
2674
2675 // r3: the function of the enclosing frame.
2676 __ ldr(r3, MemOperand(fp, JavaScriptFrameConstants::kFunctionOffset));
2677
2678 // r2: language mode.
2679 __ mov(r2, Operand(Smi::FromInt(language_mode())));
2680
2681 // r1: the start position of the scope the call resides in.
2682 __ mov(r1, Operand(Smi::FromInt(scope()->start_position())));
2683
2684 // Do the runtime call.
2685 __ Push(r4, r3, r2, r1);
2686 __ CallRuntime(Runtime::kResolvePossiblyDirectEval);
2687}
2688
2689
2690// See http://www.ecma-international.org/ecma-262/6.0/#sec-function-calls.
2691void FullCodeGenerator::PushCalleeAndWithBaseObject(Call* expr) {
2692 VariableProxy* callee = expr->expression()->AsVariableProxy();
2693 if (callee->var()->IsLookupSlot()) {
2694 Label slow, done;
2695 SetExpressionPosition(callee);
2696 // Generate code for loading from variables potentially shadowed
2697 // by eval-introduced variables.
2698 EmitDynamicLookupFastCase(callee, NOT_INSIDE_TYPEOF, &slow, &done);
2699
2700 __ bind(&slow);
2701 // Call the runtime to find the function to call (returned in r0)
2702 // and the object holding it (returned in r1).
Ben Murdoch097c5b22016-05-18 11:27:45 +01002703 __ Push(callee->name());
2704 __ CallRuntime(Runtime::kLoadLookupSlotForCall);
2705 PushOperands(r0, r1); // Function, receiver.
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00002706 PrepareForBailoutForId(expr->LookupId(), NO_REGISTERS);
2707
2708 // If fast case code has been generated, emit code to push the
2709 // function and receiver and have the slow path jump around this
2710 // code.
2711 if (done.is_linked()) {
2712 Label call;
2713 __ b(&call);
2714 __ bind(&done);
2715 // Push function.
2716 __ push(r0);
2717 // The receiver is implicitly the global receiver. Indicate this
2718 // by passing the hole to the call function stub.
2719 __ LoadRoot(r1, Heap::kUndefinedValueRootIndex);
2720 __ push(r1);
2721 __ bind(&call);
2722 }
2723 } else {
2724 VisitForStackValue(callee);
2725 // refEnv.WithBaseObject()
2726 __ LoadRoot(r2, Heap::kUndefinedValueRootIndex);
Ben Murdoch097c5b22016-05-18 11:27:45 +01002727 PushOperand(r2); // Reserved receiver slot.
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00002728 }
2729}
2730
2731
2732void FullCodeGenerator::EmitPossiblyEvalCall(Call* expr) {
2733 // In a call to eval, we first call Runtime::kResolvePossiblyDirectEval to
2734 // resolve the function we need to call. Then we call the resolved function
2735 // using the given arguments.
2736 ZoneList<Expression*>* args = expr->arguments();
2737 int arg_count = args->length();
2738
2739 PushCalleeAndWithBaseObject(expr);
2740
2741 // Push the arguments.
2742 for (int i = 0; i < arg_count; i++) {
2743 VisitForStackValue(args->at(i));
2744 }
2745
2746 // Push a copy of the function (found below the arguments) and
2747 // resolve eval.
2748 __ ldr(r1, MemOperand(sp, (arg_count + 1) * kPointerSize));
2749 __ push(r1);
2750 EmitResolvePossiblyDirectEval(arg_count);
2751
2752 // Touch up the stack with the resolved function.
2753 __ str(r0, MemOperand(sp, (arg_count + 1) * kPointerSize));
2754
2755 PrepareForBailoutForId(expr->EvalId(), NO_REGISTERS);
2756
2757 // Record source position for debugger.
2758 SetCallPosition(expr);
2759 __ ldr(r1, MemOperand(sp, (arg_count + 1) * kPointerSize));
2760 __ mov(r0, Operand(arg_count));
Ben Murdoch097c5b22016-05-18 11:27:45 +01002761 __ Call(isolate()->builtins()->Call(ConvertReceiverMode::kAny,
2762 expr->tail_call_mode()),
2763 RelocInfo::CODE_TARGET);
2764 OperandStackDepthDecrement(arg_count + 1);
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00002765 RecordJSReturnSite(expr);
2766 // Restore context register.
2767 __ ldr(cp, MemOperand(fp, StandardFrameConstants::kContextOffset));
2768 context()->DropAndPlug(1, r0);
2769}
2770
2771
2772void FullCodeGenerator::VisitCallNew(CallNew* expr) {
2773 Comment cmnt(masm_, "[ CallNew");
2774 // According to ECMA-262, section 11.2.2, page 44, the function
2775 // expression in new calls must be evaluated before the
2776 // arguments.
2777
2778 // Push constructor on the stack. If it's not a function it's used as
2779 // receiver for CALL_NON_FUNCTION, otherwise the value on the stack is
2780 // ignored.
2781 DCHECK(!expr->expression()->IsSuperPropertyReference());
2782 VisitForStackValue(expr->expression());
2783
2784 // Push the arguments ("left-to-right") on the stack.
2785 ZoneList<Expression*>* args = expr->arguments();
2786 int arg_count = args->length();
2787 for (int i = 0; i < arg_count; i++) {
2788 VisitForStackValue(args->at(i));
2789 }
2790
2791 // Call the construct call builtin that handles allocation and
2792 // constructor invocation.
2793 SetConstructCallPosition(expr);
2794
2795 // Load function and argument count into r1 and r0.
2796 __ mov(r0, Operand(arg_count));
2797 __ ldr(r1, MemOperand(sp, arg_count * kPointerSize));
2798
2799 // Record call targets in unoptimized code.
2800 __ EmitLoadTypeFeedbackVector(r2);
2801 __ mov(r3, Operand(SmiFromSlot(expr->CallNewFeedbackSlot())));
2802
2803 CallConstructStub stub(isolate());
2804 __ Call(stub.GetCode(), RelocInfo::CODE_TARGET);
Ben Murdoch097c5b22016-05-18 11:27:45 +01002805 OperandStackDepthDecrement(arg_count + 1);
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00002806 PrepareForBailoutForId(expr->ReturnId(), TOS_REG);
2807 // Restore context register.
2808 __ ldr(cp, MemOperand(fp, StandardFrameConstants::kContextOffset));
2809 context()->Plug(r0);
2810}
2811
2812
2813void FullCodeGenerator::EmitSuperConstructorCall(Call* expr) {
2814 SuperCallReference* super_call_ref =
2815 expr->expression()->AsSuperCallReference();
2816 DCHECK_NOT_NULL(super_call_ref);
2817
2818 // Push the super constructor target on the stack (may be null,
2819 // but the Construct builtin can deal with that properly).
2820 VisitForAccumulatorValue(super_call_ref->this_function_var());
2821 __ AssertFunction(result_register());
2822 __ ldr(result_register(),
2823 FieldMemOperand(result_register(), HeapObject::kMapOffset));
2824 __ ldr(result_register(),
2825 FieldMemOperand(result_register(), Map::kPrototypeOffset));
Ben Murdoch097c5b22016-05-18 11:27:45 +01002826 PushOperand(result_register());
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00002827
2828 // Push the arguments ("left-to-right") on the stack.
2829 ZoneList<Expression*>* args = expr->arguments();
2830 int arg_count = args->length();
2831 for (int i = 0; i < arg_count; i++) {
2832 VisitForStackValue(args->at(i));
2833 }
2834
2835 // Call the construct call builtin that handles allocation and
2836 // constructor invocation.
2837 SetConstructCallPosition(expr);
2838
2839 // Load new target into r3.
2840 VisitForAccumulatorValue(super_call_ref->new_target_var());
2841 __ mov(r3, result_register());
2842
2843 // Load function and argument count into r1 and r0.
2844 __ mov(r0, Operand(arg_count));
2845 __ ldr(r1, MemOperand(sp, arg_count * kPointerSize));
2846
2847 __ Call(isolate()->builtins()->Construct(), RelocInfo::CODE_TARGET);
Ben Murdoch097c5b22016-05-18 11:27:45 +01002848 OperandStackDepthDecrement(arg_count + 1);
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00002849
2850 RecordJSReturnSite(expr);
2851
2852 // Restore context register.
2853 __ ldr(cp, MemOperand(fp, StandardFrameConstants::kContextOffset));
2854 context()->Plug(r0);
2855}
2856
2857
2858void FullCodeGenerator::EmitIsSmi(CallRuntime* expr) {
2859 ZoneList<Expression*>* args = expr->arguments();
2860 DCHECK(args->length() == 1);
2861
2862 VisitForAccumulatorValue(args->at(0));
2863
2864 Label materialize_true, materialize_false;
2865 Label* if_true = NULL;
2866 Label* if_false = NULL;
2867 Label* fall_through = NULL;
2868 context()->PrepareTest(&materialize_true, &materialize_false,
2869 &if_true, &if_false, &fall_through);
2870
2871 PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
2872 __ SmiTst(r0);
2873 Split(eq, if_true, if_false, fall_through);
2874
2875 context()->Plug(if_true, if_false);
2876}
2877
2878
2879void FullCodeGenerator::EmitIsJSReceiver(CallRuntime* expr) {
2880 ZoneList<Expression*>* args = expr->arguments();
2881 DCHECK(args->length() == 1);
2882
2883 VisitForAccumulatorValue(args->at(0));
2884
2885 Label materialize_true, materialize_false;
2886 Label* if_true = NULL;
2887 Label* if_false = NULL;
2888 Label* fall_through = NULL;
2889 context()->PrepareTest(&materialize_true, &materialize_false,
2890 &if_true, &if_false, &fall_through);
2891
2892 __ JumpIfSmi(r0, if_false);
2893 __ CompareObjectType(r0, r1, r1, FIRST_JS_RECEIVER_TYPE);
2894 PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
2895 Split(ge, if_true, if_false, fall_through);
2896
2897 context()->Plug(if_true, if_false);
2898}
2899
2900
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00002901void FullCodeGenerator::EmitIsArray(CallRuntime* expr) {
2902 ZoneList<Expression*>* args = expr->arguments();
2903 DCHECK(args->length() == 1);
2904
2905 VisitForAccumulatorValue(args->at(0));
2906
2907 Label materialize_true, materialize_false;
2908 Label* if_true = NULL;
2909 Label* if_false = NULL;
2910 Label* fall_through = NULL;
2911 context()->PrepareTest(&materialize_true, &materialize_false,
2912 &if_true, &if_false, &fall_through);
2913
2914 __ JumpIfSmi(r0, if_false);
2915 __ CompareObjectType(r0, r1, r1, JS_ARRAY_TYPE);
2916 PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
2917 Split(eq, if_true, if_false, fall_through);
2918
2919 context()->Plug(if_true, if_false);
2920}
2921
2922
2923void FullCodeGenerator::EmitIsTypedArray(CallRuntime* expr) {
2924 ZoneList<Expression*>* args = expr->arguments();
2925 DCHECK(args->length() == 1);
2926
2927 VisitForAccumulatorValue(args->at(0));
2928
2929 Label materialize_true, materialize_false;
2930 Label* if_true = NULL;
2931 Label* if_false = NULL;
2932 Label* fall_through = NULL;
2933 context()->PrepareTest(&materialize_true, &materialize_false, &if_true,
2934 &if_false, &fall_through);
2935
2936 __ JumpIfSmi(r0, if_false);
2937 __ CompareObjectType(r0, r1, r1, JS_TYPED_ARRAY_TYPE);
2938 PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
2939 Split(eq, if_true, if_false, fall_through);
2940
2941 context()->Plug(if_true, if_false);
2942}
2943
2944
2945void FullCodeGenerator::EmitIsRegExp(CallRuntime* expr) {
2946 ZoneList<Expression*>* args = expr->arguments();
2947 DCHECK(args->length() == 1);
2948
2949 VisitForAccumulatorValue(args->at(0));
2950
2951 Label materialize_true, materialize_false;
2952 Label* if_true = NULL;
2953 Label* if_false = NULL;
2954 Label* fall_through = NULL;
2955 context()->PrepareTest(&materialize_true, &materialize_false,
2956 &if_true, &if_false, &fall_through);
2957
2958 __ JumpIfSmi(r0, if_false);
2959 __ CompareObjectType(r0, r1, r1, JS_REGEXP_TYPE);
2960 PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
2961 Split(eq, if_true, if_false, fall_through);
2962
2963 context()->Plug(if_true, if_false);
2964}
2965
2966
2967void FullCodeGenerator::EmitIsJSProxy(CallRuntime* expr) {
2968 ZoneList<Expression*>* args = expr->arguments();
2969 DCHECK(args->length() == 1);
2970
2971 VisitForAccumulatorValue(args->at(0));
2972
2973 Label materialize_true, materialize_false;
2974 Label* if_true = NULL;
2975 Label* if_false = NULL;
2976 Label* fall_through = NULL;
2977 context()->PrepareTest(&materialize_true, &materialize_false, &if_true,
2978 &if_false, &fall_through);
2979
2980 __ JumpIfSmi(r0, if_false);
2981 __ CompareObjectType(r0, r1, r1, JS_PROXY_TYPE);
2982 PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
2983 Split(eq, if_true, if_false, fall_through);
2984
2985 context()->Plug(if_true, if_false);
2986}
2987
2988
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00002989void FullCodeGenerator::EmitClassOf(CallRuntime* expr) {
2990 ZoneList<Expression*>* args = expr->arguments();
2991 DCHECK(args->length() == 1);
2992 Label done, null, function, non_function_constructor;
2993
2994 VisitForAccumulatorValue(args->at(0));
2995
2996 // If the object is not a JSReceiver, we return null.
2997 __ JumpIfSmi(r0, &null);
2998 STATIC_ASSERT(LAST_JS_RECEIVER_TYPE == LAST_TYPE);
2999 __ CompareObjectType(r0, r0, r1, FIRST_JS_RECEIVER_TYPE);
3000 // Map is now in r0.
3001 __ b(lt, &null);
3002
Ben Murdochda12d292016-06-02 14:46:10 +01003003 // Return 'Function' for JSFunction and JSBoundFunction objects.
3004 __ cmp(r1, Operand(FIRST_FUNCTION_TYPE));
3005 STATIC_ASSERT(LAST_FUNCTION_TYPE == LAST_TYPE);
3006 __ b(hs, &function);
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00003007
3008 // Check if the constructor in the map is a JS function.
3009 Register instance_type = r2;
3010 __ GetMapConstructor(r0, r0, r1, instance_type);
3011 __ cmp(instance_type, Operand(JS_FUNCTION_TYPE));
3012 __ b(ne, &non_function_constructor);
3013
3014 // r0 now contains the constructor function. Grab the
3015 // instance class name from there.
3016 __ ldr(r0, FieldMemOperand(r0, JSFunction::kSharedFunctionInfoOffset));
3017 __ ldr(r0, FieldMemOperand(r0, SharedFunctionInfo::kInstanceClassNameOffset));
3018 __ b(&done);
3019
3020 // Functions have class 'Function'.
3021 __ bind(&function);
3022 __ LoadRoot(r0, Heap::kFunction_stringRootIndex);
3023 __ jmp(&done);
3024
3025 // Objects with a non-function constructor have class 'Object'.
3026 __ bind(&non_function_constructor);
3027 __ LoadRoot(r0, Heap::kObject_stringRootIndex);
3028 __ jmp(&done);
3029
3030 // Non-JS objects have class null.
3031 __ bind(&null);
3032 __ LoadRoot(r0, Heap::kNullValueRootIndex);
3033
3034 // All done.
3035 __ bind(&done);
3036
3037 context()->Plug(r0);
3038}
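// Schematically, the intrinsic above classifies its argument as follows:
//
//   not a JSReceiver                          -> null
//   instance type in the function range
//     (JSFunction, JSBoundFunction)           -> "Function"
//   map's constructor is a JSFunction         -> that constructor's
//                                                instance class name
//   otherwise                                 -> "Object"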
3039
3040
3041void FullCodeGenerator::EmitValueOf(CallRuntime* expr) {
3042 ZoneList<Expression*>* args = expr->arguments();
3043 DCHECK(args->length() == 1);
3044 VisitForAccumulatorValue(args->at(0)); // Load the object.
3045
3046 Label done;
3047 // If the object is a smi return the object.
3048 __ JumpIfSmi(r0, &done);
3049 // If the object is not a value type, return the object.
3050 __ CompareObjectType(r0, r1, r1, JS_VALUE_TYPE);
3051 __ ldr(r0, FieldMemOperand(r0, JSValue::kValueOffset), eq);
3052
3053 __ bind(&done);
3054 context()->Plug(r0);
3055}
3056
3057
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00003058void FullCodeGenerator::EmitOneByteSeqStringSetChar(CallRuntime* expr) {
3059 ZoneList<Expression*>* args = expr->arguments();
3060 DCHECK_EQ(3, args->length());
3061
3062 Register string = r0;
3063 Register index = r1;
3064 Register value = r2;
3065
3066 VisitForStackValue(args->at(0)); // index
3067 VisitForStackValue(args->at(1)); // value
3068 VisitForAccumulatorValue(args->at(2)); // string
Ben Murdoch097c5b22016-05-18 11:27:45 +01003069 PopOperands(index, value);
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00003070
3071 if (FLAG_debug_code) {
3072 __ SmiTst(value);
3073 __ Check(eq, kNonSmiValue);
3074 __ SmiTst(index);
3075 __ Check(eq, kNonSmiIndex);
3076 __ SmiUntag(index, index);
3077 static const uint32_t one_byte_seq_type = kSeqStringTag | kOneByteStringTag;
3078 __ EmitSeqStringSetCharCheck(string, index, value, one_byte_seq_type);
3079 __ SmiTag(index, index);
3080 }
3081
3082 __ SmiUntag(value, value);
3083 __ add(ip,
3084 string,
3085 Operand(SeqOneByteString::kHeaderSize - kHeapObjectTag));
3086 __ strb(value, MemOperand(ip, index, LSR, kSmiTagSize));
3087 context()->Plug(string);
3088}
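// The store above addresses the character directly: ip points at the first
// character (string + SeqOneByteString::kHeaderSize - kHeapObjectTag) and the
// smi-tagged index is untagged on the fly by the LSR #kSmiTagSize shift, so
// roughly:
//
//   byte[string_data + (index >> 1)] = value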
3089
3090
3091void FullCodeGenerator::EmitTwoByteSeqStringSetChar(CallRuntime* expr) {
3092 ZoneList<Expression*>* args = expr->arguments();
3093 DCHECK_EQ(3, args->length());
3094
3095 Register string = r0;
3096 Register index = r1;
3097 Register value = r2;
3098
3099 VisitForStackValue(args->at(0)); // index
3100 VisitForStackValue(args->at(1)); // value
3101 VisitForAccumulatorValue(args->at(2)); // string
Ben Murdoch097c5b22016-05-18 11:27:45 +01003102 PopOperands(index, value);
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00003103
3104 if (FLAG_debug_code) {
3105 __ SmiTst(value);
3106 __ Check(eq, kNonSmiValue);
3107 __ SmiTst(index);
3108 __ Check(eq, kNonSmiIndex);
3109 __ SmiUntag(index, index);
3110 static const uint32_t two_byte_seq_type = kSeqStringTag | kTwoByteStringTag;
3111 __ EmitSeqStringSetCharCheck(string, index, value, two_byte_seq_type);
3112 __ SmiTag(index, index);
3113 }
3114
3115 __ SmiUntag(value, value);
3116 __ add(ip,
3117 string,
3118 Operand(SeqTwoByteString::kHeaderSize - kHeapObjectTag));
3119 STATIC_ASSERT(kSmiTagSize == 1 && kSmiTag == 0);
3120 __ strh(value, MemOperand(ip, index));
3121 context()->Plug(string);
3122}
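// Here the smi-tagged index is used as-is: with kSmiTagSize == 1 and
// kSmiTag == 0 (the STATIC_ASSERT above), a smi index is already the untagged
// value times two, which is exactly the byte offset of a two-byte character,
// so the strh needs no extra shift.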
3123
3124
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00003125void FullCodeGenerator::EmitStringCharFromCode(CallRuntime* expr) {
3126 ZoneList<Expression*>* args = expr->arguments();
3127 DCHECK(args->length() == 1);
3128 VisitForAccumulatorValue(args->at(0));
3129
3130 Label done;
3131 StringCharFromCodeGenerator generator(r0, r1);
3132 generator.GenerateFast(masm_);
3133 __ jmp(&done);
3134
3135 NopRuntimeCallHelper call_helper;
3136 generator.GenerateSlow(masm_, call_helper);
3137
3138 __ bind(&done);
3139 context()->Plug(r1);
3140}
3141
3142
3143void FullCodeGenerator::EmitStringCharCodeAt(CallRuntime* expr) {
3144 ZoneList<Expression*>* args = expr->arguments();
3145 DCHECK(args->length() == 2);
3146 VisitForStackValue(args->at(0));
3147 VisitForAccumulatorValue(args->at(1));
3148
3149 Register object = r1;
3150 Register index = r0;
3151 Register result = r3;
3152
Ben Murdoch097c5b22016-05-18 11:27:45 +01003153 PopOperand(object);
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00003154
3155 Label need_conversion;
3156 Label index_out_of_range;
3157 Label done;
3158 StringCharCodeAtGenerator generator(object,
3159 index,
3160 result,
3161 &need_conversion,
3162 &need_conversion,
3163 &index_out_of_range,
3164 STRING_INDEX_IS_NUMBER);
3165 generator.GenerateFast(masm_);
3166 __ jmp(&done);
3167
3168 __ bind(&index_out_of_range);
3169 // When the index is out of range, the spec requires us to return
3170 // NaN.
3171 __ LoadRoot(result, Heap::kNanValueRootIndex);
3172 __ jmp(&done);
3173
3174 __ bind(&need_conversion);
3175 // Load the undefined value into the result register, which will
3176 // trigger conversion.
3177 __ LoadRoot(result, Heap::kUndefinedValueRootIndex);
3178 __ jmp(&done);
3179
3180 NopRuntimeCallHelper call_helper;
3181 generator.GenerateSlow(masm_, NOT_PART_OF_IC_HANDLER, call_helper);
3182
3183 __ bind(&done);
3184 context()->Plug(result);
3185}
3186
3187
3188void FullCodeGenerator::EmitStringCharAt(CallRuntime* expr) {
3189 ZoneList<Expression*>* args = expr->arguments();
3190 DCHECK(args->length() == 2);
3191 VisitForStackValue(args->at(0));
3192 VisitForAccumulatorValue(args->at(1));
3193
3194 Register object = r1;
3195 Register index = r0;
3196 Register scratch = r3;
3197 Register result = r0;
3198
Ben Murdoch097c5b22016-05-18 11:27:45 +01003199 PopOperand(object);
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00003200
3201 Label need_conversion;
3202 Label index_out_of_range;
3203 Label done;
3204 StringCharAtGenerator generator(object,
3205 index,
3206 scratch,
3207 result,
3208 &need_conversion,
3209 &need_conversion,
3210 &index_out_of_range,
3211 STRING_INDEX_IS_NUMBER);
3212 generator.GenerateFast(masm_);
3213 __ jmp(&done);
3214
3215 __ bind(&index_out_of_range);
3216 // When the index is out of range, the spec requires us to return
3217 // the empty string.
3218 __ LoadRoot(result, Heap::kempty_stringRootIndex);
3219 __ jmp(&done);
3220
3221 __ bind(&need_conversion);
3222 // Move smi zero into the result register, which will trigger
3223 // conversion.
3224 __ mov(result, Operand(Smi::FromInt(0)));
3225 __ jmp(&done);
3226
3227 NopRuntimeCallHelper call_helper;
3228 generator.GenerateSlow(masm_, NOT_PART_OF_IC_HANDLER, call_helper);
3229
3230 __ bind(&done);
3231 context()->Plug(result);
3232}
3233
3234
3235void FullCodeGenerator::EmitCall(CallRuntime* expr) {
3236 ZoneList<Expression*>* args = expr->arguments();
3237 DCHECK_LE(2, args->length());
3238 // Push target, receiver and arguments onto the stack.
3239 for (Expression* const arg : *args) {
3240 VisitForStackValue(arg);
3241 }
3242 PrepareForBailoutForId(expr->CallId(), NO_REGISTERS);
3243 // Move target to r1.
3244 int const argc = args->length() - 2;
3245 __ ldr(r1, MemOperand(sp, (argc + 1) * kPointerSize));
3246 // Call the target.
3247 __ mov(r0, Operand(argc));
3248 __ Call(isolate()->builtins()->Call(), RelocInfo::CODE_TARGET);
Ben Murdoch097c5b22016-05-18 11:27:45 +01003249 OperandStackDepthDecrement(argc + 1);
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00003250 // Restore context register.
3251 __ ldr(cp, MemOperand(fp, StandardFrameConstants::kContextOffset));
3252 // Discard the function left on TOS.
3253 context()->DropAndPlug(1, r0);
3254}
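// A rough sketch of the stack right before the Call builtin is invoked, given
// the (target, receiver, ...args) list pushed above and argc == length - 2:
//
//   sp[(argc + 1) * kPointerSize] : target   (also copied into r1)
//   sp[argc * kPointerSize]       : receiver
//   sp[(argc - 1)..0]             : arguments, left to right
//   r0 : argc
//
// The builtin consumes the receiver and arguments; the target left on top of
// the stack is dropped by DropAndPlug(1, r0).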
3255
3256
3257void FullCodeGenerator::EmitHasCachedArrayIndex(CallRuntime* expr) {
3258 ZoneList<Expression*>* args = expr->arguments();
3259 VisitForAccumulatorValue(args->at(0));
3260
3261 Label materialize_true, materialize_false;
3262 Label* if_true = NULL;
3263 Label* if_false = NULL;
3264 Label* fall_through = NULL;
3265 context()->PrepareTest(&materialize_true, &materialize_false,
3266 &if_true, &if_false, &fall_through);
3267
3268 __ ldr(r0, FieldMemOperand(r0, String::kHashFieldOffset));
3269 __ tst(r0, Operand(String::kContainsCachedArrayIndexMask));
3270 PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
3271 Split(eq, if_true, if_false, fall_through);
3272
3273 context()->Plug(if_true, if_false);
3274}
3275
3276
3277void FullCodeGenerator::EmitGetCachedArrayIndex(CallRuntime* expr) {
3278 ZoneList<Expression*>* args = expr->arguments();
3279 DCHECK(args->length() == 1);
3280 VisitForAccumulatorValue(args->at(0));
3281
3282 __ AssertString(r0);
3283
3284 __ ldr(r0, FieldMemOperand(r0, String::kHashFieldOffset));
3285 __ IndexFromHash(r0, r0);
3286
3287 context()->Plug(r0);
3288}
3289
3290
3291void FullCodeGenerator::EmitGetSuperConstructor(CallRuntime* expr) {
3292 ZoneList<Expression*>* args = expr->arguments();
3293 DCHECK_EQ(1, args->length());
3294 VisitForAccumulatorValue(args->at(0));
3295 __ AssertFunction(r0);
3296 __ ldr(r0, FieldMemOperand(r0, HeapObject::kMapOffset));
3297 __ ldr(r0, FieldMemOperand(r0, Map::kPrototypeOffset));
3298 context()->Plug(r0);
3299}
3300
Ben Murdochda12d292016-06-02 14:46:10 +01003301void FullCodeGenerator::EmitGetOrdinaryHasInstance(CallRuntime* expr) {
3302 DCHECK_EQ(0, expr->arguments()->length());
3303 __ LoadNativeContextSlot(Context::ORDINARY_HAS_INSTANCE_INDEX, r0);
3304 context()->Plug(r0);
3305}
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00003306
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00003307void FullCodeGenerator::EmitDebugIsActive(CallRuntime* expr) {
3308 DCHECK(expr->arguments()->length() == 0);
3309 ExternalReference debug_is_active =
3310 ExternalReference::debug_is_active_address(isolate());
3311 __ mov(ip, Operand(debug_is_active));
3312 __ ldrb(r0, MemOperand(ip));
3313 __ SmiTag(r0);
3314 context()->Plug(r0);
3315}
3316
3317
3318void FullCodeGenerator::EmitCreateIterResultObject(CallRuntime* expr) {
3319 ZoneList<Expression*>* args = expr->arguments();
3320 DCHECK_EQ(2, args->length());
3321 VisitForStackValue(args->at(0));
3322 VisitForStackValue(args->at(1));
3323
3324 Label runtime, done;
3325
3326 __ Allocate(JSIteratorResult::kSize, r0, r2, r3, &runtime, TAG_OBJECT);
3327 __ LoadNativeContextSlot(Context::ITERATOR_RESULT_MAP_INDEX, r1);
3328 __ pop(r3);
3329 __ pop(r2);
3330 __ LoadRoot(r4, Heap::kEmptyFixedArrayRootIndex);
3331 __ str(r1, FieldMemOperand(r0, HeapObject::kMapOffset));
3332 __ str(r4, FieldMemOperand(r0, JSObject::kPropertiesOffset));
3333 __ str(r4, FieldMemOperand(r0, JSObject::kElementsOffset));
3334 __ str(r2, FieldMemOperand(r0, JSIteratorResult::kValueOffset));
3335 __ str(r3, FieldMemOperand(r0, JSIteratorResult::kDoneOffset));
3336 STATIC_ASSERT(JSIteratorResult::kSize == 5 * kPointerSize);
3337 __ b(&done);
3338
3339 __ bind(&runtime);
Ben Murdoch097c5b22016-05-18 11:27:45 +01003340 CallRuntimeWithOperands(Runtime::kCreateIterResultObject);
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00003341
3342 __ bind(&done);
3343 context()->Plug(r0);
3344}
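// The fast path above builds the iterator result inline; schematically the
// allocated object is five pointers (matching JSIteratorResult::kSize):
//
//   [ map | properties (empty fixed array) | elements (empty fixed array)
//     | value | done ]
//
// with value (r2) and done (r3) popped from the stack. If allocation fails,
// the slow path calls Runtime::kCreateIterResultObject with the same operands.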
3345
3346
3347void FullCodeGenerator::EmitLoadJSRuntimeFunction(CallRuntime* expr) {
Ben Murdochda12d292016-06-02 14:46:10 +01003348 // Push function.
3349 __ LoadNativeContextSlot(expr->context_index(), r0);
3350 PushOperand(r0);
3351
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00003352 // Push undefined as the receiver.
3353 __ LoadRoot(r0, Heap::kUndefinedValueRootIndex);
Ben Murdoch097c5b22016-05-18 11:27:45 +01003354 PushOperand(r0);
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00003355}
3356
3357
3358void FullCodeGenerator::EmitCallJSRuntimeFunction(CallRuntime* expr) {
3359 ZoneList<Expression*>* args = expr->arguments();
3360 int arg_count = args->length();
3361
3362 SetCallPosition(expr);
3363 __ ldr(r1, MemOperand(sp, (arg_count + 1) * kPointerSize));
3364 __ mov(r0, Operand(arg_count));
3365 __ Call(isolate()->builtins()->Call(ConvertReceiverMode::kNullOrUndefined),
3366 RelocInfo::CODE_TARGET);
Ben Murdoch097c5b22016-05-18 11:27:45 +01003367 OperandStackDepthDecrement(arg_count + 1);
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00003368
Ben Murdochda12d292016-06-02 14:46:10 +01003369 // Restore context register.
3370 __ ldr(cp, MemOperand(fp, StandardFrameConstants::kContextOffset));
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00003371}
3372
3373
3374void FullCodeGenerator::VisitUnaryOperation(UnaryOperation* expr) {
3375 switch (expr->op()) {
3376 case Token::DELETE: {
3377 Comment cmnt(masm_, "[ UnaryOperation (DELETE)");
3378 Property* property = expr->expression()->AsProperty();
3379 VariableProxy* proxy = expr->expression()->AsVariableProxy();
3380
3381 if (property != NULL) {
3382 VisitForStackValue(property->obj());
3383 VisitForStackValue(property->key());
Ben Murdoch097c5b22016-05-18 11:27:45 +01003384 CallRuntimeWithOperands(is_strict(language_mode())
3385 ? Runtime::kDeleteProperty_Strict
3386 : Runtime::kDeleteProperty_Sloppy);
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00003387 context()->Plug(r0);
3388 } else if (proxy != NULL) {
3389 Variable* var = proxy->var();
  3390        // Deleting an unqualified identifier is disallowed in strict mode,
  3391        // but "delete this" is allowed.
3392 bool is_this = var->HasThisName(isolate());
3393 DCHECK(is_sloppy(language_mode()) || is_this);
3394 if (var->IsUnallocatedOrGlobalSlot()) {
3395 __ LoadGlobalObject(r2);
3396 __ mov(r1, Operand(var->name()));
3397 __ Push(r2, r1);
3398 __ CallRuntime(Runtime::kDeleteProperty_Sloppy);
3399 context()->Plug(r0);
3400 } else if (var->IsStackAllocated() || var->IsContextSlot()) {
3401 // Result of deleting non-global, non-dynamic variables is false.
3402 // The subexpression does not have side effects.
3403 context()->Plug(is_this);
3404 } else {
3405 // Non-global variable. Call the runtime to try to delete from the
3406 // context where the variable was introduced.
Ben Murdoch097c5b22016-05-18 11:27:45 +01003407 __ Push(var->name());
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00003408 __ CallRuntime(Runtime::kDeleteLookupSlot);
3409 context()->Plug(r0);
3410 }
3411 } else {
3412 // Result of deleting non-property, non-variable reference is true.
3413 // The subexpression may have side effects.
3414 VisitForEffect(expr->expression());
3415 context()->Plug(true);
3416 }
3417 break;
3418 }
3419
3420 case Token::VOID: {
3421 Comment cmnt(masm_, "[ UnaryOperation (VOID)");
3422 VisitForEffect(expr->expression());
3423 context()->Plug(Heap::kUndefinedValueRootIndex);
3424 break;
3425 }
3426
3427 case Token::NOT: {
3428 Comment cmnt(masm_, "[ UnaryOperation (NOT)");
3429 if (context()->IsEffect()) {
3430 // Unary NOT has no side effects so it's only necessary to visit the
3431 // subexpression. Match the optimizing compiler by not branching.
3432 VisitForEffect(expr->expression());
3433 } else if (context()->IsTest()) {
3434 const TestContext* test = TestContext::cast(context());
3435 // The labels are swapped for the recursive call.
3436 VisitForControl(expr->expression(),
3437 test->false_label(),
3438 test->true_label(),
3439 test->fall_through());
3440 context()->Plug(test->true_label(), test->false_label());
3441 } else {
3442 // We handle value contexts explicitly rather than simply visiting
3443 // for control and plugging the control flow into the context,
3444 // because we need to prepare a pair of extra administrative AST ids
3445 // for the optimizing compiler.
3446 DCHECK(context()->IsAccumulatorValue() || context()->IsStackValue());
3447 Label materialize_true, materialize_false, done;
3448 VisitForControl(expr->expression(),
3449 &materialize_false,
3450 &materialize_true,
3451 &materialize_true);
Ben Murdoch097c5b22016-05-18 11:27:45 +01003452 if (!context()->IsAccumulatorValue()) OperandStackDepthIncrement(1);
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00003453 __ bind(&materialize_true);
3454 PrepareForBailoutForId(expr->MaterializeTrueId(), NO_REGISTERS);
3455 __ LoadRoot(r0, Heap::kTrueValueRootIndex);
3456 if (context()->IsStackValue()) __ push(r0);
3457 __ jmp(&done);
3458 __ bind(&materialize_false);
3459 PrepareForBailoutForId(expr->MaterializeFalseId(), NO_REGISTERS);
3460 __ LoadRoot(r0, Heap::kFalseValueRootIndex);
3461 if (context()->IsStackValue()) __ push(r0);
3462 __ bind(&done);
3463 }
3464 break;
3465 }
3466
3467 case Token::TYPEOF: {
3468 Comment cmnt(masm_, "[ UnaryOperation (TYPEOF)");
3469 {
3470 AccumulatorValueContext context(this);
3471 VisitForTypeofValue(expr->expression());
3472 }
3473 __ mov(r3, r0);
3474 TypeofStub typeof_stub(isolate());
3475 __ CallStub(&typeof_stub);
3476 context()->Plug(r0);
3477 break;
3478 }
3479
3480 default:
3481 UNREACHABLE();
3482 }
3483}
3484
3485
3486void FullCodeGenerator::VisitCountOperation(CountOperation* expr) {
3487 DCHECK(expr->expression()->IsValidReferenceExpressionOrThis());
3488
3489 Comment cmnt(masm_, "[ CountOperation");
3490
3491 Property* prop = expr->expression()->AsProperty();
3492 LhsKind assign_type = Property::GetAssignType(prop);
3493
3494 // Evaluate expression and get value.
3495 if (assign_type == VARIABLE) {
3496 DCHECK(expr->expression()->AsVariableProxy()->var() != NULL);
3497 AccumulatorValueContext context(this);
3498 EmitVariableLoad(expr->expression()->AsVariableProxy());
3499 } else {
3500 // Reserve space for result of postfix operation.
3501 if (expr->is_postfix() && !context()->IsEffect()) {
3502 __ mov(ip, Operand(Smi::FromInt(0)));
Ben Murdoch097c5b22016-05-18 11:27:45 +01003503 PushOperand(ip);
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00003504 }
3505 switch (assign_type) {
3506 case NAMED_PROPERTY: {
3507 // Put the object both on the stack and in the register.
3508 VisitForStackValue(prop->obj());
3509 __ ldr(LoadDescriptor::ReceiverRegister(), MemOperand(sp, 0));
3510 EmitNamedPropertyLoad(prop);
3511 break;
3512 }
3513
3514 case NAMED_SUPER_PROPERTY: {
3515 VisitForStackValue(prop->obj()->AsSuperPropertyReference()->this_var());
3516 VisitForAccumulatorValue(
3517 prop->obj()->AsSuperPropertyReference()->home_object());
Ben Murdoch097c5b22016-05-18 11:27:45 +01003518 PushOperand(result_register());
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00003519 const Register scratch = r1;
3520 __ ldr(scratch, MemOperand(sp, kPointerSize));
Ben Murdoch097c5b22016-05-18 11:27:45 +01003521 PushOperand(scratch);
3522 PushOperand(result_register());
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00003523 EmitNamedSuperPropertyLoad(prop);
3524 break;
3525 }
3526
3527 case KEYED_SUPER_PROPERTY: {
3528 VisitForStackValue(prop->obj()->AsSuperPropertyReference()->this_var());
3529 VisitForStackValue(
3530 prop->obj()->AsSuperPropertyReference()->home_object());
3531 VisitForAccumulatorValue(prop->key());
Ben Murdoch097c5b22016-05-18 11:27:45 +01003532 PushOperand(result_register());
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00003533 const Register scratch = r1;
3534 __ ldr(scratch, MemOperand(sp, 2 * kPointerSize));
Ben Murdoch097c5b22016-05-18 11:27:45 +01003535 PushOperand(scratch);
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00003536 __ ldr(scratch, MemOperand(sp, 2 * kPointerSize));
Ben Murdoch097c5b22016-05-18 11:27:45 +01003537 PushOperand(scratch);
3538 PushOperand(result_register());
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00003539 EmitKeyedSuperPropertyLoad(prop);
3540 break;
3541 }
3542
3543 case KEYED_PROPERTY: {
3544 VisitForStackValue(prop->obj());
3545 VisitForStackValue(prop->key());
3546 __ ldr(LoadDescriptor::ReceiverRegister(),
3547 MemOperand(sp, 1 * kPointerSize));
3548 __ ldr(LoadDescriptor::NameRegister(), MemOperand(sp, 0));
3549 EmitKeyedPropertyLoad(prop);
3550 break;
3551 }
3552
3553 case VARIABLE:
3554 UNREACHABLE();
3555 }
3556 }
3557
3558 // We need a second deoptimization point after loading the value
  3559  // in case evaluating the property load may have a side effect.
3560 if (assign_type == VARIABLE) {
3561 PrepareForBailout(expr->expression(), TOS_REG);
3562 } else {
3563 PrepareForBailoutForId(prop->LoadId(), TOS_REG);
3564 }
3565
3566 // Inline smi case if we are in a loop.
3567 Label stub_call, done;
3568 JumpPatchSite patch_site(masm_);
3569
3570 int count_value = expr->op() == Token::INC ? 1 : -1;
3571 if (ShouldInlineSmiCase(expr->op())) {
3572 Label slow;
3573 patch_site.EmitJumpIfNotSmi(r0, &slow);
3574
3575 // Save result for postfix expressions.
3576 if (expr->is_postfix()) {
3577 if (!context()->IsEffect()) {
3578 // Save the result on the stack. If we have a named or keyed property
3579 // we store the result under the receiver that is currently on top
3580 // of the stack.
3581 switch (assign_type) {
3582 case VARIABLE:
3583 __ push(r0);
3584 break;
3585 case NAMED_PROPERTY:
3586 __ str(r0, MemOperand(sp, kPointerSize));
3587 break;
3588 case NAMED_SUPER_PROPERTY:
3589 __ str(r0, MemOperand(sp, 2 * kPointerSize));
3590 break;
3591 case KEYED_PROPERTY:
3592 __ str(r0, MemOperand(sp, 2 * kPointerSize));
3593 break;
3594 case KEYED_SUPER_PROPERTY:
3595 __ str(r0, MemOperand(sp, 3 * kPointerSize));
3596 break;
3597 }
3598 }
3599 }
3600
3601 __ add(r0, r0, Operand(Smi::FromInt(count_value)), SetCC);
3602 __ b(vc, &done);
3603 // Call stub. Undo operation first.
3604 __ sub(r0, r0, Operand(Smi::FromInt(count_value)));
3605 __ jmp(&stub_call);
3606 __ bind(&slow);
3607 }
Ben Murdochda12d292016-06-02 14:46:10 +01003608
3609 // Convert old value into a number.
3610 ToNumberStub convert_stub(isolate());
3611 __ CallStub(&convert_stub);
3612 PrepareForBailoutForId(expr->ToNumberId(), TOS_REG);
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00003613
3614 // Save result for postfix expressions.
3615 if (expr->is_postfix()) {
3616 if (!context()->IsEffect()) {
3617 // Save the result on the stack. If we have a named or keyed property
3618 // we store the result under the receiver that is currently on top
3619 // of the stack.
3620 switch (assign_type) {
3621 case VARIABLE:
Ben Murdoch097c5b22016-05-18 11:27:45 +01003622 PushOperand(r0);
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00003623 break;
3624 case NAMED_PROPERTY:
3625 __ str(r0, MemOperand(sp, kPointerSize));
3626 break;
3627 case NAMED_SUPER_PROPERTY:
3628 __ str(r0, MemOperand(sp, 2 * kPointerSize));
3629 break;
3630 case KEYED_PROPERTY:
3631 __ str(r0, MemOperand(sp, 2 * kPointerSize));
3632 break;
3633 case KEYED_SUPER_PROPERTY:
3634 __ str(r0, MemOperand(sp, 3 * kPointerSize));
3635 break;
3636 }
3637 }
3638 }
3639
3640
3641 __ bind(&stub_call);
3642 __ mov(r1, r0);
3643 __ mov(r0, Operand(Smi::FromInt(count_value)));
3644
3645 SetExpressionPosition(expr);
3646
Ben Murdoch097c5b22016-05-18 11:27:45 +01003647 Handle<Code> code = CodeFactory::BinaryOpIC(isolate(), Token::ADD).code();
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00003648 CallIC(code, expr->CountBinOpFeedbackId());
3649 patch_site.EmitPatchInfo();
3650 __ bind(&done);
3651
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00003652 // Store the value returned in r0.
3653 switch (assign_type) {
3654 case VARIABLE:
3655 if (expr->is_postfix()) {
3656 { EffectContext context(this);
3657 EmitVariableAssignment(expr->expression()->AsVariableProxy()->var(),
3658 Token::ASSIGN, expr->CountSlot());
3659 PrepareForBailoutForId(expr->AssignmentId(), TOS_REG);
3660 context.Plug(r0);
3661 }
  3662        // For all contexts except EffectContext we have the result on
3663 // top of the stack.
3664 if (!context()->IsEffect()) {
3665 context()->PlugTOS();
3666 }
3667 } else {
3668 EmitVariableAssignment(expr->expression()->AsVariableProxy()->var(),
3669 Token::ASSIGN, expr->CountSlot());
3670 PrepareForBailoutForId(expr->AssignmentId(), TOS_REG);
3671 context()->Plug(r0);
3672 }
3673 break;
3674 case NAMED_PROPERTY: {
3675 __ mov(StoreDescriptor::NameRegister(),
3676 Operand(prop->key()->AsLiteral()->value()));
Ben Murdoch097c5b22016-05-18 11:27:45 +01003677 PopOperand(StoreDescriptor::ReceiverRegister());
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00003678 EmitLoadStoreICSlot(expr->CountSlot());
3679 CallStoreIC();
3680 PrepareForBailoutForId(expr->AssignmentId(), TOS_REG);
3681 if (expr->is_postfix()) {
3682 if (!context()->IsEffect()) {
3683 context()->PlugTOS();
3684 }
3685 } else {
3686 context()->Plug(r0);
3687 }
3688 break;
3689 }
3690 case NAMED_SUPER_PROPERTY: {
3691 EmitNamedSuperPropertyStore(prop);
3692 if (expr->is_postfix()) {
3693 if (!context()->IsEffect()) {
3694 context()->PlugTOS();
3695 }
3696 } else {
3697 context()->Plug(r0);
3698 }
3699 break;
3700 }
3701 case KEYED_SUPER_PROPERTY: {
3702 EmitKeyedSuperPropertyStore(prop);
3703 if (expr->is_postfix()) {
3704 if (!context()->IsEffect()) {
3705 context()->PlugTOS();
3706 }
3707 } else {
3708 context()->Plug(r0);
3709 }
3710 break;
3711 }
3712 case KEYED_PROPERTY: {
Ben Murdoch097c5b22016-05-18 11:27:45 +01003713 PopOperands(StoreDescriptor::ReceiverRegister(),
3714 StoreDescriptor::NameRegister());
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00003715 Handle<Code> ic =
3716 CodeFactory::KeyedStoreIC(isolate(), language_mode()).code();
3717 EmitLoadStoreICSlot(expr->CountSlot());
3718 CallIC(ic);
3719 PrepareForBailoutForId(expr->AssignmentId(), TOS_REG);
3720 if (expr->is_postfix()) {
3721 if (!context()->IsEffect()) {
3722 context()->PlugTOS();
3723 }
3724 } else {
3725 context()->Plug(r0);
3726 }
3727 break;
3728 }
3729 }
3730}
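// For the inlined smi case above, note that Smi::FromInt(count_value) on a
// 32-bit target is count_value << 1, so the fast path for x++ is roughly:
//
//   adds r0, r0, #2       ; add smi 1 and set the overflow flag
//   bvc  done             ; no overflow -> done
//   sub  r0, r0, #2       ; undo and fall through to the BinaryOpIC call
//
// and x-- uses -2 instead.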
3731
3732
3733void FullCodeGenerator::EmitLiteralCompareTypeof(Expression* expr,
3734 Expression* sub_expr,
3735 Handle<String> check) {
3736 Label materialize_true, materialize_false;
3737 Label* if_true = NULL;
3738 Label* if_false = NULL;
3739 Label* fall_through = NULL;
3740 context()->PrepareTest(&materialize_true, &materialize_false,
3741 &if_true, &if_false, &fall_through);
3742
3743 { AccumulatorValueContext context(this);
3744 VisitForTypeofValue(sub_expr);
3745 }
3746 PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
3747
3748 Factory* factory = isolate()->factory();
3749 if (String::Equals(check, factory->number_string())) {
3750 __ JumpIfSmi(r0, if_true);
3751 __ ldr(r0, FieldMemOperand(r0, HeapObject::kMapOffset));
3752 __ LoadRoot(ip, Heap::kHeapNumberMapRootIndex);
3753 __ cmp(r0, ip);
3754 Split(eq, if_true, if_false, fall_through);
3755 } else if (String::Equals(check, factory->string_string())) {
3756 __ JumpIfSmi(r0, if_false);
3757 __ CompareObjectType(r0, r0, r1, FIRST_NONSTRING_TYPE);
3758 Split(lt, if_true, if_false, fall_through);
3759 } else if (String::Equals(check, factory->symbol_string())) {
3760 __ JumpIfSmi(r0, if_false);
3761 __ CompareObjectType(r0, r0, r1, SYMBOL_TYPE);
3762 Split(eq, if_true, if_false, fall_through);
3763 } else if (String::Equals(check, factory->boolean_string())) {
3764 __ CompareRoot(r0, Heap::kTrueValueRootIndex);
3765 __ b(eq, if_true);
3766 __ CompareRoot(r0, Heap::kFalseValueRootIndex);
3767 Split(eq, if_true, if_false, fall_through);
3768 } else if (String::Equals(check, factory->undefined_string())) {
Ben Murdoch097c5b22016-05-18 11:27:45 +01003769 __ CompareRoot(r0, Heap::kNullValueRootIndex);
3770 __ b(eq, if_false);
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00003771 __ JumpIfSmi(r0, if_false);
3772 // Check for undetectable objects => true.
3773 __ ldr(r0, FieldMemOperand(r0, HeapObject::kMapOffset));
3774 __ ldrb(r1, FieldMemOperand(r0, Map::kBitFieldOffset));
3775 __ tst(r1, Operand(1 << Map::kIsUndetectable));
3776 Split(ne, if_true, if_false, fall_through);
3777
3778 } else if (String::Equals(check, factory->function_string())) {
3779 __ JumpIfSmi(r0, if_false);
3780 __ ldr(r0, FieldMemOperand(r0, HeapObject::kMapOffset));
3781 __ ldrb(r1, FieldMemOperand(r0, Map::kBitFieldOffset));
3782 __ and_(r1, r1,
3783 Operand((1 << Map::kIsCallable) | (1 << Map::kIsUndetectable)));
3784 __ cmp(r1, Operand(1 << Map::kIsCallable));
3785 Split(eq, if_true, if_false, fall_through);
3786 } else if (String::Equals(check, factory->object_string())) {
3787 __ JumpIfSmi(r0, if_false);
3788 __ CompareRoot(r0, Heap::kNullValueRootIndex);
3789 __ b(eq, if_true);
3790 STATIC_ASSERT(LAST_JS_RECEIVER_TYPE == LAST_TYPE);
3791 __ CompareObjectType(r0, r0, r1, FIRST_JS_RECEIVER_TYPE);
3792 __ b(lt, if_false);
3793 // Check for callable or undetectable objects => false.
3794 __ ldrb(r1, FieldMemOperand(r0, Map::kBitFieldOffset));
3795 __ tst(r1, Operand((1 << Map::kIsCallable) | (1 << Map::kIsUndetectable)));
3796 Split(eq, if_true, if_false, fall_through);
3797// clang-format off
3798#define SIMD128_TYPE(TYPE, Type, type, lane_count, lane_type) \
3799 } else if (String::Equals(check, factory->type##_string())) { \
3800 __ JumpIfSmi(r0, if_false); \
3801 __ ldr(r0, FieldMemOperand(r0, HeapObject::kMapOffset)); \
3802 __ CompareRoot(r0, Heap::k##Type##MapRootIndex); \
3803 Split(eq, if_true, if_false, fall_through);
3804 SIMD128_TYPES(SIMD128_TYPE)
3805#undef SIMD128_TYPE
3806 // clang-format on
3807 } else {
3808 if (if_false != fall_through) __ jmp(if_false);
3809 }
3810 context()->Plug(if_true, if_false);
3811}
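// One subtlety in the typeof "undefined" branch above: null is explicitly
// rejected first, and the remaining test is the map's undetectable bit, so in
// JS terms roughly:
//
//   typeof x == "undefined"  <=>  x is undefined or an undetectable object
//                                 (the classic example being document.all),
//                                 but never null.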
3812
3813
3814void FullCodeGenerator::VisitCompareOperation(CompareOperation* expr) {
3815 Comment cmnt(masm_, "[ CompareOperation");
3816 SetExpressionPosition(expr);
3817
3818 // First we try a fast inlined version of the compare when one of
3819 // the operands is a literal.
3820 if (TryLiteralCompare(expr)) return;
3821
3822 // Always perform the comparison for its control flow. Pack the result
3823 // into the expression's context after the comparison is performed.
3824 Label materialize_true, materialize_false;
3825 Label* if_true = NULL;
3826 Label* if_false = NULL;
3827 Label* fall_through = NULL;
3828 context()->PrepareTest(&materialize_true, &materialize_false,
3829 &if_true, &if_false, &fall_through);
3830
3831 Token::Value op = expr->op();
3832 VisitForStackValue(expr->left());
3833 switch (op) {
3834 case Token::IN:
3835 VisitForStackValue(expr->right());
Ben Murdoch097c5b22016-05-18 11:27:45 +01003836 CallRuntimeWithOperands(Runtime::kHasProperty);
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00003837 PrepareForBailoutBeforeSplit(expr, false, NULL, NULL);
3838 __ CompareRoot(r0, Heap::kTrueValueRootIndex);
3839 Split(eq, if_true, if_false, fall_through);
3840 break;
3841
3842 case Token::INSTANCEOF: {
3843 VisitForAccumulatorValue(expr->right());
Ben Murdoch097c5b22016-05-18 11:27:45 +01003844 PopOperand(r1);
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00003845 InstanceOfStub stub(isolate());
3846 __ CallStub(&stub);
3847 PrepareForBailoutBeforeSplit(expr, false, NULL, NULL);
3848 __ CompareRoot(r0, Heap::kTrueValueRootIndex);
3849 Split(eq, if_true, if_false, fall_through);
3850 break;
3851 }
3852
3853 default: {
3854 VisitForAccumulatorValue(expr->right());
3855 Condition cond = CompareIC::ComputeCondition(op);
Ben Murdoch097c5b22016-05-18 11:27:45 +01003856 PopOperand(r1);
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00003857
3858 bool inline_smi_code = ShouldInlineSmiCase(op);
3859 JumpPatchSite patch_site(masm_);
3860 if (inline_smi_code) {
3861 Label slow_case;
3862 __ orr(r2, r0, Operand(r1));
3863 patch_site.EmitJumpIfNotSmi(r2, &slow_case);
3864 __ cmp(r1, r0);
3865 Split(cond, if_true, if_false, NULL);
3866 __ bind(&slow_case);
3867 }
3868
Ben Murdoch097c5b22016-05-18 11:27:45 +01003869 Handle<Code> ic = CodeFactory::CompareIC(isolate(), op).code();
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00003870 CallIC(ic, expr->CompareOperationFeedbackId());
3871 patch_site.EmitPatchInfo();
3872 PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
3873 __ cmp(r0, Operand::Zero());
3874 Split(cond, if_true, if_false, fall_through);
3875 }
3876 }
3877
3878 // Convert the result of the comparison into one expected for this
3879 // expression's context.
3880 context()->Plug(if_true, if_false);
3881}
3882
3883
3884void FullCodeGenerator::EmitLiteralCompareNil(CompareOperation* expr,
3885 Expression* sub_expr,
3886 NilValue nil) {
3887 Label materialize_true, materialize_false;
3888 Label* if_true = NULL;
3889 Label* if_false = NULL;
3890 Label* fall_through = NULL;
3891 context()->PrepareTest(&materialize_true, &materialize_false,
3892 &if_true, &if_false, &fall_through);
3893
3894 VisitForAccumulatorValue(sub_expr);
3895 PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
3896 if (expr->op() == Token::EQ_STRICT) {
3897 Heap::RootListIndex nil_value = nil == kNullValue ?
3898 Heap::kNullValueRootIndex :
3899 Heap::kUndefinedValueRootIndex;
3900 __ LoadRoot(r1, nil_value);
3901 __ cmp(r0, r1);
3902 Split(eq, if_true, if_false, fall_through);
3903 } else {
Ben Murdochda12d292016-06-02 14:46:10 +01003904 __ JumpIfSmi(r0, if_false);
3905 __ ldr(r0, FieldMemOperand(r0, HeapObject::kMapOffset));
3906 __ ldrb(r1, FieldMemOperand(r0, Map::kBitFieldOffset));
3907 __ tst(r1, Operand(1 << Map::kIsUndetectable));
3908 Split(ne, if_true, if_false, fall_through);
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00003909 }
3910 context()->Plug(if_true, if_false);
3911}
3912
3913
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00003914Register FullCodeGenerator::result_register() {
3915 return r0;
3916}
3917
3918
3919Register FullCodeGenerator::context_register() {
3920 return cp;
3921}
3922
Ben Murdochda12d292016-06-02 14:46:10 +01003923void FullCodeGenerator::LoadFromFrameField(int frame_offset, Register value) {
3924 DCHECK_EQ(POINTER_SIZE_ALIGN(frame_offset), frame_offset);
3925 __ ldr(value, MemOperand(fp, frame_offset));
3926}
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00003927
3928void FullCodeGenerator::StoreToFrameField(int frame_offset, Register value) {
3929 DCHECK_EQ(POINTER_SIZE_ALIGN(frame_offset), frame_offset);
3930 __ str(value, MemOperand(fp, frame_offset));
3931}
3932
3933
3934void FullCodeGenerator::LoadContextField(Register dst, int context_index) {
3935 __ ldr(dst, ContextMemOperand(cp, context_index));
3936}
3937
3938
3939void FullCodeGenerator::PushFunctionArgumentForContextAllocation() {
3940 Scope* closure_scope = scope()->ClosureScope();
3941 if (closure_scope->is_script_scope() ||
3942 closure_scope->is_module_scope()) {
3943 // Contexts nested in the native context have a canonical empty function
3944 // as their closure, not the anonymous closure containing the global
3945 // code.
3946 __ LoadNativeContextSlot(Context::CLOSURE_INDEX, ip);
3947 } else if (closure_scope->is_eval_scope()) {
3948 // Contexts created by a call to eval have the same closure as the
3949 // context calling eval, not the anonymous closure containing the eval
3950 // code. Fetch it from the context.
3951 __ ldr(ip, ContextMemOperand(cp, Context::CLOSURE_INDEX));
3952 } else {
3953 DCHECK(closure_scope->is_function_scope());
3954 __ ldr(ip, MemOperand(fp, JavaScriptFrameConstants::kFunctionOffset));
3955 }
Ben Murdoch097c5b22016-05-18 11:27:45 +01003956 PushOperand(ip);
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00003957}
3958
3959
3960// ----------------------------------------------------------------------------
3961// Non-local control flow support.
3962
3963void FullCodeGenerator::EnterFinallyBlock() {
3964 DCHECK(!result_register().is(r1));
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00003965 // Store pending message while executing finally block.
3966 ExternalReference pending_message_obj =
3967 ExternalReference::address_of_pending_message_obj(isolate());
3968 __ mov(ip, Operand(pending_message_obj));
3969 __ ldr(r1, MemOperand(ip));
Ben Murdoch097c5b22016-05-18 11:27:45 +01003970 PushOperand(r1);
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00003971
3972 ClearPendingMessage();
3973}
3974
3975
3976void FullCodeGenerator::ExitFinallyBlock() {
3977 DCHECK(!result_register().is(r1));
3978 // Restore pending message from stack.
Ben Murdoch097c5b22016-05-18 11:27:45 +01003979 PopOperand(r1);
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00003980 ExternalReference pending_message_obj =
3981 ExternalReference::address_of_pending_message_obj(isolate());
3982 __ mov(ip, Operand(pending_message_obj));
3983 __ str(r1, MemOperand(ip));
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00003984}
3985
3986
3987void FullCodeGenerator::ClearPendingMessage() {
3988 DCHECK(!result_register().is(r1));
3989 ExternalReference pending_message_obj =
3990 ExternalReference::address_of_pending_message_obj(isolate());
3991 __ LoadRoot(r1, Heap::kTheHoleValueRootIndex);
3992 __ mov(ip, Operand(pending_message_obj));
3993 __ str(r1, MemOperand(ip));
3994}
3995
3996
Ben Murdoch097c5b22016-05-18 11:27:45 +01003997void FullCodeGenerator::DeferredCommands::EmitCommands() {
3998 DCHECK(!result_register().is(r1));
3999 __ Pop(result_register()); // Restore the accumulator.
4000 __ Pop(r1); // Get the token.
4001 for (DeferredCommand cmd : commands_) {
4002 Label skip;
4003 __ cmp(r1, Operand(Smi::FromInt(cmd.token)));
4004 __ b(ne, &skip);
4005 switch (cmd.command) {
4006 case kReturn:
4007 codegen_->EmitUnwindAndReturn();
4008 break;
4009 case kThrow:
4010 __ Push(result_register());
4011 __ CallRuntime(Runtime::kReThrow);
4012 break;
4013 case kContinue:
4014 codegen_->EmitContinue(cmd.target);
4015 break;
4016 case kBreak:
4017 codegen_->EmitBreak(cmd.target);
4018 break;
4019 }
4020 __ bind(&skip);
4021 }
4022}
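// Schematically, the dispatch above replays a control-flow command that was
// deferred across an intervening block (typically a finally clause): the
// accumulator and the token are popped, each recorded command compares its
// token, and on a match performs the deferred return / throw / break /
// continue; unmatched tokens fall through to the next check.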
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00004023
4024#undef __
4025
4026
4027static Address GetInterruptImmediateLoadAddress(Address pc) {
4028 Address load_address = pc - 2 * Assembler::kInstrSize;
4029 if (!FLAG_enable_embedded_constant_pool) {
4030 DCHECK(Assembler::IsLdrPcImmediateOffset(Memory::int32_at(load_address)));
4031 } else if (Assembler::IsLdrPpRegOffset(Memory::int32_at(load_address))) {
4032 // This is an extended constant pool lookup.
4033 if (CpuFeatures::IsSupported(ARMv7)) {
4034 load_address -= 2 * Assembler::kInstrSize;
4035 DCHECK(Assembler::IsMovW(Memory::int32_at(load_address)));
4036 DCHECK(Assembler::IsMovT(
4037 Memory::int32_at(load_address + Assembler::kInstrSize)));
4038 } else {
4039 load_address -= 4 * Assembler::kInstrSize;
4040 DCHECK(Assembler::IsMovImmed(Memory::int32_at(load_address)));
4041 DCHECK(Assembler::IsOrrImmed(
4042 Memory::int32_at(load_address + Assembler::kInstrSize)));
4043 DCHECK(Assembler::IsOrrImmed(
4044 Memory::int32_at(load_address + 2 * Assembler::kInstrSize)));
4045 DCHECK(Assembler::IsOrrImmed(
4046 Memory::int32_at(load_address + 3 * Assembler::kInstrSize)));
4047 }
4048 } else if (CpuFeatures::IsSupported(ARMv7) &&
4049 Assembler::IsMovT(Memory::int32_at(load_address))) {
4050 // This is a movw / movt immediate load.
4051 load_address -= Assembler::kInstrSize;
4052 DCHECK(Assembler::IsMovW(Memory::int32_at(load_address)));
4053 } else if (!CpuFeatures::IsSupported(ARMv7) &&
4054 Assembler::IsOrrImmed(Memory::int32_at(load_address))) {
4055 // This is a mov / orr immediate load.
4056 load_address -= 3 * Assembler::kInstrSize;
4057 DCHECK(Assembler::IsMovImmed(Memory::int32_at(load_address)));
4058 DCHECK(Assembler::IsOrrImmed(
4059 Memory::int32_at(load_address + Assembler::kInstrSize)));
4060 DCHECK(Assembler::IsOrrImmed(
4061 Memory::int32_at(load_address + 2 * Assembler::kInstrSize)));
4062 } else {
4063 // This is a small constant pool lookup.
4064 DCHECK(Assembler::IsLdrPpImmediateOffset(Memory::int32_at(load_address)));
4065 }
4066 return load_address;
4067}
4068
4069
4070void BackEdgeTable::PatchAt(Code* unoptimized_code,
4071 Address pc,
4072 BackEdgeState target_state,
4073 Code* replacement_code) {
4074 Address pc_immediate_load_address = GetInterruptImmediateLoadAddress(pc);
4075 Address branch_address = pc_immediate_load_address - Assembler::kInstrSize;
4076 Isolate* isolate = unoptimized_code->GetIsolate();
4077 CodePatcher patcher(isolate, branch_address, 1);
4078 switch (target_state) {
4079 case INTERRUPT:
4080 {
4081 // <decrement profiling counter>
4082 // bpl ok
4083 // ; load interrupt stub address into ip - either of (for ARMv7):
4084 // ; <small cp load> | <extended cp load> | <immediate load>
4085 // ldr ip, [pc/pp, #imm] | movw ip, #imm | movw ip, #imm
4086 // | movt ip, #imm | movw ip, #imm
4087 // | ldr ip, [pp, ip]
4088 // ; or (for ARMv6):
4089 // ; <small cp load> | <extended cp load> | <immediate load>
4090 // ldr ip, [pc/pp, #imm] | mov ip, #imm | mov ip, #imm
4091 // | orr ip, ip, #imm> | orr ip, ip, #imm
4092 // | orr ip, ip, #imm> | orr ip, ip, #imm
4093 // | orr ip, ip, #imm> | orr ip, ip, #imm
4094 // blx ip
4095 // <reset profiling counter>
4096 // ok-label
4097
  4098      // Calculate the branch offset to the ok-label - this is the difference
  4099      // between the branch address and |pc| (which points at <blx ip>) plus
  4100      // the length of the profiling counter reset sequence.
4101 int branch_offset = pc - Instruction::kPCReadOffset - branch_address +
4102 kProfileCounterResetSequenceLength;
4103 patcher.masm()->b(branch_offset, pl);
4104 break;
4105 }
4106 case ON_STACK_REPLACEMENT:
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00004107 // <decrement profiling counter>
4108 // mov r0, r0 (NOP)
4109 // ; load on-stack replacement address into ip - either of (for ARMv7):
4110 // ; <small cp load> | <extended cp load> | <immediate load>
4111 // ldr ip, [pc/pp, #imm] | movw ip, #imm | movw ip, #imm
4112 // | movt ip, #imm> | movw ip, #imm
4113 // | ldr ip, [pp, ip]
4114 // ; or (for ARMv6):
4115 // ; <small cp load> | <extended cp load> | <immediate load>
4116 // ldr ip, [pc/pp, #imm] | mov ip, #imm | mov ip, #imm
4117 // | orr ip, ip, #imm> | orr ip, ip, #imm
4118 // | orr ip, ip, #imm> | orr ip, ip, #imm
4119 // | orr ip, ip, #imm> | orr ip, ip, #imm
4120 // blx ip
4121 // <reset profiling counter>
4122 // ok-label
4123 patcher.masm()->nop();
4124 break;
4125 }
4126
4127 // Replace the call address.
4128 Assembler::set_target_address_at(isolate, pc_immediate_load_address,
4129 unoptimized_code, replacement_code->entry());
4130
4131 unoptimized_code->GetHeap()->incremental_marking()->RecordCodeTargetPatch(
4132 unoptimized_code, pc_immediate_load_address, replacement_code);
4133}
4134
4135
4136BackEdgeTable::BackEdgeState BackEdgeTable::GetBackEdgeState(
4137 Isolate* isolate,
4138 Code* unoptimized_code,
4139 Address pc) {
4140 DCHECK(Assembler::IsBlxIp(Memory::int32_at(pc - Assembler::kInstrSize)));
4141
4142 Address pc_immediate_load_address = GetInterruptImmediateLoadAddress(pc);
4143 Address branch_address = pc_immediate_load_address - Assembler::kInstrSize;
Ben Murdochda12d292016-06-02 14:46:10 +01004144#ifdef DEBUG
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00004145 Address interrupt_address = Assembler::target_address_at(
4146 pc_immediate_load_address, unoptimized_code);
Ben Murdochda12d292016-06-02 14:46:10 +01004147#endif
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00004148
4149 if (Assembler::IsBranch(Assembler::instr_at(branch_address))) {
4150 DCHECK(interrupt_address ==
4151 isolate->builtins()->InterruptCheck()->entry());
4152 return INTERRUPT;
4153 }
4154
4155 DCHECK(Assembler::IsNop(Assembler::instr_at(branch_address)));
4156
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00004157 DCHECK(interrupt_address ==
Ben Murdochda12d292016-06-02 14:46:10 +01004158 isolate->builtins()->OnStackReplacement()->entry());
4159 return ON_STACK_REPLACEMENT;
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00004160}
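// In short, the back-edge state is recovered purely from the patched code: a
// conditional branch (bpl) before the constant load means the interrupt check
// is still active (INTERRUPT), while a nop in that slot means the sequence has
// been patched to call the on-stack-replacement builtin (ON_STACK_REPLACEMENT).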
4161
4162
4163} // namespace internal
4164} // namespace v8
4165
4166#endif // V8_TARGET_ARCH_ARM