// Copyright 2012 the V8 project authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.

#if V8_TARGET_ARCH_ARM

#include "src/ast/scopes.h"
#include "src/code-factory.h"
#include "src/code-stubs.h"
#include "src/codegen.h"
#include "src/debug/debug.h"
#include "src/full-codegen/full-codegen.h"
#include "src/ic/ic.h"
#include "src/parsing/parser.h"

#include "src/arm/code-stubs-arm.h"
#include "src/arm/macro-assembler-arm.h"

namespace v8 {
namespace internal {

#define __ ACCESS_MASM(masm())

// A patch site is a location in the code that can be patched. This class
// has a number of methods to emit the patchable code and the method
// EmitPatchInfo to record a marker back to the patchable code. This marker
// is a cmp rx, #yyy instruction; x * 0x00000fff + yyy (where yyy is the raw
// 12-bit immediate value) is the delta from the pc to the first instruction
// of the patchable code.
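// For example (following the arithmetic in EmitPatchInfo below): with
// kOff12Mask == 0xfff, a delta of 8197 instructions would be recorded as
// cmp r2, #7, since 8197 == 2 * 0xfff + 7.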
class JumpPatchSite BASE_EMBEDDED {
 public:
  explicit JumpPatchSite(MacroAssembler* masm) : masm_(masm) {
#ifdef DEBUG
    info_emitted_ = false;
#endif
  }

  ~JumpPatchSite() {
    DCHECK(patch_site_.is_bound() == info_emitted_);
  }

  // When initially emitting this code, ensure that a jump is always generated
  // to skip the inlined smi code.
  void EmitJumpIfNotSmi(Register reg, Label* target) {
    DCHECK(!patch_site_.is_bound() && !info_emitted_);
    Assembler::BlockConstPoolScope block_const_pool(masm_);
    __ bind(&patch_site_);
    __ cmp(reg, Operand(reg));
    __ b(eq, target);  // Always taken before patched.
  }

  // When initially emitting this code, ensure that a jump is never generated
  // to skip the inlined smi code.
  void EmitJumpIfSmi(Register reg, Label* target) {
    DCHECK(!patch_site_.is_bound() && !info_emitted_);
    Assembler::BlockConstPoolScope block_const_pool(masm_);
    __ bind(&patch_site_);
    __ cmp(reg, Operand(reg));
    __ b(ne, target);  // Never taken before patched.
  }
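  // Note: cmp reg, reg always sets the eq condition, so until this site is
  // patched the "not smi" jump is always taken and the "smi" jump never is.
  // The IC machinery is what later patches the site (presumably rewriting the
  // cmp into a real smi-tag test) to enable the inlined smi code.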

  void EmitPatchInfo() {
    // Block literal pool emission whilst recording patch site information.
    Assembler::BlockConstPoolScope block_const_pool(masm_);
    if (patch_site_.is_bound()) {
      int delta_to_patch_site = masm_->InstructionsGeneratedSince(&patch_site_);
      Register reg;
      reg.set_code(delta_to_patch_site / kOff12Mask);
      __ cmp_raw_immediate(reg, delta_to_patch_site % kOff12Mask);
#ifdef DEBUG
      info_emitted_ = true;
#endif
    } else {
      __ nop();  // Signals no inlined code.
    }
  }

 private:
  MacroAssembler* masm() { return masm_; }
  MacroAssembler* masm_;
  Label patch_site_;
#ifdef DEBUG
  bool info_emitted_;
#endif
};


// Generate code for a JS function. On entry to the function the receiver
// and arguments have been pushed on the stack left to right. The actual
// argument count matches the formal parameter count expected by the
// function.
//
// The live registers are:
//   o r1: the JS function object being called (i.e., ourselves)
//   o r3: the new target value
//   o cp: our context
//   o pp: our caller's constant pool pointer (if enabled)
//   o fp: our caller's frame pointer
//   o sp: stack pointer
//   o lr: return address
//
// The function builds a JS frame. Please see JavaScriptFrameConstants in
// frames-arm.h for its layout.
void FullCodeGenerator::Generate() {
  CompilationInfo* info = info_;
  profiling_counter_ = isolate()->factory()->NewCell(
      Handle<Smi>(Smi::FromInt(FLAG_interrupt_budget), isolate()));
  SetFunctionPosition(literal());
  Comment cmnt(masm_, "[ function compiled by full code generator");

  ProfileEntryHookStub::MaybeCallEntryHook(masm_);

  if (FLAG_debug_code && info->ExpectsJSReceiverAsReceiver()) {
    int receiver_offset = info->scope()->num_parameters() * kPointerSize;
    __ ldr(r2, MemOperand(sp, receiver_offset));
    __ AssertNotSmi(r2);
    __ CompareObjectType(r2, r2, no_reg, FIRST_JS_RECEIVER_TYPE);
    __ Assert(ge, kSloppyFunctionExpectsJSReceiverReceiver);
  }

  // Open a frame scope to indicate that there is a frame on the stack. The
  // MANUAL indicates that the scope shouldn't actually generate code to set up
  // the frame (that is done below).
  FrameScope frame_scope(masm_, StackFrame::MANUAL);

  info->set_prologue_offset(masm_->pc_offset());
  __ Prologue(info->GeneratePreagedPrologue());

  { Comment cmnt(masm_, "[ Allocate locals");
    int locals_count = info->scope()->num_stack_slots();
    // Generators allocate locals, if any, in context slots.
    DCHECK(!IsGeneratorFunction(info->literal()->kind()) || locals_count == 0);
    OperandStackDepthIncrement(locals_count);
    if (locals_count > 0) {
      if (locals_count >= 128) {
        Label ok;
        __ sub(r9, sp, Operand(locals_count * kPointerSize));
        __ LoadRoot(r2, Heap::kRealStackLimitRootIndex);
        __ cmp(r9, Operand(r2));
        __ b(hs, &ok);
        __ CallRuntime(Runtime::kThrowStackOverflow);
        __ bind(&ok);
      }
      __ LoadRoot(r9, Heap::kUndefinedValueRootIndex);
      int kMaxPushes = FLAG_optimize_for_size ? 4 : 32;
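      // Pushing in batches of kMaxPushes trades code size against loop
      // overhead: e.g. with 70 locals and kMaxPushes == 32 this emits a
      // two-iteration loop of 32 pushes followed by 6 straight-line pushes.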
      if (locals_count >= kMaxPushes) {
        int loop_iterations = locals_count / kMaxPushes;
        __ mov(r2, Operand(loop_iterations));
        Label loop_header;
        __ bind(&loop_header);
        // Do pushes.
        for (int i = 0; i < kMaxPushes; i++) {
          __ push(r9);
        }
        // Continue loop if not done.
        __ sub(r2, r2, Operand(1), SetCC);
        __ b(&loop_header, ne);
      }
      int remaining = locals_count % kMaxPushes;
      // Emit the remaining pushes.
      for (int i = 0; i < remaining; i++) {
        __ push(r9);
      }
    }
  }

  bool function_in_register_r1 = true;

  // Possibly allocate a local context.
  if (info->scope()->num_heap_slots() > 0) {
    // Argument to NewContext is the function, which is still in r1.
    Comment cmnt(masm_, "[ Allocate context");
    bool need_write_barrier = true;
    int slots = info->scope()->num_heap_slots() - Context::MIN_CONTEXT_SLOTS;
    if (info->scope()->is_script_scope()) {
      __ push(r1);
      __ Push(info->scope()->GetScopeInfo(info->isolate()));
      __ CallRuntime(Runtime::kNewScriptContext);
      PrepareForBailoutForId(BailoutId::ScriptContext(),
                             BailoutState::TOS_REGISTER);
      // The new target value is not used, clobbering is safe.
      DCHECK_NULL(info->scope()->new_target_var());
    } else {
      if (info->scope()->new_target_var() != nullptr) {
        __ push(r3);  // Preserve new target.
      }
      if (slots <= FastNewContextStub::kMaximumSlots) {
        FastNewContextStub stub(isolate(), slots);
        __ CallStub(&stub);
        // Result of FastNewContextStub is always in new space.
        need_write_barrier = false;
      } else {
        __ push(r1);
        __ CallRuntime(Runtime::kNewFunctionContext);
      }
      if (info->scope()->new_target_var() != nullptr) {
        __ pop(r3);  // Restore new target.
      }
    }
    function_in_register_r1 = false;
    // Context is returned in r0. It replaces the context passed to us.
    // It's saved in the stack and kept live in cp.
    __ mov(cp, r0);
    __ str(r0, MemOperand(fp, StandardFrameConstants::kContextOffset));
    // Copy any necessary parameters into the context.
    int num_parameters = info->scope()->num_parameters();
    int first_parameter = info->scope()->has_this_declaration() ? -1 : 0;
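    // Index -1 stands for the receiver: when the scope declares 'this', it is
    // copied into the context alongside the ordinary parameters below.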
    for (int i = first_parameter; i < num_parameters; i++) {
      Variable* var = (i == -1) ? scope()->receiver() : scope()->parameter(i);
      if (var->IsContextSlot()) {
        int parameter_offset = StandardFrameConstants::kCallerSPOffset +
            (num_parameters - 1 - i) * kPointerSize;
        // Load parameter from stack.
        __ ldr(r0, MemOperand(fp, parameter_offset));
        // Store it in the context.
        MemOperand target = ContextMemOperand(cp, var->index());
        __ str(r0, target);

        // Update the write barrier.
        if (need_write_barrier) {
          __ RecordWriteContextSlot(cp, target.offset(), r0, r2,
                                    kLRHasBeenSaved, kDontSaveFPRegs);
        } else if (FLAG_debug_code) {
          Label done;
          __ JumpIfInNewSpace(cp, r0, &done);
          __ Abort(kExpectedNewSpaceObject);
          __ bind(&done);
        }
      }
    }
  }

  // The registers holding this function and the new target are both trashed
  // if we bail out here. But since that can happen only when the new target
  // is not used and we allocate a context, the value of
  // |function_in_register_r1| is correct.
  PrepareForBailoutForId(BailoutId::FunctionContext(),
                         BailoutState::NO_REGISTERS);

  // Possibly set up a local binding to the this function which is used in
  // derived constructors with super calls.
  Variable* this_function_var = scope()->this_function_var();
  if (this_function_var != nullptr) {
    Comment cmnt(masm_, "[ This function");
    if (!function_in_register_r1) {
      __ ldr(r1, MemOperand(fp, JavaScriptFrameConstants::kFunctionOffset));
      // The write barrier clobbers the register again; keep it marked as such.
    }
    SetVar(this_function_var, r1, r0, r2);
  }

  // Possibly set up a local binding to the new target value.
  Variable* new_target_var = scope()->new_target_var();
  if (new_target_var != nullptr) {
    Comment cmnt(masm_, "[ new.target");
    SetVar(new_target_var, r3, r0, r2);
  }

  // Possibly allocate a rest parameter array.
  int rest_index;
  Variable* rest_param = scope()->rest_parameter(&rest_index);
  if (rest_param) {
    Comment cmnt(masm_, "[ Allocate rest parameter array");
    if (!function_in_register_r1) {
      __ ldr(r1, MemOperand(fp, JavaScriptFrameConstants::kFunctionOffset));
    }
    FastNewRestParameterStub stub(isolate());
    __ CallStub(&stub);
    function_in_register_r1 = false;
    SetVar(rest_param, r0, r1, r2);
  }

  Variable* arguments = scope()->arguments();
  if (arguments != NULL) {
    // Function uses arguments object.
    Comment cmnt(masm_, "[ Allocate arguments object");
    if (!function_in_register_r1) {
      // Load this again, if it's used by the local context below.
      __ ldr(r1, MemOperand(fp, JavaScriptFrameConstants::kFunctionOffset));
    }
    if (is_strict(language_mode()) || !has_simple_parameters()) {
      FastNewStrictArgumentsStub stub(isolate());
      __ CallStub(&stub);
    } else if (literal()->has_duplicate_parameters()) {
      __ Push(r1);
      __ CallRuntime(Runtime::kNewSloppyArguments_Generic);
    } else {
      FastNewSloppyArgumentsStub stub(isolate());
      __ CallStub(&stub);
    }

    SetVar(arguments, r0, r1, r2);
  }

  if (FLAG_trace) {
    __ CallRuntime(Runtime::kTraceEnter);
  }

  // Visit the declarations and body.
  PrepareForBailoutForId(BailoutId::FunctionEntry(),
                         BailoutState::NO_REGISTERS);
  {
    Comment cmnt(masm_, "[ Declarations");
    VisitDeclarations(scope()->declarations());
  }

  // Assert that the declarations do not use ICs. Otherwise the debugger
  // won't be able to redirect a PC at an IC to the correct IC in newly
  // recompiled code.
  DCHECK_EQ(0, ic_total_count_);

  {
    Comment cmnt(masm_, "[ Stack check");
    PrepareForBailoutForId(BailoutId::Declarations(),
                           BailoutState::NO_REGISTERS);
    Label ok;
    __ LoadRoot(ip, Heap::kStackLimitRootIndex);
    __ cmp(sp, Operand(ip));
    __ b(hs, &ok);
    Handle<Code> stack_check = isolate()->builtins()->StackCheck();
    PredictableCodeSizeScope predictable(masm_);
    predictable.ExpectSize(
        masm_->CallSize(stack_check, RelocInfo::CODE_TARGET));
    __ Call(stack_check, RelocInfo::CODE_TARGET);
    __ bind(&ok);
  }

  {
    Comment cmnt(masm_, "[ Body");
    DCHECK(loop_depth() == 0);
    VisitStatements(literal()->body());
    DCHECK(loop_depth() == 0);
  }

  // Always emit a 'return undefined' in case control fell off the end of
  // the body.
  { Comment cmnt(masm_, "[ return <undefined>;");
    __ LoadRoot(r0, Heap::kUndefinedValueRootIndex);
  }
  EmitReturnSequence();

  // Force emit the constant pool, so it doesn't get emitted in the middle
  // of the back edge table.
  masm()->CheckConstPool(true, false);
}


void FullCodeGenerator::ClearAccumulator() {
  __ mov(r0, Operand(Smi::FromInt(0)));
}


void FullCodeGenerator::EmitProfilingCounterDecrement(int delta) {
  __ mov(r2, Operand(profiling_counter_));
  __ ldr(r3, FieldMemOperand(r2, Cell::kValueOffset));
  __ sub(r3, r3, Operand(Smi::FromInt(delta)), SetCC);
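  // SetCC leaves the flags reflecting the new counter value, so callers can
  // branch on pl (counter still >= 0) immediately after this helper returns.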
  __ str(r3, FieldMemOperand(r2, Cell::kValueOffset));
}


#ifdef CAN_USE_ARMV7_INSTRUCTIONS
static const int kProfileCounterResetSequenceLength = 5 * Assembler::kInstrSize;
#else
static const int kProfileCounterResetSequenceLength = 7 * Assembler::kInstrSize;
#endif
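// A plausible breakdown of these budgets, based on the comment inside
// EmitProfilingCounterReset() below: loading the counter cell address takes
// up to 3 instructions on ARMv7 (up to 5 on ARMv6 with an extended constant
// pool), plus one mov for the reset value and one str, giving 5 and 7
// instructions respectively.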


void FullCodeGenerator::EmitProfilingCounterReset() {
  Assembler::BlockConstPoolScope block_const_pool(masm_);
  PredictableCodeSizeScope predictable_code_size_scope(
      masm_, kProfileCounterResetSequenceLength);
  Label start;
  __ bind(&start);
  int reset_value = FLAG_interrupt_budget;
  __ mov(r2, Operand(profiling_counter_));
  // The mov instruction above can be either 1 to 3 (for ARMv7) or 1 to 5
  // instructions (for ARMv6) depending upon whether it is an extended constant
  // pool - insert nops to compensate.
  int expected_instr_count =
      (kProfileCounterResetSequenceLength / Assembler::kInstrSize) - 2;
  DCHECK(masm_->InstructionsGeneratedSince(&start) <= expected_instr_count);
  while (masm_->InstructionsGeneratedSince(&start) != expected_instr_count) {
    __ nop();
  }
  __ mov(r3, Operand(Smi::FromInt(reset_value)));
  __ str(r3, FieldMemOperand(r2, Cell::kValueOffset));
}


void FullCodeGenerator::EmitBackEdgeBookkeeping(IterationStatement* stmt,
                                                Label* back_edge_target) {
  Comment cmnt(masm_, "[ Back edge bookkeeping");
  // Block literal pools whilst emitting back edge code.
  Assembler::BlockConstPoolScope block_const_pool(masm_);
  Label ok;

  DCHECK(back_edge_target->is_bound());
  int distance = masm_->SizeOfCodeGeneratedSince(back_edge_target);
  int weight = Min(kMaxBackEdgeWeight,
                   Max(1, distance / kCodeSizeMultiplier));
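  // The weight scales the counter decrement with the size of the loop body,
  // so larger loops burn through the interrupt budget proportionally faster;
  // it is clamped to the range [1, kMaxBackEdgeWeight].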
  EmitProfilingCounterDecrement(weight);
  __ b(pl, &ok);
  __ Call(isolate()->builtins()->InterruptCheck(), RelocInfo::CODE_TARGET);

  // Record a mapping of this PC offset to the OSR id. This is used to find
  // the AST id from the unoptimized code in order to use it as a key into
  // the deoptimization input data found in the optimized code.
  RecordBackEdge(stmt->OsrEntryId());

  EmitProfilingCounterReset();

  __ bind(&ok);
  PrepareForBailoutForId(stmt->EntryId(), BailoutState::NO_REGISTERS);
  // Record a mapping of the OSR id to this PC. This is used if the OSR
  // entry becomes the target of a bailout. We don't expect it to be, but
  // we want it to work if it is.
  PrepareForBailoutForId(stmt->OsrEntryId(), BailoutState::NO_REGISTERS);
}

void FullCodeGenerator::EmitProfilingCounterHandlingForReturnSequence(
    bool is_tail_call) {
  // Pretend that the exit is a backwards jump to the entry.
  int weight = 1;
  if (info_->ShouldSelfOptimize()) {
    weight = FLAG_interrupt_budget / FLAG_self_opt_count;
  } else {
    int distance = masm_->pc_offset();
    weight = Min(kMaxBackEdgeWeight, Max(1, distance / kCodeSizeMultiplier));
  }
  EmitProfilingCounterDecrement(weight);
  Label ok;
  __ b(pl, &ok);
  // Don't need to save result register if we are going to do a tail call.
  if (!is_tail_call) {
    __ push(r0);
  }
  __ Call(isolate()->builtins()->InterruptCheck(), RelocInfo::CODE_TARGET);
  if (!is_tail_call) {
    __ pop(r0);
  }
  EmitProfilingCounterReset();
  __ bind(&ok);
}

void FullCodeGenerator::EmitReturnSequence() {
  Comment cmnt(masm_, "[ Return sequence");
  if (return_label_.is_bound()) {
    __ b(&return_label_);
  } else {
    __ bind(&return_label_);
    if (FLAG_trace) {
      // Push the return value on the stack as the parameter.
      // Runtime::TraceExit returns its parameter in r0.
      __ push(r0);
      __ CallRuntime(Runtime::kTraceExit);
    }
    EmitProfilingCounterHandlingForReturnSequence(false);

    // Make sure that the constant pool is not emitted inside of the return
    // sequence.
    { Assembler::BlockConstPoolScope block_const_pool(masm_);
      int32_t arg_count = info_->scope()->num_parameters() + 1;
      int32_t sp_delta = arg_count * kPointerSize;
      SetReturnPosition(literal());
      // TODO(svenpanne) The code below is sometimes 4 words, sometimes 5!
      PredictableCodeSizeScope predictable(masm_, -1);
      __ LeaveFrame(StackFrame::JAVA_SCRIPT);
      { ConstantPoolUnavailableScope constant_pool_unavailable(masm_);
        __ add(sp, sp, Operand(sp_delta));
        __ Jump(lr);
      }
    }
  }
}

void FullCodeGenerator::RestoreContext() {
  __ ldr(cp, MemOperand(fp, StandardFrameConstants::kContextOffset));
}

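// The Plug overloads below materialize a value into the current expression
// context: EffectContext discards it, AccumulatorValueContext leaves it in
// the result register (r0), StackValueContext pushes it onto the operand
// stack, and TestContext branches on its boolean value.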
void FullCodeGenerator::StackValueContext::Plug(Variable* var) const {
  DCHECK(var->IsStackAllocated() || var->IsContextSlot());
  codegen()->GetVar(result_register(), var);
  codegen()->PushOperand(result_register());
}


void FullCodeGenerator::EffectContext::Plug(Heap::RootListIndex index) const {
}


void FullCodeGenerator::AccumulatorValueContext::Plug(
    Heap::RootListIndex index) const {
  __ LoadRoot(result_register(), index);
}


void FullCodeGenerator::StackValueContext::Plug(
    Heap::RootListIndex index) const {
  __ LoadRoot(result_register(), index);
  codegen()->PushOperand(result_register());
}


void FullCodeGenerator::TestContext::Plug(Heap::RootListIndex index) const {
  codegen()->PrepareForBailoutBeforeSplit(condition(),
                                          true,
                                          true_label_,
                                          false_label_);
  if (index == Heap::kUndefinedValueRootIndex ||
      index == Heap::kNullValueRootIndex ||
      index == Heap::kFalseValueRootIndex) {
    if (false_label_ != fall_through_) __ b(false_label_);
  } else if (index == Heap::kTrueValueRootIndex) {
    if (true_label_ != fall_through_) __ b(true_label_);
  } else {
    __ LoadRoot(result_register(), index);
    codegen()->DoTest(this);
  }
}


void FullCodeGenerator::EffectContext::Plug(Handle<Object> lit) const {
}


void FullCodeGenerator::AccumulatorValueContext::Plug(
    Handle<Object> lit) const {
  __ mov(result_register(), Operand(lit));
}


void FullCodeGenerator::StackValueContext::Plug(Handle<Object> lit) const {
  // Immediates cannot be pushed directly.
  __ mov(result_register(), Operand(lit));
  codegen()->PushOperand(result_register());
}


void FullCodeGenerator::TestContext::Plug(Handle<Object> lit) const {
  codegen()->PrepareForBailoutBeforeSplit(condition(),
                                          true,
                                          true_label_,
                                          false_label_);
  DCHECK(lit->IsNull() || lit->IsUndefined() || !lit->IsUndetectable());
  if (lit->IsUndefined() || lit->IsNull() || lit->IsFalse()) {
    if (false_label_ != fall_through_) __ b(false_label_);
  } else if (lit->IsTrue() || lit->IsJSObject()) {
    if (true_label_ != fall_through_) __ b(true_label_);
  } else if (lit->IsString()) {
    if (String::cast(*lit)->length() == 0) {
      if (false_label_ != fall_through_) __ b(false_label_);
    } else {
      if (true_label_ != fall_through_) __ b(true_label_);
    }
  } else if (lit->IsSmi()) {
    if (Smi::cast(*lit)->value() == 0) {
      if (false_label_ != fall_through_) __ b(false_label_);
    } else {
      if (true_label_ != fall_through_) __ b(true_label_);
    }
  } else {
    // For simplicity we always test the accumulator register.
    __ mov(result_register(), Operand(lit));
    codegen()->DoTest(this);
  }
}


void FullCodeGenerator::StackValueContext::DropAndPlug(int count,
                                                       Register reg) const {
  DCHECK(count > 0);
  if (count > 1) codegen()->DropOperands(count - 1);
  __ str(reg, MemOperand(sp, 0));
}


void FullCodeGenerator::EffectContext::Plug(Label* materialize_true,
                                            Label* materialize_false) const {
  DCHECK(materialize_true == materialize_false);
  __ bind(materialize_true);
}


void FullCodeGenerator::AccumulatorValueContext::Plug(
    Label* materialize_true,
    Label* materialize_false) const {
  Label done;
  __ bind(materialize_true);
  __ LoadRoot(result_register(), Heap::kTrueValueRootIndex);
  __ jmp(&done);
  __ bind(materialize_false);
  __ LoadRoot(result_register(), Heap::kFalseValueRootIndex);
  __ bind(&done);
}


void FullCodeGenerator::StackValueContext::Plug(
    Label* materialize_true,
    Label* materialize_false) const {
  Label done;
  __ bind(materialize_true);
  __ LoadRoot(ip, Heap::kTrueValueRootIndex);
  __ jmp(&done);
  __ bind(materialize_false);
  __ LoadRoot(ip, Heap::kFalseValueRootIndex);
  __ bind(&done);
  codegen()->PushOperand(ip);
}


void FullCodeGenerator::TestContext::Plug(Label* materialize_true,
                                          Label* materialize_false) const {
  DCHECK(materialize_true == true_label_);
  DCHECK(materialize_false == false_label_);
}


void FullCodeGenerator::AccumulatorValueContext::Plug(bool flag) const {
  Heap::RootListIndex value_root_index =
      flag ? Heap::kTrueValueRootIndex : Heap::kFalseValueRootIndex;
  __ LoadRoot(result_register(), value_root_index);
}


void FullCodeGenerator::StackValueContext::Plug(bool flag) const {
  Heap::RootListIndex value_root_index =
      flag ? Heap::kTrueValueRootIndex : Heap::kFalseValueRootIndex;
  __ LoadRoot(ip, value_root_index);
  codegen()->PushOperand(ip);
}


void FullCodeGenerator::TestContext::Plug(bool flag) const {
  codegen()->PrepareForBailoutBeforeSplit(condition(),
                                          true,
                                          true_label_,
                                          false_label_);
  if (flag) {
    if (true_label_ != fall_through_) __ b(true_label_);
  } else {
    if (false_label_ != fall_through_) __ b(false_label_);
  }
}


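// DoTest evaluates a condition to a boolean via the ToBoolean IC; Split then
// emits the minimal branches, relying on the fact that whichever target
// equals fall_through needs no explicit jump.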
void FullCodeGenerator::DoTest(Expression* condition,
                               Label* if_true,
                               Label* if_false,
                               Label* fall_through) {
  Handle<Code> ic = ToBooleanICStub::GetUninitialized(isolate());
  CallIC(ic, condition->test_id());
  __ CompareRoot(result_register(), Heap::kTrueValueRootIndex);
  Split(eq, if_true, if_false, fall_through);
}


void FullCodeGenerator::Split(Condition cond,
                              Label* if_true,
                              Label* if_false,
                              Label* fall_through) {
  if (if_false == fall_through) {
    __ b(cond, if_true);
  } else if (if_true == fall_through) {
    __ b(NegateCondition(cond), if_false);
  } else {
    __ b(cond, if_true);
    __ b(if_false);
  }
}


MemOperand FullCodeGenerator::StackOperand(Variable* var) {
  DCHECK(var->IsStackAllocated());
  // Offset is negative because higher indexes are at lower addresses.
  int offset = -var->index() * kPointerSize;
  // Adjust by a (parameter or local) base offset.
  if (var->IsParameter()) {
    offset += (info_->scope()->num_parameters() + 1) * kPointerSize;
  } else {
    offset += JavaScriptFrameConstants::kLocal0Offset;
  }
  return MemOperand(fp, offset);
}


MemOperand FullCodeGenerator::VarOperand(Variable* var, Register scratch) {
  DCHECK(var->IsContextSlot() || var->IsStackAllocated());
  if (var->IsContextSlot()) {
    int context_chain_length = scope()->ContextChainLength(var->scope());
    __ LoadContext(scratch, context_chain_length);
    return ContextMemOperand(scratch, var->index());
  } else {
    return StackOperand(var);
  }
}


void FullCodeGenerator::GetVar(Register dest, Variable* var) {
  // Use destination as scratch.
  MemOperand location = VarOperand(var, dest);
  __ ldr(dest, location);
}


void FullCodeGenerator::SetVar(Variable* var,
                               Register src,
                               Register scratch0,
                               Register scratch1) {
  DCHECK(var->IsContextSlot() || var->IsStackAllocated());
  DCHECK(!scratch0.is(src));
  DCHECK(!scratch0.is(scratch1));
  DCHECK(!scratch1.is(src));
  MemOperand location = VarOperand(var, scratch0);
  __ str(src, location);

  // Emit the write barrier code if the location is in the heap.
  if (var->IsContextSlot()) {
    __ RecordWriteContextSlot(scratch0,
                              location.offset(),
                              src,
                              scratch1,
                              kLRHasBeenSaved,
                              kDontSaveFPRegs);
  }
}


void FullCodeGenerator::PrepareForBailoutBeforeSplit(Expression* expr,
                                                     bool should_normalize,
                                                     Label* if_true,
                                                     Label* if_false) {
  // Only prepare for bailouts before splits if we're in a test
  // context. Otherwise, we let the Visit function deal with the
  // preparation to avoid preparing with the same AST id twice.
  if (!context()->IsTest()) return;

  Label skip;
  if (should_normalize) __ b(&skip);
  PrepareForBailout(expr, BailoutState::TOS_REGISTER);
  if (should_normalize) {
    __ LoadRoot(ip, Heap::kTrueValueRootIndex);
    __ cmp(r0, ip);
    Split(eq, if_true, if_false, NULL);
    __ bind(&skip);
  }
}


void FullCodeGenerator::EmitDebugCheckDeclarationContext(Variable* variable) {
  // The variable in the declaration always resides in the current function
  // context.
  DCHECK_EQ(0, scope()->ContextChainLength(variable->scope()));
  if (FLAG_debug_code) {
    // Check that we're not inside a with or catch context.
    __ ldr(r1, FieldMemOperand(cp, HeapObject::kMapOffset));
    __ CompareRoot(r1, Heap::kWithContextMapRootIndex);
    __ Check(ne, kDeclarationInWithContext);
    __ CompareRoot(r1, Heap::kCatchContextMapRootIndex);
    __ Check(ne, kDeclarationInCatchContext);
  }
}


void FullCodeGenerator::VisitVariableDeclaration(
    VariableDeclaration* declaration) {
  // If it was not possible to allocate the variable at compile time, we
  // need to "declare" it at runtime to make sure it actually exists in the
  // local context.
  VariableProxy* proxy = declaration->proxy();
  VariableMode mode = declaration->mode();
  Variable* variable = proxy->var();
  bool hole_init = mode == LET || mode == CONST;
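  // Hole-initialized bindings start out as the_hole so that reads before the
  // initializer has run can be detected and throw a ReferenceError (the
  // temporal dead zone for let/const).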
  switch (variable->location()) {
    case VariableLocation::GLOBAL:
    case VariableLocation::UNALLOCATED:
      DCHECK(!variable->binding_needs_init());
      globals_->Add(variable->name(), zone());
      globals_->Add(isolate()->factory()->undefined_value(), zone());
      break;

    case VariableLocation::PARAMETER:
    case VariableLocation::LOCAL:
      if (hole_init) {
        Comment cmnt(masm_, "[ VariableDeclaration");
        __ LoadRoot(r0, Heap::kTheHoleValueRootIndex);
        __ str(r0, StackOperand(variable));
      }
      break;

    case VariableLocation::CONTEXT:
      if (hole_init) {
        Comment cmnt(masm_, "[ VariableDeclaration");
        EmitDebugCheckDeclarationContext(variable);
        __ LoadRoot(r0, Heap::kTheHoleValueRootIndex);
        __ str(r0, ContextMemOperand(cp, variable->index()));
        // No write barrier since the_hole_value is in old space.
        PrepareForBailoutForId(proxy->id(), BailoutState::NO_REGISTERS);
      }
      break;

    case VariableLocation::LOOKUP: {
      Comment cmnt(masm_, "[ VariableDeclaration");
      __ mov(r2, Operand(variable->name()));
      // Declaration nodes are always introduced in one of four modes.
      DCHECK(IsDeclaredVariableMode(mode));
      // Push initial value, if any.
      // Note: For variables we must not push an initial value (such as
      // 'undefined') because we may have a (legal) redeclaration and we
      // must not destroy the current value.
      if (hole_init) {
        __ LoadRoot(r0, Heap::kTheHoleValueRootIndex);
      } else {
        __ mov(r0, Operand(Smi::FromInt(0)));  // Indicates no initial value.
      }
      __ Push(r2, r0);
      __ Push(Smi::FromInt(variable->DeclarationPropertyAttributes()));
      __ CallRuntime(Runtime::kDeclareLookupSlot);
      PrepareForBailoutForId(proxy->id(), BailoutState::NO_REGISTERS);
      break;
    }
  }
}


void FullCodeGenerator::VisitFunctionDeclaration(
    FunctionDeclaration* declaration) {
  VariableProxy* proxy = declaration->proxy();
  Variable* variable = proxy->var();
  switch (variable->location()) {
    case VariableLocation::GLOBAL:
    case VariableLocation::UNALLOCATED: {
      globals_->Add(variable->name(), zone());
      Handle<SharedFunctionInfo> function =
          Compiler::GetSharedFunctionInfo(declaration->fun(), script(), info_);
      // Check for stack-overflow exception.
      if (function.is_null()) return SetStackOverflow();
      globals_->Add(function, zone());
      break;
    }

    case VariableLocation::PARAMETER:
    case VariableLocation::LOCAL: {
      Comment cmnt(masm_, "[ FunctionDeclaration");
      VisitForAccumulatorValue(declaration->fun());
      __ str(result_register(), StackOperand(variable));
      break;
    }

    case VariableLocation::CONTEXT: {
      Comment cmnt(masm_, "[ FunctionDeclaration");
      EmitDebugCheckDeclarationContext(variable);
      VisitForAccumulatorValue(declaration->fun());
      __ str(result_register(), ContextMemOperand(cp, variable->index()));
      int offset = Context::SlotOffset(variable->index());
      // We know that we have written a function, which is not a smi.
      __ RecordWriteContextSlot(cp,
                                offset,
                                result_register(),
                                r2,
                                kLRHasBeenSaved,
                                kDontSaveFPRegs,
                                EMIT_REMEMBERED_SET,
                                OMIT_SMI_CHECK);
      PrepareForBailoutForId(proxy->id(), BailoutState::NO_REGISTERS);
      break;
    }

    case VariableLocation::LOOKUP: {
      Comment cmnt(masm_, "[ FunctionDeclaration");
      __ mov(r2, Operand(variable->name()));
      PushOperand(r2);
      // Push initial value for function declaration.
      VisitForStackValue(declaration->fun());
      PushOperand(Smi::FromInt(variable->DeclarationPropertyAttributes()));
      CallRuntimeWithOperands(Runtime::kDeclareLookupSlot);
      PrepareForBailoutForId(proxy->id(), BailoutState::NO_REGISTERS);
      break;
    }
  }
}


void FullCodeGenerator::DeclareGlobals(Handle<FixedArray> pairs) {
  // Call the runtime to declare the globals.
  __ mov(r1, Operand(pairs));
  __ mov(r0, Operand(Smi::FromInt(DeclareGlobalsFlags())));
  __ Push(r1, r0);
  __ CallRuntime(Runtime::kDeclareGlobals);
  // Return value is ignored.
}


void FullCodeGenerator::DeclareModules(Handle<FixedArray> descriptions) {
  // Call the runtime to declare the modules.
  __ Push(descriptions);
  __ CallRuntime(Runtime::kDeclareModules);
  // Return value is ignored.
}


void FullCodeGenerator::VisitSwitchStatement(SwitchStatement* stmt) {
  Comment cmnt(masm_, "[ SwitchStatement");
  Breakable nested_statement(this, stmt);
  SetStatementPosition(stmt);

  // Keep the switch value on the stack until a case matches.
  VisitForStackValue(stmt->tag());
  PrepareForBailoutForId(stmt->EntryId(), BailoutState::NO_REGISTERS);

  ZoneList<CaseClause*>* clauses = stmt->cases();
  CaseClause* default_clause = NULL;  // Can occur anywhere in the list.

  Label next_test;  // Recycled for each test.
  // Compile all the tests with branches to their bodies.
  for (int i = 0; i < clauses->length(); i++) {
    CaseClause* clause = clauses->at(i);
    clause->body_target()->Unuse();

    // The default is not a test, but remember it as final fall through.
    if (clause->is_default()) {
      default_clause = clause;
      continue;
    }

    Comment cmnt(masm_, "[ Case comparison");
    __ bind(&next_test);
    next_test.Unuse();

    // Compile the label expression.
    VisitForAccumulatorValue(clause->label());

    // Perform the comparison as if via '==='.
    __ ldr(r1, MemOperand(sp, 0));  // Switch value.
    bool inline_smi_code = ShouldInlineSmiCase(Token::EQ_STRICT);
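    // Fast path: once the patch site has been activated by the IC system,
    // smi operands are compared directly here, skipping the CompareIC call
    // below. The JumpPatchSite marks the smi check so that it can be patched
    // later as type feedback accumulates.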
    JumpPatchSite patch_site(masm_);
    if (inline_smi_code) {
      Label slow_case;
      __ orr(r2, r1, r0);
      patch_site.EmitJumpIfNotSmi(r2, &slow_case);

      __ cmp(r1, r0);
      __ b(ne, &next_test);
      __ Drop(1);  // Switch value is no longer needed.
      __ b(clause->body_target());
      __ bind(&slow_case);
    }

    // Record position before stub call for type feedback.
    SetExpressionPosition(clause);
    Handle<Code> ic =
        CodeFactory::CompareIC(isolate(), Token::EQ_STRICT).code();
    CallIC(ic, clause->CompareId());
    patch_site.EmitPatchInfo();

    Label skip;
    __ b(&skip);
    PrepareForBailout(clause, BailoutState::TOS_REGISTER);
    __ LoadRoot(ip, Heap::kTrueValueRootIndex);
    __ cmp(r0, ip);
    __ b(ne, &next_test);
    __ Drop(1);
    __ jmp(clause->body_target());
    __ bind(&skip);

    __ cmp(r0, Operand::Zero());
    __ b(ne, &next_test);
    __ Drop(1);  // Switch value is no longer needed.
    __ b(clause->body_target());
  }

  // Discard the test value and jump to the default if present, otherwise to
  // the end of the statement.
  __ bind(&next_test);
  DropOperands(1);  // Switch value is no longer needed.
  if (default_clause == NULL) {
    __ b(nested_statement.break_label());
  } else {
    __ b(default_clause->body_target());
  }

  // Compile all the case bodies.
  for (int i = 0; i < clauses->length(); i++) {
    Comment cmnt(masm_, "[ Case body");
    CaseClause* clause = clauses->at(i);
    __ bind(clause->body_target());
    PrepareForBailoutForId(clause->EntryId(), BailoutState::NO_REGISTERS);
    VisitStatements(clause->statements());
  }

  __ bind(nested_statement.break_label());
  PrepareForBailoutForId(stmt->ExitId(), BailoutState::NO_REGISTERS);
}


void FullCodeGenerator::VisitForInStatement(ForInStatement* stmt) {
  Comment cmnt(masm_, "[ ForInStatement");
  SetStatementPosition(stmt, SKIP_BREAK);

  FeedbackVectorSlot slot = stmt->ForInFeedbackSlot();

  // Get the object to enumerate over.
  SetExpressionAsStatementPosition(stmt->enumerable());
  VisitForAccumulatorValue(stmt->enumerable());
  OperandStackDepthIncrement(5);

  Label loop, exit;
  Iteration loop_statement(this, stmt);
  increment_loop_depth();

  // If the object is null or undefined, skip over the loop, otherwise convert
  // it to a JS receiver. See ECMA-262 version 5, section 12.6.4.
  Label convert, done_convert;
  __ JumpIfSmi(r0, &convert);
  __ CompareObjectType(r0, r1, r1, FIRST_JS_RECEIVER_TYPE);
  __ b(ge, &done_convert);
  __ CompareRoot(r0, Heap::kNullValueRootIndex);
  __ b(eq, &exit);
  __ CompareRoot(r0, Heap::kUndefinedValueRootIndex);
  __ b(eq, &exit);
  __ bind(&convert);
  ToObjectStub stub(isolate());
  __ CallStub(&stub);
  __ bind(&done_convert);
  PrepareForBailoutForId(stmt->ToObjectId(), BailoutState::TOS_REGISTER);
  __ push(r0);

  // Check cache validity in generated code. If we cannot guarantee cache
  // validity, call the runtime system to check cache validity or get the
  // property names in a fixed array. Note: Proxies never have an enum cache,
  // so will always take the slow path.
  Label call_runtime;
  __ CheckEnumCache(&call_runtime);

  // The enum cache is valid. Load the map of the object being
  // iterated over and use the cache for the iteration.
  Label use_cache;
  __ ldr(r0, FieldMemOperand(r0, HeapObject::kMapOffset));
  __ b(&use_cache);

  // Get the set of properties to enumerate.
  __ bind(&call_runtime);
  __ push(r0);  // Duplicate the enumerable object on the stack.
  __ CallRuntime(Runtime::kForInEnumerate);
  PrepareForBailoutForId(stmt->EnumId(), BailoutState::TOS_REGISTER);

  // If we got a map from the runtime call, we can do a fast
  // modification check. Otherwise, we got a fixed array, and we have
  // to do a slow check.
  Label fixed_array;
  __ ldr(r2, FieldMemOperand(r0, HeapObject::kMapOffset));
  __ LoadRoot(ip, Heap::kMetaMapRootIndex);
  __ cmp(r2, ip);
  __ b(ne, &fixed_array);

  // We got a map in register r0. Get the enumeration cache from it.
  Label no_descriptors;
  __ bind(&use_cache);

  __ EnumLength(r1, r0);
  __ cmp(r1, Operand(Smi::FromInt(0)));
  __ b(eq, &no_descriptors);

  __ LoadInstanceDescriptors(r0, r2);
  __ ldr(r2, FieldMemOperand(r2, DescriptorArray::kEnumCacheOffset));
  __ ldr(r2, FieldMemOperand(r2, DescriptorArray::kEnumCacheBridgeCacheOffset));

  // Set up the four remaining stack slots.
  __ push(r0);  // Map.
  __ mov(r0, Operand(Smi::FromInt(0)));
  // Push enumeration cache, enumeration cache length (as smi) and zero.
  __ Push(r2, r1, r0);
  __ jmp(&loop);

  __ bind(&no_descriptors);
  __ Drop(1);
  __ jmp(&exit);

  // We got a fixed array in register r0. Iterate through that.
  __ bind(&fixed_array);

  __ mov(r1, Operand(Smi::FromInt(1)));  // Smi(1) indicates slow check.
  __ Push(r1, r0);  // Smi and array.
  __ ldr(r1, FieldMemOperand(r0, FixedArray::kLengthOffset));
  __ Push(r1);  // Fixed array length (as smi).
  PrepareForBailoutForId(stmt->PrepareId(), BailoutState::NO_REGISTERS);
  __ mov(r0, Operand(Smi::FromInt(0)));
  __ Push(r0);  // Initial index.

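  // At this point the five operand-stack slots hold, from the top:
  //   [sp + 0]  current index (smi)
  //   [sp + 4]  length of the cache/array (smi)
  //   [sp + 8]  enum cache or fixed array of keys
  //   [sp + 12] map of the enumerable, or Smi(1) for the slow path
  //   [sp + 16] the enumerable object itself
  // which matches the MemOperand offsets used in the loop below.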
  // Generate code for doing the condition check.
  __ bind(&loop);
  SetExpressionAsStatementPosition(stmt->each());

  // Load the current count to r0, load the length to r1.
  __ Ldrd(r0, r1, MemOperand(sp, 0 * kPointerSize));
  __ cmp(r0, r1);  // Compare to the array length.
  __ b(hs, loop_statement.break_label());

  // Get the current entry of the array into register r3.
  __ ldr(r2, MemOperand(sp, 2 * kPointerSize));
  __ add(r2, r2, Operand(FixedArray::kHeaderSize - kHeapObjectTag));
  __ ldr(r3, MemOperand::PointerAddressFromSmiKey(r2, r0));

  // Get the expected map from the stack or a smi in the
  // permanent slow case into register r2.
  __ ldr(r2, MemOperand(sp, 3 * kPointerSize));

  // Check if the expected map still matches that of the enumerable.
  // If not, we may have to filter the key.
  Label update_each;
  __ ldr(r1, MemOperand(sp, 4 * kPointerSize));
  __ ldr(r4, FieldMemOperand(r1, HeapObject::kMapOffset));
  __ cmp(r4, Operand(r2));
  __ b(eq, &update_each);

  // We need to filter the key, record slow-path here.
  int const vector_index = SmiFromSlot(slot)->value();
  __ EmitLoadTypeFeedbackVector(r0);
  __ mov(r2, Operand(TypeFeedbackVector::MegamorphicSentinel(isolate())));
  __ str(r2, FieldMemOperand(r0, FixedArray::OffsetOfElementAt(vector_index)));

  // Convert the entry to a string or (smi) 0 if it isn't a property
  // any more. If the property has been removed while iterating, we
  // just skip it.
  __ push(r1);  // Enumerable.
  __ push(r3);  // Current entry.
  __ CallRuntime(Runtime::kForInFilter);
  PrepareForBailoutForId(stmt->FilterId(), BailoutState::TOS_REGISTER);
  __ mov(r3, Operand(r0));
  __ LoadRoot(ip, Heap::kUndefinedValueRootIndex);
  __ cmp(r0, ip);
  __ b(eq, loop_statement.continue_label());

  // Update the 'each' property or variable from the possibly filtered
  // entry in register r3.
  __ bind(&update_each);
  __ mov(result_register(), r3);
  // Perform the assignment as if via '='.
  { EffectContext context(this);
    EmitAssignment(stmt->each(), stmt->EachFeedbackSlot());
    PrepareForBailoutForId(stmt->AssignmentId(), BailoutState::NO_REGISTERS);
  }

  // Both Crankshaft and Turbofan expect BodyId to be right before stmt->body().
  PrepareForBailoutForId(stmt->BodyId(), BailoutState::NO_REGISTERS);
  // Generate code for the body of the loop.
  Visit(stmt->body());

  // Generate code for going to the next element by incrementing
  // the index (smi) stored on top of the stack.
  __ bind(loop_statement.continue_label());
  __ pop(r0);
  __ add(r0, r0, Operand(Smi::FromInt(1)));
  __ push(r0);

  EmitBackEdgeBookkeeping(stmt, &loop);
  __ b(&loop);

  // Remove the pointers stored on the stack.
  __ bind(loop_statement.break_label());
  DropOperands(5);

  // Exit and decrement the loop depth.
  PrepareForBailoutForId(stmt->ExitId(), BailoutState::NO_REGISTERS);
  __ bind(&exit);
  decrement_loop_depth();
}


void FullCodeGenerator::EmitSetHomeObject(Expression* initializer, int offset,
                                          FeedbackVectorSlot slot) {
  DCHECK(NeedsHomeObject(initializer));
  __ ldr(StoreDescriptor::ReceiverRegister(), MemOperand(sp));
  __ mov(StoreDescriptor::NameRegister(),
         Operand(isolate()->factory()->home_object_symbol()));
  __ ldr(StoreDescriptor::ValueRegister(),
         MemOperand(sp, offset * kPointerSize));
  EmitLoadStoreICSlot(slot);
  CallStoreIC();
}


void FullCodeGenerator::EmitSetHomeObjectAccumulator(Expression* initializer,
                                                     int offset,
                                                     FeedbackVectorSlot slot) {
  DCHECK(NeedsHomeObject(initializer));
  __ Move(StoreDescriptor::ReceiverRegister(), r0);
  __ mov(StoreDescriptor::NameRegister(),
         Operand(isolate()->factory()->home_object_symbol()));
  __ ldr(StoreDescriptor::ValueRegister(),
         MemOperand(sp, offset * kPointerSize));
  EmitLoadStoreICSlot(slot);
  CallStoreIC();
}


void FullCodeGenerator::EmitLoadGlobalCheckExtensions(VariableProxy* proxy,
                                                      TypeofMode typeof_mode,
                                                      Label* slow) {
  Register current = cp;
  Register next = r1;
  Register temp = r2;

  Scope* s = scope();
  while (s != NULL) {
    if (s->num_heap_slots() > 0) {
      if (s->calls_sloppy_eval()) {
        // Check that extension is "the hole".
        __ ldr(temp, ContextMemOperand(current, Context::EXTENSION_INDEX));
        __ JumpIfNotRoot(temp, Heap::kTheHoleValueRootIndex, slow);
      }
      // Load next context in chain.
      __ ldr(next, ContextMemOperand(current, Context::PREVIOUS_INDEX));
      // Walk the rest of the chain without clobbering cp.
      current = next;
    }
    // If no outer scope calls eval, we do not need to check more
    // context extensions.
    if (!s->outer_scope_calls_sloppy_eval() || s->is_eval_scope()) break;
    s = s->outer_scope();
  }

  if (s->is_eval_scope()) {
    Label loop, fast;
    if (!current.is(next)) {
      __ Move(next, current);
    }
    __ bind(&loop);
    // Terminate at native context.
    __ ldr(temp, FieldMemOperand(next, HeapObject::kMapOffset));
    __ LoadRoot(ip, Heap::kNativeContextMapRootIndex);
    __ cmp(temp, ip);
    __ b(eq, &fast);
    // Check that extension is "the hole".
    __ ldr(temp, ContextMemOperand(next, Context::EXTENSION_INDEX));
    __ JumpIfNotRoot(temp, Heap::kTheHoleValueRootIndex, slow);
    // Load next context in chain.
    __ ldr(next, ContextMemOperand(next, Context::PREVIOUS_INDEX));
    __ b(&loop);
    __ bind(&fast);
  }

  // All extension objects were empty and it is safe to use the normal global
  // load machinery.
  EmitGlobalVariableLoad(proxy, typeof_mode);
}


MemOperand FullCodeGenerator::ContextSlotOperandCheckExtensions(Variable* var,
                                                                Label* slow) {
  DCHECK(var->IsContextSlot());
  Register context = cp;
  Register next = r3;
  Register temp = r4;

  for (Scope* s = scope(); s != var->scope(); s = s->outer_scope()) {
    if (s->num_heap_slots() > 0) {
      if (s->calls_sloppy_eval()) {
        // Check that extension is "the hole".
        __ ldr(temp, ContextMemOperand(context, Context::EXTENSION_INDEX));
        __ JumpIfNotRoot(temp, Heap::kTheHoleValueRootIndex, slow);
      }
      __ ldr(next, ContextMemOperand(context, Context::PREVIOUS_INDEX));
      // Walk the rest of the chain without clobbering cp.
      context = next;
    }
  }
  // Check that last extension is "the hole".
  __ ldr(temp, ContextMemOperand(context, Context::EXTENSION_INDEX));
  __ JumpIfNotRoot(temp, Heap::kTheHoleValueRootIndex, slow);

  // This function is used only for loads, not stores, so it's safe to
  // return a cp-based operand (the write barrier cannot be allowed to
  // destroy the cp register).
  return ContextMemOperand(context, var->index());
}


void FullCodeGenerator::EmitDynamicLookupFastCase(VariableProxy* proxy,
                                                  TypeofMode typeof_mode,
                                                  Label* slow, Label* done) {
  // Generate fast-case code for variables that might be shadowed by
  // eval-introduced variables. Eval is used a lot without
  // introducing variables. In those cases, we do not want to
  // perform a runtime call for all variables in the scope
  // containing the eval.
  Variable* var = proxy->var();
  if (var->mode() == DYNAMIC_GLOBAL) {
    EmitLoadGlobalCheckExtensions(proxy, typeof_mode, slow);
    __ jmp(done);
  } else if (var->mode() == DYNAMIC_LOCAL) {
    Variable* local = var->local_if_not_shadowed();
    __ ldr(r0, ContextSlotOperandCheckExtensions(local, slow));
    if (local->mode() == LET || local->mode() == CONST) {
      __ CompareRoot(r0, Heap::kTheHoleValueRootIndex);
      __ b(ne, done);
      __ mov(r0, Operand(var->name()));
      __ push(r0);
      __ CallRuntime(Runtime::kThrowReferenceError);
    }
    __ jmp(done);
  }
}


void FullCodeGenerator::EmitGlobalVariableLoad(VariableProxy* proxy,
                                               TypeofMode typeof_mode) {
  Variable* var = proxy->var();
  DCHECK(var->IsUnallocatedOrGlobalSlot() ||
         (var->IsLookupSlot() && var->mode() == DYNAMIC_GLOBAL));
  __ LoadGlobalObject(LoadDescriptor::ReceiverRegister());
  __ mov(LoadDescriptor::NameRegister(), Operand(var->name()));
  __ mov(LoadDescriptor::SlotRegister(),
         Operand(SmiFromSlot(proxy->VariableFeedbackSlot())));
  CallLoadIC(typeof_mode);
}


void FullCodeGenerator::EmitVariableLoad(VariableProxy* proxy,
                                         TypeofMode typeof_mode) {
  // Record position before possible IC call.
  SetExpressionPosition(proxy);
  PrepareForBailoutForId(proxy->BeforeId(), BailoutState::NO_REGISTERS);
  Variable* var = proxy->var();

  // Three cases: global variables, lookup variables, and all other types of
  // variables.
  switch (var->location()) {
    case VariableLocation::GLOBAL:
    case VariableLocation::UNALLOCATED: {
      Comment cmnt(masm_, "[ Global variable");
      EmitGlobalVariableLoad(proxy, typeof_mode);
      context()->Plug(r0);
      break;
    }

    case VariableLocation::PARAMETER:
    case VariableLocation::LOCAL:
    case VariableLocation::CONTEXT: {
      DCHECK_EQ(NOT_INSIDE_TYPEOF, typeof_mode);
      Comment cmnt(masm_, var->IsContextSlot() ? "[ Context variable"
                                               : "[ Stack variable");
      if (NeedsHoleCheckForLoad(proxy)) {
        // Let and const need a read barrier.
        GetVar(r0, var);
        __ CompareRoot(r0, Heap::kTheHoleValueRootIndex);
        if (var->mode() == LET || var->mode() == CONST) {
          // Throw a reference error when using an uninitialized let/const
          // binding in harmony mode.
          Label done;
          __ b(ne, &done);
          __ mov(r0, Operand(var->name()));
          __ push(r0);
          __ CallRuntime(Runtime::kThrowReferenceError);
          __ bind(&done);
        }
        context()->Plug(r0);
        break;
      }
      context()->Plug(var);
      break;
    }

    case VariableLocation::LOOKUP: {
      Comment cmnt(masm_, "[ Lookup variable");
      Label done, slow;
      // Generate code for loading from variables potentially shadowed
      // by eval-introduced variables.
      EmitDynamicLookupFastCase(proxy, typeof_mode, &slow, &done);
      __ bind(&slow);
      __ Push(var->name());
      Runtime::FunctionId function_id =
          typeof_mode == NOT_INSIDE_TYPEOF
              ? Runtime::kLoadLookupSlot
              : Runtime::kLoadLookupSlotInsideTypeof;
      __ CallRuntime(function_id);
      __ bind(&done);
      context()->Plug(r0);
    }
  }
}


void FullCodeGenerator::VisitRegExpLiteral(RegExpLiteral* expr) {
  Comment cmnt(masm_, "[ RegExpLiteral");
  __ ldr(r3, MemOperand(fp, JavaScriptFrameConstants::kFunctionOffset));
  __ mov(r2, Operand(Smi::FromInt(expr->literal_index())));
  __ mov(r1, Operand(expr->pattern()));
  __ mov(r0, Operand(Smi::FromInt(expr->flags())));
  FastCloneRegExpStub stub(isolate());
  __ CallStub(&stub);
  context()->Plug(r0);
}


void FullCodeGenerator::EmitAccessor(ObjectLiteralProperty* property) {
  Expression* expression = (property == NULL) ? NULL : property->value();
  if (expression == NULL) {
    __ LoadRoot(r1, Heap::kNullValueRootIndex);
    PushOperand(r1);
  } else {
    VisitForStackValue(expression);
    if (NeedsHomeObject(expression)) {
      DCHECK(property->kind() == ObjectLiteral::Property::GETTER ||
             property->kind() == ObjectLiteral::Property::SETTER);
      int offset = property->kind() == ObjectLiteral::Property::GETTER ? 2 : 3;
      EmitSetHomeObject(expression, offset, property->GetSlot());
    }
  }
}
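// EmitAccessor (above) pushes either the accessor function or null as a
// placeholder; VisitObjectLiteral collects getters and setters for the same
// key in accessor_table so that each pair can be defined with a single
// runtime call (see the loop over accessor_table near the end of that
// function).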


void FullCodeGenerator::VisitObjectLiteral(ObjectLiteral* expr) {
  Comment cmnt(masm_, "[ ObjectLiteral");

  Handle<FixedArray> constant_properties = expr->constant_properties();
  __ ldr(r3, MemOperand(fp, JavaScriptFrameConstants::kFunctionOffset));
  __ mov(r2, Operand(Smi::FromInt(expr->literal_index())));
  __ mov(r1, Operand(constant_properties));
  int flags = expr->ComputeFlags();
  __ mov(r0, Operand(Smi::FromInt(flags)));
  if (MustCreateObjectLiteralWithRuntime(expr)) {
    __ Push(r3, r2, r1, r0);
    __ CallRuntime(Runtime::kCreateObjectLiteral);
  } else {
    FastCloneShallowObjectStub stub(isolate(), expr->properties_count());
    __ CallStub(&stub);
    RestoreContext();
  }
  PrepareForBailoutForId(expr->CreateLiteralId(), BailoutState::TOS_REGISTER);

  // If result_saved is true the result is on top of the stack. If
  // result_saved is false the result is in r0.
  bool result_saved = false;

  AccessorTable accessor_table(zone());
  int property_index = 0;
  for (; property_index < expr->properties()->length(); property_index++) {
    ObjectLiteral::Property* property = expr->properties()->at(property_index);
    if (property->is_computed_name()) break;
    if (property->IsCompileTimeValue()) continue;

    Literal* key = property->key()->AsLiteral();
    Expression* value = property->value();
    if (!result_saved) {
      PushOperand(r0);  // Save result on stack.
      result_saved = true;
    }
    switch (property->kind()) {
      case ObjectLiteral::Property::CONSTANT:
        UNREACHABLE();
      case ObjectLiteral::Property::MATERIALIZED_LITERAL:
        DCHECK(!CompileTimeValue::IsCompileTimeValue(property->value()));
        // Fall through.
      case ObjectLiteral::Property::COMPUTED:
        // It is safe to use [[Put]] here because the boilerplate already
        // contains computed properties with an uninitialized value.
        if (key->value()->IsInternalizedString()) {
          if (property->emit_store()) {
            VisitForAccumulatorValue(value);
            DCHECK(StoreDescriptor::ValueRegister().is(r0));
            __ mov(StoreDescriptor::NameRegister(), Operand(key->value()));
            __ ldr(StoreDescriptor::ReceiverRegister(), MemOperand(sp));
            EmitLoadStoreICSlot(property->GetSlot(0));
            CallStoreIC();
            PrepareForBailoutForId(key->id(), BailoutState::NO_REGISTERS);

            if (NeedsHomeObject(value)) {
              EmitSetHomeObjectAccumulator(value, 0, property->GetSlot(1));
            }
          } else {
            VisitForEffect(value);
          }
          break;
        }
        // Duplicate receiver on stack.
        __ ldr(r0, MemOperand(sp));
        PushOperand(r0);
        VisitForStackValue(key);
        VisitForStackValue(value);
        if (property->emit_store()) {
          if (NeedsHomeObject(value)) {
            EmitSetHomeObject(value, 2, property->GetSlot());
          }
          __ mov(r0, Operand(Smi::FromInt(SLOPPY)));  // PropertyAttributes.
          PushOperand(r0);
          CallRuntimeWithOperands(Runtime::kSetProperty);
        } else {
          DropOperands(3);
        }
        break;
      case ObjectLiteral::Property::PROTOTYPE:
        // Duplicate receiver on stack.
        __ ldr(r0, MemOperand(sp));
        PushOperand(r0);
        VisitForStackValue(value);
        DCHECK(property->emit_store());
        CallRuntimeWithOperands(Runtime::kInternalSetPrototype);
        PrepareForBailoutForId(expr->GetIdForPropertySet(property_index),
                               BailoutState::NO_REGISTERS);
        break;

      case ObjectLiteral::Property::GETTER:
        if (property->emit_store()) {
          accessor_table.lookup(key)->second->getter = property;
        }
        break;
      case ObjectLiteral::Property::SETTER:
        if (property->emit_store()) {
          accessor_table.lookup(key)->second->setter = property;
        }
        break;
    }
  }

  // Emit code to define accessors, using only a single call to the runtime for
  // each pair of corresponding getters and setters.
  for (AccessorTable::Iterator it = accessor_table.begin();
       it != accessor_table.end();
       ++it) {
    __ ldr(r0, MemOperand(sp));  // Duplicate receiver.
    PushOperand(r0);
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00001521 VisitForStackValue(it->first);
1522 EmitAccessor(it->second->getter);
1523 EmitAccessor(it->second->setter);
1524 __ mov(r0, Operand(Smi::FromInt(NONE)));
Ben Murdoch097c5b22016-05-18 11:27:45 +01001525 PushOperand(r0);
1526 CallRuntimeWithOperands(Runtime::kDefineAccessorPropertyUnchecked);
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00001527 }
1528
1529 // Object literals have two parts. The "static" part on the left contains no
1530 // computed property names, and so we can compute its map ahead of time; see
1531 // runtime.cc::CreateObjectLiteralBoilerplate. The second "dynamic" part
1532 // starts with the first computed property name, and continues with all
1533 // properties to its right. All the code from above initializes the static
1534 // component of the object literal, and arranges for the map of the result to
1535 // reflect the static order in which the keys appear. For the dynamic
1536 // properties, we compile them into a series of "SetOwnProperty" runtime
1537 // calls. This will preserve insertion order.
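  // Annotation (illustrative): in { a: 1, [k]: 2, b: 3 } the property 'a'
  // belongs to the static part and is initialized by the code above, while
  // [k] and the trailing 'b' are defined by the runtime calls emitted in the
  // loop below, preserving insertion order.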
1538 for (; property_index < expr->properties()->length(); property_index++) {
1539 ObjectLiteral::Property* property = expr->properties()->at(property_index);
1540
1541 Expression* value = property->value();
1542 if (!result_saved) {
Ben Murdoch097c5b22016-05-18 11:27:45 +01001543 PushOperand(r0); // Save result on the stack
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00001544 result_saved = true;
1545 }
1546
1547 __ ldr(r0, MemOperand(sp)); // Duplicate receiver.
Ben Murdoch097c5b22016-05-18 11:27:45 +01001548 PushOperand(r0);
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00001549
1550 if (property->kind() == ObjectLiteral::Property::PROTOTYPE) {
1551 DCHECK(!property->is_computed_name());
1552 VisitForStackValue(value);
1553 DCHECK(property->emit_store());
Ben Murdoch097c5b22016-05-18 11:27:45 +01001554 CallRuntimeWithOperands(Runtime::kInternalSetPrototype);
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00001555 PrepareForBailoutForId(expr->GetIdForPropertySet(property_index),
Ben Murdochc5610432016-08-08 18:44:38 +01001556 BailoutState::NO_REGISTERS);
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00001557 } else {
1558 EmitPropertyKey(property, expr->GetIdForPropertyName(property_index));
1559 VisitForStackValue(value);
1560 if (NeedsHomeObject(value)) {
1561 EmitSetHomeObject(value, 2, property->GetSlot());
1562 }
1563
1564 switch (property->kind()) {
1565 case ObjectLiteral::Property::CONSTANT:
1566 case ObjectLiteral::Property::MATERIALIZED_LITERAL:
1567 case ObjectLiteral::Property::COMPUTED:
1568 if (property->emit_store()) {
Ben Murdoch097c5b22016-05-18 11:27:45 +01001569 PushOperand(Smi::FromInt(NONE));
1570 PushOperand(Smi::FromInt(property->NeedsSetFunctionName()));
1571 CallRuntimeWithOperands(Runtime::kDefineDataPropertyInLiteral);
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00001572 } else {
Ben Murdoch097c5b22016-05-18 11:27:45 +01001573 DropOperands(3);
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00001574 }
1575 break;
1576
1577 case ObjectLiteral::Property::PROTOTYPE:
1578 UNREACHABLE();
1579 break;
1580
1581 case ObjectLiteral::Property::GETTER:
Ben Murdoch097c5b22016-05-18 11:27:45 +01001582 PushOperand(Smi::FromInt(NONE));
1583 CallRuntimeWithOperands(Runtime::kDefineGetterPropertyUnchecked);
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00001584 break;
1585
1586 case ObjectLiteral::Property::SETTER:
Ben Murdoch097c5b22016-05-18 11:27:45 +01001587 PushOperand(Smi::FromInt(NONE));
1588 CallRuntimeWithOperands(Runtime::kDefineSetterPropertyUnchecked);
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00001589 break;
1590 }
1591 }
1592 }
1593
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00001594 if (result_saved) {
1595 context()->PlugTOS();
1596 } else {
1597 context()->Plug(r0);
1598 }
1599}
1600
1601
1602void FullCodeGenerator::VisitArrayLiteral(ArrayLiteral* expr) {
1603 Comment cmnt(masm_, "[ ArrayLiteral");
1604
1605 Handle<FixedArray> constant_elements = expr->constant_elements();
1606 bool has_fast_elements =
1607 IsFastObjectElementsKind(expr->constant_elements_kind());
1608 Handle<FixedArrayBase> constant_elements_values(
1609 FixedArrayBase::cast(constant_elements->get(1)));
1610
1611 AllocationSiteMode allocation_site_mode = TRACK_ALLOCATION_SITE;
1612 if (has_fast_elements && !FLAG_allocation_site_pretenuring) {
1613 // If the only customer of allocation sites is transitioning, then
1614 // we can turn it off if we don't have anywhere else to transition to.
1615 allocation_site_mode = DONT_TRACK_ALLOCATION_SITE;
1616 }
1617
1618 __ ldr(r3, MemOperand(fp, JavaScriptFrameConstants::kFunctionOffset));
1619 __ mov(r2, Operand(Smi::FromInt(expr->literal_index())));
1620 __ mov(r1, Operand(constant_elements));
1621 if (MustCreateArrayLiteralWithRuntime(expr)) {
1622 __ mov(r0, Operand(Smi::FromInt(expr->ComputeFlags())));
1623 __ Push(r3, r2, r1, r0);
1624 __ CallRuntime(Runtime::kCreateArrayLiteral);
1625 } else {
1626 FastCloneShallowArrayStub stub(isolate(), allocation_site_mode);
1627 __ CallStub(&stub);
1628 }
Ben Murdochc5610432016-08-08 18:44:38 +01001629 PrepareForBailoutForId(expr->CreateLiteralId(), BailoutState::TOS_REGISTER);
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00001630
1631 bool result_saved = false; // Is the result saved to the stack?
1632 ZoneList<Expression*>* subexprs = expr->values();
1633 int length = subexprs->length();
1634
1635 // Emit code to evaluate all the non-constant subexpressions and to store
1636 // them into the newly cloned array.
1637 int array_index = 0;
1638 for (; array_index < length; array_index++) {
1639 Expression* subexpr = subexprs->at(array_index);
Ben Murdoch097c5b22016-05-18 11:27:45 +01001640 DCHECK(!subexpr->IsSpread());
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00001641
1642 // If the subexpression is a literal or a simple materialized literal it
1643 // is already set in the cloned array.
1644 if (CompileTimeValue::IsCompileTimeValue(subexpr)) continue;
1645
1646 if (!result_saved) {
Ben Murdoch097c5b22016-05-18 11:27:45 +01001647 PushOperand(r0);
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00001648 result_saved = true;
1649 }
1650 VisitForAccumulatorValue(subexpr);
1651
1652 __ mov(StoreDescriptor::NameRegister(), Operand(Smi::FromInt(array_index)));
1653 __ ldr(StoreDescriptor::ReceiverRegister(), MemOperand(sp, 0));
1654 EmitLoadStoreICSlot(expr->LiteralFeedbackSlot());
1655 Handle<Code> ic =
1656 CodeFactory::KeyedStoreIC(isolate(), language_mode()).code();
1657 CallIC(ic);
1658
Ben Murdochc5610432016-08-08 18:44:38 +01001659 PrepareForBailoutForId(expr->GetIdForElement(array_index),
1660 BailoutState::NO_REGISTERS);
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00001661 }
1662
 1663 // In case the array literal contains spread expressions it has two parts. The
 1664 // first part is the "static" array, which has a literal index and is handled
 1665 // above. The second part is the part after the first spread expression
 1666 // (inclusive), and these elements get appended to the array. Note that the
 1667 // number of elements an iterable produces is unknown ahead of time.
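  // Annotation: per the DCHECKs in both loops, spread expressions themselves
  // are expected to have been rewritten before reaching full-codegen here;
  // when the two-part layout does apply, the loop below appends the remaining
  // elements one at a time via Runtime::kAppendElement, since an iterable may
  // produce any number of elements.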
1668 if (array_index < length && result_saved) {
Ben Murdoch097c5b22016-05-18 11:27:45 +01001669 PopOperand(r0);
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00001670 result_saved = false;
1671 }
1672 for (; array_index < length; array_index++) {
1673 Expression* subexpr = subexprs->at(array_index);
1674
Ben Murdoch097c5b22016-05-18 11:27:45 +01001675 PushOperand(r0);
1676 DCHECK(!subexpr->IsSpread());
1677 VisitForStackValue(subexpr);
1678 CallRuntimeWithOperands(Runtime::kAppendElement);
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00001679
Ben Murdochc5610432016-08-08 18:44:38 +01001680 PrepareForBailoutForId(expr->GetIdForElement(array_index),
1681 BailoutState::NO_REGISTERS);
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00001682 }
1683
1684 if (result_saved) {
1685 context()->PlugTOS();
1686 } else {
1687 context()->Plug(r0);
1688 }
1689}
1690
1691
1692void FullCodeGenerator::VisitAssignment(Assignment* expr) {
1693 DCHECK(expr->target()->IsValidReferenceExpressionOrThis());
1694
1695 Comment cmnt(masm_, "[ Assignment");
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00001696
1697 Property* property = expr->target()->AsProperty();
1698 LhsKind assign_type = Property::GetAssignType(property);
1699
1700 // Evaluate LHS expression.
1701 switch (assign_type) {
1702 case VARIABLE:
1703 // Nothing to do here.
1704 break;
1705 case NAMED_PROPERTY:
1706 if (expr->is_compound()) {
1707 // We need the receiver both on the stack and in the register.
1708 VisitForStackValue(property->obj());
1709 __ ldr(LoadDescriptor::ReceiverRegister(), MemOperand(sp, 0));
1710 } else {
1711 VisitForStackValue(property->obj());
1712 }
1713 break;
1714 case NAMED_SUPER_PROPERTY:
1715 VisitForStackValue(
1716 property->obj()->AsSuperPropertyReference()->this_var());
1717 VisitForAccumulatorValue(
1718 property->obj()->AsSuperPropertyReference()->home_object());
Ben Murdoch097c5b22016-05-18 11:27:45 +01001719 PushOperand(result_register());
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00001720 if (expr->is_compound()) {
1721 const Register scratch = r1;
1722 __ ldr(scratch, MemOperand(sp, kPointerSize));
Ben Murdoch097c5b22016-05-18 11:27:45 +01001723 PushOperand(scratch);
1724 PushOperand(result_register());
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00001725 }
1726 break;
1727 case KEYED_SUPER_PROPERTY:
1728 VisitForStackValue(
1729 property->obj()->AsSuperPropertyReference()->this_var());
1730 VisitForStackValue(
1731 property->obj()->AsSuperPropertyReference()->home_object());
1732 VisitForAccumulatorValue(property->key());
Ben Murdoch097c5b22016-05-18 11:27:45 +01001733 PushOperand(result_register());
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00001734 if (expr->is_compound()) {
1735 const Register scratch = r1;
1736 __ ldr(scratch, MemOperand(sp, 2 * kPointerSize));
Ben Murdoch097c5b22016-05-18 11:27:45 +01001737 PushOperand(scratch);
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00001738 __ ldr(scratch, MemOperand(sp, 2 * kPointerSize));
Ben Murdoch097c5b22016-05-18 11:27:45 +01001739 PushOperand(scratch);
1740 PushOperand(result_register());
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00001741 }
1742 break;
1743 case KEYED_PROPERTY:
1744 if (expr->is_compound()) {
1745 VisitForStackValue(property->obj());
1746 VisitForStackValue(property->key());
1747 __ ldr(LoadDescriptor::ReceiverRegister(),
1748 MemOperand(sp, 1 * kPointerSize));
1749 __ ldr(LoadDescriptor::NameRegister(), MemOperand(sp, 0));
1750 } else {
1751 VisitForStackValue(property->obj());
1752 VisitForStackValue(property->key());
1753 }
1754 break;
1755 }
1756
1757 // For compound assignments we need another deoptimization point after the
1758 // variable/property load.
1759 if (expr->is_compound()) {
1760 { AccumulatorValueContext context(this);
1761 switch (assign_type) {
1762 case VARIABLE:
1763 EmitVariableLoad(expr->target()->AsVariableProxy());
Ben Murdochc5610432016-08-08 18:44:38 +01001764 PrepareForBailout(expr->target(), BailoutState::TOS_REGISTER);
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00001765 break;
1766 case NAMED_PROPERTY:
1767 EmitNamedPropertyLoad(property);
Ben Murdochc5610432016-08-08 18:44:38 +01001768 PrepareForBailoutForId(property->LoadId(),
1769 BailoutState::TOS_REGISTER);
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00001770 break;
1771 case NAMED_SUPER_PROPERTY:
1772 EmitNamedSuperPropertyLoad(property);
Ben Murdochc5610432016-08-08 18:44:38 +01001773 PrepareForBailoutForId(property->LoadId(),
1774 BailoutState::TOS_REGISTER);
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00001775 break;
1776 case KEYED_SUPER_PROPERTY:
1777 EmitKeyedSuperPropertyLoad(property);
Ben Murdochc5610432016-08-08 18:44:38 +01001778 PrepareForBailoutForId(property->LoadId(),
1779 BailoutState::TOS_REGISTER);
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00001780 break;
1781 case KEYED_PROPERTY:
1782 EmitKeyedPropertyLoad(property);
Ben Murdochc5610432016-08-08 18:44:38 +01001783 PrepareForBailoutForId(property->LoadId(),
1784 BailoutState::TOS_REGISTER);
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00001785 break;
1786 }
1787 }
1788
1789 Token::Value op = expr->binary_op();
Ben Murdoch097c5b22016-05-18 11:27:45 +01001790 PushOperand(r0); // Left operand goes on the stack.
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00001791 VisitForAccumulatorValue(expr->value());
1792
1793 AccumulatorValueContext context(this);
1794 if (ShouldInlineSmiCase(op)) {
1795 EmitInlineSmiBinaryOp(expr->binary_operation(),
1796 op,
1797 expr->target(),
1798 expr->value());
1799 } else {
1800 EmitBinaryOp(expr->binary_operation(), op);
1801 }
1802
1803 // Deoptimization point in case the binary operation may have side effects.
Ben Murdochc5610432016-08-08 18:44:38 +01001804 PrepareForBailout(expr->binary_operation(), BailoutState::TOS_REGISTER);
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00001805 } else {
1806 VisitForAccumulatorValue(expr->value());
1807 }
1808
1809 SetExpressionPosition(expr);
1810
1811 // Store the value.
1812 switch (assign_type) {
1813 case VARIABLE:
1814 EmitVariableAssignment(expr->target()->AsVariableProxy()->var(),
1815 expr->op(), expr->AssignmentSlot());
Ben Murdochc5610432016-08-08 18:44:38 +01001816 PrepareForBailoutForId(expr->AssignmentId(), BailoutState::TOS_REGISTER);
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00001817 context()->Plug(r0);
1818 break;
1819 case NAMED_PROPERTY:
1820 EmitNamedPropertyAssignment(expr);
1821 break;
1822 case NAMED_SUPER_PROPERTY:
1823 EmitNamedSuperPropertyStore(property);
1824 context()->Plug(r0);
1825 break;
1826 case KEYED_SUPER_PROPERTY:
1827 EmitKeyedSuperPropertyStore(property);
1828 context()->Plug(r0);
1829 break;
1830 case KEYED_PROPERTY:
1831 EmitKeyedPropertyAssignment(expr);
1832 break;
1833 }
1834}
1835
1836
1837void FullCodeGenerator::VisitYield(Yield* expr) {
1838 Comment cmnt(masm_, "[ Yield");
1839 SetExpressionPosition(expr);
1840
1841 // Evaluate yielded value first; the initial iterator definition depends on
1842 // this. It stays on the stack while we update the iterator.
1843 VisitForStackValue(expr->expression());
1844
Ben Murdochc5610432016-08-08 18:44:38 +01001845 Label suspend, continuation, post_runtime, resume, exception;
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00001846
Ben Murdochda12d292016-06-02 14:46:10 +01001847 __ jmp(&suspend);
1848 __ bind(&continuation);
Ben Murdochc5610432016-08-08 18:44:38 +01001849 // When we arrive here, r0 holds the generator object.
Ben Murdochda12d292016-06-02 14:46:10 +01001850 __ RecordGeneratorContinuation();
Ben Murdochc5610432016-08-08 18:44:38 +01001851 __ ldr(r1, FieldMemOperand(r0, JSGeneratorObject::kResumeModeOffset));
1852 __ ldr(r0, FieldMemOperand(r0, JSGeneratorObject::kInputOffset));
1853 STATIC_ASSERT(JSGeneratorObject::kNext < JSGeneratorObject::kReturn);
1854 STATIC_ASSERT(JSGeneratorObject::kThrow > JSGeneratorObject::kReturn);
1855 __ cmp(r1, Operand(Smi::FromInt(JSGeneratorObject::kReturn)));
1856 __ b(lt, &resume);
1857 __ Push(result_register());
1858 __ b(gt, &exception);
Ben Murdochda12d292016-06-02 14:46:10 +01001859 EmitCreateIteratorResult(true);
1860 EmitUnwindAndReturn();
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00001861
Ben Murdochc5610432016-08-08 18:44:38 +01001862 __ bind(&exception);
1863 __ CallRuntime(Runtime::kThrow);
1864
Ben Murdochda12d292016-06-02 14:46:10 +01001865 __ bind(&suspend);
1866 OperandStackDepthIncrement(1); // Not popped on this path.
1867 VisitForAccumulatorValue(expr->generator_object());
1868 DCHECK(continuation.pos() > 0 && Smi::IsValid(continuation.pos()));
1869 __ mov(r1, Operand(Smi::FromInt(continuation.pos())));
1870 __ str(r1, FieldMemOperand(r0, JSGeneratorObject::kContinuationOffset));
1871 __ str(cp, FieldMemOperand(r0, JSGeneratorObject::kContextOffset));
1872 __ mov(r1, cp);
1873 __ RecordWriteField(r0, JSGeneratorObject::kContextOffset, r1, r2,
1874 kLRHasBeenSaved, kDontSaveFPRegs);
1875 __ add(r1, fp, Operand(StandardFrameConstants::kExpressionsOffset));
1876 __ cmp(sp, r1);
1877 __ b(eq, &post_runtime);
1878 __ push(r0); // generator object
1879 __ CallRuntime(Runtime::kSuspendJSGeneratorObject, 1);
Ben Murdochc5610432016-08-08 18:44:38 +01001880 RestoreContext();
Ben Murdochda12d292016-06-02 14:46:10 +01001881 __ bind(&post_runtime);
1882 PopOperand(result_register());
1883 EmitReturnSequence();
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00001884
Ben Murdochda12d292016-06-02 14:46:10 +01001885 __ bind(&resume);
1886 context()->Plug(result_register());
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00001887}
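// Annotation: the yield protocol above saves the Smi-encoded offset of the
// 'continuation' label into the generator object's continuation field so a
// later resume can jump back to it. On re-entry, the resume mode is
// dispatched with a single comparison against kReturn, relying on the
// STATIC_ASSERTed ordering kNext < kReturn < kThrow.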
1888
Ben Murdoch097c5b22016-05-18 11:27:45 +01001889void FullCodeGenerator::PushOperands(Register reg1, Register reg2) {
1890 OperandStackDepthIncrement(2);
1891 __ Push(reg1, reg2);
1892}
1893
1894void FullCodeGenerator::PopOperands(Register reg1, Register reg2) {
1895 OperandStackDepthDecrement(2);
1896 __ Pop(reg1, reg2);
1897}
1898
1899void FullCodeGenerator::EmitOperandStackDepthCheck() {
1900 if (FLAG_debug_code) {
1901 int expected_diff = StandardFrameConstants::kFixedFrameSizeFromFp +
1902 operand_stack_depth_ * kPointerSize;
1903 __ sub(r0, fp, sp);
1904 __ cmp(r0, Operand(expected_diff));
1905 __ Assert(eq, kUnexpectedStackDepth);
1906 }
1907}
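// Annotation: the check above verifies that fp - sp equals the fixed frame
// size plus the tracked operand depth, i.e. that the PushOperand/PopOperand
// bookkeeping (OperandStackDepthIncrement/-Decrement) agrees with the actual
// machine stack pointer. It is only emitted under --debug-code.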
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00001908
1909void FullCodeGenerator::EmitCreateIteratorResult(bool done) {
1910 Label allocate, done_allocate;
1911
Ben Murdochc5610432016-08-08 18:44:38 +01001912 __ Allocate(JSIteratorResult::kSize, r0, r2, r3, &allocate,
1913 NO_ALLOCATION_FLAGS);
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00001914 __ b(&done_allocate);
1915
1916 __ bind(&allocate);
1917 __ Push(Smi::FromInt(JSIteratorResult::kSize));
1918 __ CallRuntime(Runtime::kAllocateInNewSpace);
1919
1920 __ bind(&done_allocate);
1921 __ LoadNativeContextSlot(Context::ITERATOR_RESULT_MAP_INDEX, r1);
Ben Murdochda12d292016-06-02 14:46:10 +01001922 PopOperand(r2);
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00001923 __ LoadRoot(r3,
1924 done ? Heap::kTrueValueRootIndex : Heap::kFalseValueRootIndex);
1925 __ LoadRoot(r4, Heap::kEmptyFixedArrayRootIndex);
1926 __ str(r1, FieldMemOperand(r0, HeapObject::kMapOffset));
1927 __ str(r4, FieldMemOperand(r0, JSObject::kPropertiesOffset));
1928 __ str(r4, FieldMemOperand(r0, JSObject::kElementsOffset));
1929 __ str(r2, FieldMemOperand(r0, JSIteratorResult::kValueOffset));
1930 __ str(r3, FieldMemOperand(r0, JSIteratorResult::kDoneOffset));
1931}
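// Annotation: EmitCreateIteratorResult builds a JSIteratorResult of shape
// { value, done } without calling a constructor: the map comes from the
// native context, 'value' is popped from the operand stack into r2, and
// 'done' is selected statically from the bool parameter.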
1932
1933
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00001934void FullCodeGenerator::EmitInlineSmiBinaryOp(BinaryOperation* expr,
1935 Token::Value op,
1936 Expression* left_expr,
1937 Expression* right_expr) {
1938 Label done, smi_case, stub_call;
1939
1940 Register scratch1 = r2;
1941 Register scratch2 = r3;
1942
1943 // Get the arguments.
1944 Register left = r1;
1945 Register right = r0;
Ben Murdoch097c5b22016-05-18 11:27:45 +01001946 PopOperand(left);
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00001947
1948 // Perform combined smi check on both operands.
1949 __ orr(scratch1, left, Operand(right));
1950 STATIC_ASSERT(kSmiTag == 0);
1951 JumpPatchSite patch_site(masm_);
1952 patch_site.EmitJumpIfSmi(scratch1, &smi_case);
1953
1954 __ bind(&stub_call);
Ben Murdoch097c5b22016-05-18 11:27:45 +01001955 Handle<Code> code = CodeFactory::BinaryOpIC(isolate(), op).code();
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00001956 CallIC(code, expr->BinaryOperationFeedbackId());
1957 patch_site.EmitPatchInfo();
1958 __ jmp(&done);
1959
1960 __ bind(&smi_case);
1961 // Smi case. This code works the same way as the smi-smi case in the type
 1962 // recording binary operation stub (the stub_call path above).
1963 switch (op) {
1964 case Token::SAR:
1965 __ GetLeastBitsFromSmi(scratch1, right, 5);
1966 __ mov(right, Operand(left, ASR, scratch1));
1967 __ bic(right, right, Operand(kSmiTagMask));
1968 break;
1969 case Token::SHL: {
1970 __ SmiUntag(scratch1, left);
1971 __ GetLeastBitsFromSmi(scratch2, right, 5);
1972 __ mov(scratch1, Operand(scratch1, LSL, scratch2));
1973 __ TrySmiTag(right, scratch1, &stub_call);
1974 break;
1975 }
1976 case Token::SHR: {
1977 __ SmiUntag(scratch1, left);
1978 __ GetLeastBitsFromSmi(scratch2, right, 5);
1979 __ mov(scratch1, Operand(scratch1, LSR, scratch2));
1980 __ tst(scratch1, Operand(0xc0000000));
1981 __ b(ne, &stub_call);
1982 __ SmiTag(right, scratch1);
1983 break;
1984 }
1985 case Token::ADD:
1986 __ add(scratch1, left, Operand(right), SetCC);
1987 __ b(vs, &stub_call);
1988 __ mov(right, scratch1);
1989 break;
1990 case Token::SUB:
1991 __ sub(scratch1, left, Operand(right), SetCC);
1992 __ b(vs, &stub_call);
1993 __ mov(right, scratch1);
1994 break;
1995 case Token::MUL: {
1996 __ SmiUntag(ip, right);
1997 __ smull(scratch1, scratch2, left, ip);
1998 __ mov(ip, Operand(scratch1, ASR, 31));
1999 __ cmp(ip, Operand(scratch2));
2000 __ b(ne, &stub_call);
2001 __ cmp(scratch1, Operand::Zero());
2002 __ mov(right, Operand(scratch1), LeaveCC, ne);
2003 __ b(ne, &done);
2004 __ add(scratch2, right, Operand(left), SetCC);
2005 __ mov(right, Operand(Smi::FromInt(0)), LeaveCC, pl);
2006 __ b(mi, &stub_call);
2007 break;
2008 }
2009 case Token::BIT_OR:
2010 __ orr(right, left, Operand(right));
2011 break;
2012 case Token::BIT_AND:
2013 __ and_(right, left, Operand(right));
2014 break;
2015 case Token::BIT_XOR:
2016 __ eor(right, left, Operand(right));
2017 break;
2018 default:
2019 UNREACHABLE();
2020 }
2021
2022 __ bind(&done);
2023 context()->Plug(r0);
2024}
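// Annotation: the inline smi arithmetic above exploits the tagging scheme
// (kSmiTag == 0; on 32-bit ARM a smi is the 31-bit value shifted left by
// one). ADD and SUB therefore operate directly on tagged words, with the
// overflow flag (vs) routing to the stub on failure, while MUL and the
// shifts untag one operand first and re-tag, or bail out, afterwards.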
2025
2026
2027void FullCodeGenerator::EmitClassDefineProperties(ClassLiteral* lit) {
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00002028 for (int i = 0; i < lit->properties()->length(); i++) {
2029 ObjectLiteral::Property* property = lit->properties()->at(i);
2030 Expression* value = property->value();
2031
Ben Murdoch097c5b22016-05-18 11:27:45 +01002032 Register scratch = r1;
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00002033 if (property->is_static()) {
2034 __ ldr(scratch, MemOperand(sp, kPointerSize)); // constructor
2035 } else {
2036 __ ldr(scratch, MemOperand(sp, 0)); // prototype
2037 }
Ben Murdoch097c5b22016-05-18 11:27:45 +01002038 PushOperand(scratch);
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00002039 EmitPropertyKey(property, lit->GetIdForProperty(i));
2040
 2041 // The static 'prototype' property is read-only. We handle the non-computed
 2042 // property name case in the parser. Since this is the only case where we
 2043 // need to check for an own read-only property, we special-case it here so
 2044 // we do not need to perform the check for every property.
2045 if (property->is_static() && property->is_computed_name()) {
2046 __ CallRuntime(Runtime::kThrowIfStaticPrototype);
2047 __ push(r0);
2048 }
2049
2050 VisitForStackValue(value);
2051 if (NeedsHomeObject(value)) {
2052 EmitSetHomeObject(value, 2, property->GetSlot());
2053 }
2054
2055 switch (property->kind()) {
2056 case ObjectLiteral::Property::CONSTANT:
2057 case ObjectLiteral::Property::MATERIALIZED_LITERAL:
2058 case ObjectLiteral::Property::PROTOTYPE:
2059 UNREACHABLE();
2060 case ObjectLiteral::Property::COMPUTED:
Ben Murdoch097c5b22016-05-18 11:27:45 +01002061 PushOperand(Smi::FromInt(DONT_ENUM));
2062 PushOperand(Smi::FromInt(property->NeedsSetFunctionName()));
2063 CallRuntimeWithOperands(Runtime::kDefineDataPropertyInLiteral);
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00002064 break;
2065
2066 case ObjectLiteral::Property::GETTER:
Ben Murdoch097c5b22016-05-18 11:27:45 +01002067 PushOperand(Smi::FromInt(DONT_ENUM));
2068 CallRuntimeWithOperands(Runtime::kDefineGetterPropertyUnchecked);
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00002069 break;
2070
2071 case ObjectLiteral::Property::SETTER:
Ben Murdoch097c5b22016-05-18 11:27:45 +01002072 PushOperand(Smi::FromInt(DONT_ENUM));
2073 CallRuntimeWithOperands(Runtime::kDefineSetterPropertyUnchecked);
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00002074 break;
2075
2076 default:
2077 UNREACHABLE();
2078 }
2079 }
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00002080}
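// Annotation: note the contrast with plain object literals above, which
// define their properties with attribute NONE; class members are defined
// with DONT_ENUM, matching the spec requirement that class methods and
// accessors be non-enumerable.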
2081
2082
2083void FullCodeGenerator::EmitBinaryOp(BinaryOperation* expr, Token::Value op) {
Ben Murdoch097c5b22016-05-18 11:27:45 +01002084 PopOperand(r1);
2085 Handle<Code> code = CodeFactory::BinaryOpIC(isolate(), op).code();
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00002086 JumpPatchSite patch_site(masm_); // unbound, signals no inlined smi code.
2087 CallIC(code, expr->BinaryOperationFeedbackId());
2088 patch_site.EmitPatchInfo();
2089 context()->Plug(r0);
2090}
2091
2092
2093void FullCodeGenerator::EmitAssignment(Expression* expr,
2094 FeedbackVectorSlot slot) {
2095 DCHECK(expr->IsValidReferenceExpressionOrThis());
2096
2097 Property* prop = expr->AsProperty();
2098 LhsKind assign_type = Property::GetAssignType(prop);
2099
2100 switch (assign_type) {
2101 case VARIABLE: {
2102 Variable* var = expr->AsVariableProxy()->var();
2103 EffectContext context(this);
2104 EmitVariableAssignment(var, Token::ASSIGN, slot);
2105 break;
2106 }
2107 case NAMED_PROPERTY: {
Ben Murdoch097c5b22016-05-18 11:27:45 +01002108 PushOperand(r0); // Preserve value.
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00002109 VisitForAccumulatorValue(prop->obj());
2110 __ Move(StoreDescriptor::ReceiverRegister(), r0);
Ben Murdoch097c5b22016-05-18 11:27:45 +01002111 PopOperand(StoreDescriptor::ValueRegister()); // Restore value.
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00002112 __ mov(StoreDescriptor::NameRegister(),
2113 Operand(prop->key()->AsLiteral()->value()));
2114 EmitLoadStoreICSlot(slot);
2115 CallStoreIC();
2116 break;
2117 }
2118 case NAMED_SUPER_PROPERTY: {
Ben Murdoch097c5b22016-05-18 11:27:45 +01002119 PushOperand(r0);
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00002120 VisitForStackValue(prop->obj()->AsSuperPropertyReference()->this_var());
2121 VisitForAccumulatorValue(
2122 prop->obj()->AsSuperPropertyReference()->home_object());
2123 // stack: value, this; r0: home_object
2124 Register scratch = r2;
2125 Register scratch2 = r3;
2126 __ mov(scratch, result_register()); // home_object
2127 __ ldr(r0, MemOperand(sp, kPointerSize)); // value
2128 __ ldr(scratch2, MemOperand(sp, 0)); // this
2129 __ str(scratch2, MemOperand(sp, kPointerSize)); // this
2130 __ str(scratch, MemOperand(sp, 0)); // home_object
2131 // stack: this, home_object; r0: value
2132 EmitNamedSuperPropertyStore(prop);
2133 break;
2134 }
2135 case KEYED_SUPER_PROPERTY: {
Ben Murdoch097c5b22016-05-18 11:27:45 +01002136 PushOperand(r0);
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00002137 VisitForStackValue(prop->obj()->AsSuperPropertyReference()->this_var());
2138 VisitForStackValue(
2139 prop->obj()->AsSuperPropertyReference()->home_object());
2140 VisitForAccumulatorValue(prop->key());
2141 Register scratch = r2;
2142 Register scratch2 = r3;
2143 __ ldr(scratch2, MemOperand(sp, 2 * kPointerSize)); // value
2144 // stack: value, this, home_object; r0: key, r3: value
2145 __ ldr(scratch, MemOperand(sp, kPointerSize)); // this
2146 __ str(scratch, MemOperand(sp, 2 * kPointerSize));
2147 __ ldr(scratch, MemOperand(sp, 0)); // home_object
2148 __ str(scratch, MemOperand(sp, kPointerSize));
2149 __ str(r0, MemOperand(sp, 0));
2150 __ Move(r0, scratch2);
2151 // stack: this, home_object, key; r0: value.
2152 EmitKeyedSuperPropertyStore(prop);
2153 break;
2154 }
2155 case KEYED_PROPERTY: {
Ben Murdoch097c5b22016-05-18 11:27:45 +01002156 PushOperand(r0); // Preserve value.
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00002157 VisitForStackValue(prop->obj());
2158 VisitForAccumulatorValue(prop->key());
2159 __ Move(StoreDescriptor::NameRegister(), r0);
Ben Murdoch097c5b22016-05-18 11:27:45 +01002160 PopOperands(StoreDescriptor::ValueRegister(),
2161 StoreDescriptor::ReceiverRegister());
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00002162 EmitLoadStoreICSlot(slot);
2163 Handle<Code> ic =
2164 CodeFactory::KeyedStoreIC(isolate(), language_mode()).code();
2165 CallIC(ic);
2166 break;
2167 }
2168 }
2169 context()->Plug(r0);
2170}
2171
2172
2173void FullCodeGenerator::EmitStoreToStackLocalOrContextSlot(
2174 Variable* var, MemOperand location) {
2175 __ str(result_register(), location);
2176 if (var->IsContextSlot()) {
2177 // RecordWrite may destroy all its register arguments.
2178 __ mov(r3, result_register());
2179 int offset = Context::SlotOffset(var->index());
2180 __ RecordWriteContextSlot(
2181 r1, offset, r3, r2, kLRHasBeenSaved, kDontSaveFPRegs);
2182 }
2183}
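// Annotation: only context slots need the write barrier above; stack slots
// are always rescanned as roots at GC time, so a plain store suffices. The
// value is copied into r3 first because, as noted, RecordWrite may destroy
// all of its register arguments.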
2184
2185
2186void FullCodeGenerator::EmitVariableAssignment(Variable* var, Token::Value op,
2187 FeedbackVectorSlot slot) {
2188 if (var->IsUnallocated()) {
2189 // Global var, const, or let.
2190 __ mov(StoreDescriptor::NameRegister(), Operand(var->name()));
2191 __ LoadGlobalObject(StoreDescriptor::ReceiverRegister());
2192 EmitLoadStoreICSlot(slot);
2193 CallStoreIC();
2194
2195 } else if (var->mode() == LET && op != Token::INIT) {
2196 // Non-initializing assignment to let variable needs a write barrier.
2197 DCHECK(!var->IsLookupSlot());
2198 DCHECK(var->IsStackAllocated() || var->IsContextSlot());
2199 Label assign;
2200 MemOperand location = VarOperand(var, r1);
2201 __ ldr(r3, location);
2202 __ CompareRoot(r3, Heap::kTheHoleValueRootIndex);
2203 __ b(ne, &assign);
2204 __ mov(r3, Operand(var->name()));
2205 __ push(r3);
2206 __ CallRuntime(Runtime::kThrowReferenceError);
2207 // Perform the assignment.
2208 __ bind(&assign);
2209 EmitStoreToStackLocalOrContextSlot(var, location);
2210
2211 } else if (var->mode() == CONST && op != Token::INIT) {
2212 // Assignment to const variable needs a write barrier.
2213 DCHECK(!var->IsLookupSlot());
2214 DCHECK(var->IsStackAllocated() || var->IsContextSlot());
2215 Label const_error;
2216 MemOperand location = VarOperand(var, r1);
2217 __ ldr(r3, location);
2218 __ CompareRoot(r3, Heap::kTheHoleValueRootIndex);
2219 __ b(ne, &const_error);
2220 __ mov(r3, Operand(var->name()));
2221 __ push(r3);
2222 __ CallRuntime(Runtime::kThrowReferenceError);
2223 __ bind(&const_error);
2224 __ CallRuntime(Runtime::kThrowConstAssignError);
2225
2226 } else if (var->is_this() && var->mode() == CONST && op == Token::INIT) {
2227 // Initializing assignment to const {this} needs a write barrier.
2228 DCHECK(var->IsStackAllocated() || var->IsContextSlot());
2229 Label uninitialized_this;
2230 MemOperand location = VarOperand(var, r1);
2231 __ ldr(r3, location);
2232 __ CompareRoot(r3, Heap::kTheHoleValueRootIndex);
2233 __ b(eq, &uninitialized_this);
2234 __ mov(r0, Operand(var->name()));
2235 __ Push(r0);
2236 __ CallRuntime(Runtime::kThrowReferenceError);
2237 __ bind(&uninitialized_this);
2238 EmitStoreToStackLocalOrContextSlot(var, location);
2239
Ben Murdochc5610432016-08-08 18:44:38 +01002240 } else if (!var->is_const_mode() || op == Token::INIT) {
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00002241 if (var->IsLookupSlot()) {
2242 // Assignment to var.
Ben Murdoch097c5b22016-05-18 11:27:45 +01002243 __ Push(var->name());
2244 __ Push(r0);
2245 __ CallRuntime(is_strict(language_mode())
2246 ? Runtime::kStoreLookupSlot_Strict
2247 : Runtime::kStoreLookupSlot_Sloppy);
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00002248 } else {
2249 // Assignment to var or initializing assignment to let/const in harmony
2250 // mode.
2251 DCHECK((var->IsStackAllocated() || var->IsContextSlot()));
2252 MemOperand location = VarOperand(var, r1);
Ben Murdoch097c5b22016-05-18 11:27:45 +01002253 if (FLAG_debug_code && var->mode() == LET && op == Token::INIT) {
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00002254 // Check for an uninitialized let binding.
2255 __ ldr(r2, location);
2256 __ CompareRoot(r2, Heap::kTheHoleValueRootIndex);
2257 __ Check(eq, kLetBindingReInitialization);
2258 }
2259 EmitStoreToStackLocalOrContextSlot(var, location);
2260 }
2261
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00002262 } else {
2263 DCHECK(var->mode() == CONST_LEGACY && op != Token::INIT);
2264 if (is_strict(language_mode())) {
2265 __ CallRuntime(Runtime::kThrowConstAssignError);
2266 }
2267 // Silently ignore store in sloppy mode.
2268 }
2269}
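// Annotation: the dispatch above also implements the temporal-dead-zone
// checks. A non-initializing assignment to a 'let' or 'const' binding first
// loads the slot and compares it against the hole, so code like
// 'x = 1; let x;' reaches Runtime::kThrowReferenceError before any store
// (or const-assignment error) happens.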
2270
2271
2272void FullCodeGenerator::EmitNamedPropertyAssignment(Assignment* expr) {
2273 // Assignment to a property, using a named store IC.
2274 Property* prop = expr->target()->AsProperty();
2275 DCHECK(prop != NULL);
2276 DCHECK(prop->key()->IsLiteral());
2277
2278 __ mov(StoreDescriptor::NameRegister(),
2279 Operand(prop->key()->AsLiteral()->value()));
Ben Murdoch097c5b22016-05-18 11:27:45 +01002280 PopOperand(StoreDescriptor::ReceiverRegister());
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00002281 EmitLoadStoreICSlot(expr->AssignmentSlot());
2282 CallStoreIC();
2283
Ben Murdochc5610432016-08-08 18:44:38 +01002284 PrepareForBailoutForId(expr->AssignmentId(), BailoutState::TOS_REGISTER);
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00002285 context()->Plug(r0);
2286}
2287
2288
2289void FullCodeGenerator::EmitNamedSuperPropertyStore(Property* prop) {
2290 // Assignment to named property of super.
2291 // r0 : value
2292 // stack : receiver ('this'), home_object
2293 DCHECK(prop != NULL);
2294 Literal* key = prop->key()->AsLiteral();
2295 DCHECK(key != NULL);
2296
Ben Murdoch097c5b22016-05-18 11:27:45 +01002297 PushOperand(key->value());
2298 PushOperand(r0);
2299 CallRuntimeWithOperands(is_strict(language_mode())
2300 ? Runtime::kStoreToSuper_Strict
2301 : Runtime::kStoreToSuper_Sloppy);
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00002302}
2303
2304
2305void FullCodeGenerator::EmitKeyedSuperPropertyStore(Property* prop) {
 2306 // Assignment to keyed property of super.
2307 // r0 : value
2308 // stack : receiver ('this'), home_object, key
2309 DCHECK(prop != NULL);
2310
Ben Murdoch097c5b22016-05-18 11:27:45 +01002311 PushOperand(r0);
2312 CallRuntimeWithOperands(is_strict(language_mode())
2313 ? Runtime::kStoreKeyedToSuper_Strict
2314 : Runtime::kStoreKeyedToSuper_Sloppy);
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00002315}
2316
2317
2318void FullCodeGenerator::EmitKeyedPropertyAssignment(Assignment* expr) {
2319 // Assignment to a property, using a keyed store IC.
Ben Murdoch097c5b22016-05-18 11:27:45 +01002320 PopOperands(StoreDescriptor::ReceiverRegister(),
2321 StoreDescriptor::NameRegister());
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00002322 DCHECK(StoreDescriptor::ValueRegister().is(r0));
2323
2324 Handle<Code> ic =
2325 CodeFactory::KeyedStoreIC(isolate(), language_mode()).code();
2326 EmitLoadStoreICSlot(expr->AssignmentSlot());
2327 CallIC(ic);
2328
Ben Murdochc5610432016-08-08 18:44:38 +01002329 PrepareForBailoutForId(expr->AssignmentId(), BailoutState::TOS_REGISTER);
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00002330 context()->Plug(r0);
2331}
2332
2333
2334void FullCodeGenerator::CallIC(Handle<Code> code,
2335 TypeFeedbackId ast_id) {
2336 ic_total_count_++;
2337 // All calls must have a predictable size in full-codegen code to ensure that
2338 // the debugger can patch them correctly.
2339 __ Call(code, RelocInfo::CODE_TARGET, ast_id, al,
2340 NEVER_INLINE_TARGET_ADDRESS);
2341}
2342
2343
2344// Code common for calls using the IC.
2345void FullCodeGenerator::EmitCallWithLoadIC(Call* expr) {
2346 Expression* callee = expr->expression();
2347
2348 // Get the target function.
2349 ConvertReceiverMode convert_mode;
2350 if (callee->IsVariableProxy()) {
2351 { StackValueContext context(this);
2352 EmitVariableLoad(callee->AsVariableProxy());
Ben Murdochc5610432016-08-08 18:44:38 +01002353 PrepareForBailout(callee, BailoutState::NO_REGISTERS);
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00002354 }
2355 // Push undefined as receiver. This is patched in the method prologue if it
2356 // is a sloppy mode method.
2357 __ LoadRoot(ip, Heap::kUndefinedValueRootIndex);
Ben Murdoch097c5b22016-05-18 11:27:45 +01002358 PushOperand(ip);
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00002359 convert_mode = ConvertReceiverMode::kNullOrUndefined;
2360 } else {
2361 // Load the function from the receiver.
2362 DCHECK(callee->IsProperty());
2363 DCHECK(!callee->AsProperty()->IsSuperAccess());
2364 __ ldr(LoadDescriptor::ReceiverRegister(), MemOperand(sp, 0));
2365 EmitNamedPropertyLoad(callee->AsProperty());
Ben Murdochc5610432016-08-08 18:44:38 +01002366 PrepareForBailoutForId(callee->AsProperty()->LoadId(),
2367 BailoutState::TOS_REGISTER);
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00002368 // Push the target function under the receiver.
2369 __ ldr(ip, MemOperand(sp, 0));
Ben Murdoch097c5b22016-05-18 11:27:45 +01002370 PushOperand(ip);
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00002371 __ str(r0, MemOperand(sp, kPointerSize));
2372 convert_mode = ConvertReceiverMode::kNotNullOrUndefined;
2373 }
2374
2375 EmitCall(expr, convert_mode);
2376}
2377
2378
2379void FullCodeGenerator::EmitSuperCallWithLoadIC(Call* expr) {
2380 Expression* callee = expr->expression();
2381 DCHECK(callee->IsProperty());
2382 Property* prop = callee->AsProperty();
2383 DCHECK(prop->IsSuperAccess());
2384 SetExpressionPosition(prop);
2385
2386 Literal* key = prop->key()->AsLiteral();
2387 DCHECK(!key->value()->IsSmi());
2388 // Load the function from the receiver.
2389 const Register scratch = r1;
2390 SuperPropertyReference* super_ref = prop->obj()->AsSuperPropertyReference();
2391 VisitForStackValue(super_ref->home_object());
2392 VisitForAccumulatorValue(super_ref->this_var());
Ben Murdoch097c5b22016-05-18 11:27:45 +01002393 PushOperand(r0);
2394 PushOperand(r0);
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00002395 __ ldr(scratch, MemOperand(sp, kPointerSize * 2));
Ben Murdoch097c5b22016-05-18 11:27:45 +01002396 PushOperand(scratch);
2397 PushOperand(key->value());
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00002398
2399 // Stack here:
2400 // - home_object
2401 // - this (receiver)
2402 // - this (receiver) <-- LoadFromSuper will pop here and below.
2403 // - home_object
2404 // - key
Ben Murdoch097c5b22016-05-18 11:27:45 +01002405 CallRuntimeWithOperands(Runtime::kLoadFromSuper);
Ben Murdochc5610432016-08-08 18:44:38 +01002406 PrepareForBailoutForId(prop->LoadId(), BailoutState::TOS_REGISTER);
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00002407
2408 // Replace home_object with target function.
2409 __ str(r0, MemOperand(sp, kPointerSize));
2410
2411 // Stack here:
2412 // - target function
2413 // - this (receiver)
2414 EmitCall(expr);
2415}
2416
2417
2418// Code common for calls using the IC.
2419void FullCodeGenerator::EmitKeyedCallWithLoadIC(Call* expr,
2420 Expression* key) {
2421 // Load the key.
2422 VisitForAccumulatorValue(key);
2423
2424 Expression* callee = expr->expression();
2425
2426 // Load the function from the receiver.
2427 DCHECK(callee->IsProperty());
2428 __ ldr(LoadDescriptor::ReceiverRegister(), MemOperand(sp, 0));
2429 __ Move(LoadDescriptor::NameRegister(), r0);
2430 EmitKeyedPropertyLoad(callee->AsProperty());
Ben Murdochc5610432016-08-08 18:44:38 +01002431 PrepareForBailoutForId(callee->AsProperty()->LoadId(),
2432 BailoutState::TOS_REGISTER);
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00002433
2434 // Push the target function under the receiver.
2435 __ ldr(ip, MemOperand(sp, 0));
Ben Murdoch097c5b22016-05-18 11:27:45 +01002436 PushOperand(ip);
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00002437 __ str(r0, MemOperand(sp, kPointerSize));
2438
2439 EmitCall(expr, ConvertReceiverMode::kNotNullOrUndefined);
2440}
2441
2442
2443void FullCodeGenerator::EmitKeyedSuperCallWithLoadIC(Call* expr) {
2444 Expression* callee = expr->expression();
2445 DCHECK(callee->IsProperty());
2446 Property* prop = callee->AsProperty();
2447 DCHECK(prop->IsSuperAccess());
2448
2449 SetExpressionPosition(prop);
2450 // Load the function from the receiver.
2451 const Register scratch = r1;
2452 SuperPropertyReference* super_ref = prop->obj()->AsSuperPropertyReference();
2453 VisitForStackValue(super_ref->home_object());
2454 VisitForAccumulatorValue(super_ref->this_var());
Ben Murdoch097c5b22016-05-18 11:27:45 +01002455 PushOperand(r0);
2456 PushOperand(r0);
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00002457 __ ldr(scratch, MemOperand(sp, kPointerSize * 2));
Ben Murdoch097c5b22016-05-18 11:27:45 +01002458 PushOperand(scratch);
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00002459 VisitForStackValue(prop->key());
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00002460
2461 // Stack here:
2462 // - home_object
2463 // - this (receiver)
2464 // - this (receiver) <-- LoadKeyedFromSuper will pop here and below.
2465 // - home_object
2466 // - key
Ben Murdoch097c5b22016-05-18 11:27:45 +01002467 CallRuntimeWithOperands(Runtime::kLoadKeyedFromSuper);
Ben Murdochc5610432016-08-08 18:44:38 +01002468 PrepareForBailoutForId(prop->LoadId(), BailoutState::TOS_REGISTER);
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00002469
2470 // Replace home_object with target function.
2471 __ str(r0, MemOperand(sp, kPointerSize));
2472
2473 // Stack here:
2474 // - target function
2475 // - this (receiver)
2476 EmitCall(expr);
2477}
2478
2479
2480void FullCodeGenerator::EmitCall(Call* expr, ConvertReceiverMode mode) {
2481 // Load the arguments.
2482 ZoneList<Expression*>* args = expr->arguments();
2483 int arg_count = args->length();
2484 for (int i = 0; i < arg_count; i++) {
2485 VisitForStackValue(args->at(i));
2486 }
2487
Ben Murdochc5610432016-08-08 18:44:38 +01002488 PrepareForBailoutForId(expr->CallId(), BailoutState::NO_REGISTERS);
Ben Murdochda12d292016-06-02 14:46:10 +01002489 SetCallPosition(expr, expr->tail_call_mode());
Ben Murdoch097c5b22016-05-18 11:27:45 +01002490 if (expr->tail_call_mode() == TailCallMode::kAllow) {
2491 if (FLAG_trace) {
2492 __ CallRuntime(Runtime::kTraceTailCall);
2493 }
2494 // Update profiling counters before the tail call since we will
2495 // not return to this function.
2496 EmitProfilingCounterHandlingForReturnSequence(true);
2497 }
2498 Handle<Code> ic =
2499 CodeFactory::CallIC(isolate(), arg_count, mode, expr->tail_call_mode())
2500 .code();
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00002501 __ mov(r3, Operand(SmiFromSlot(expr->CallFeedbackICSlot())));
2502 __ ldr(r1, MemOperand(sp, (arg_count + 1) * kPointerSize));
2503 // Don't assign a type feedback id to the IC, since type feedback is provided
2504 // by the vector above.
2505 CallIC(ic);
Ben Murdoch097c5b22016-05-18 11:27:45 +01002506 OperandStackDepthDecrement(arg_count + 1);
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00002507
2508 RecordJSReturnSite(expr);
Ben Murdochc5610432016-08-08 18:44:38 +01002509 RestoreContext();
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00002510 context()->DropAndPlug(1, r0);
2511}
2512
Ben Murdochc5610432016-08-08 18:44:38 +01002513void FullCodeGenerator::EmitResolvePossiblyDirectEval(Call* expr) {
2514 int arg_count = expr->arguments()->length();
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00002515 // r4: copy of the first argument or undefined if it doesn't exist.
2516 if (arg_count > 0) {
2517 __ ldr(r4, MemOperand(sp, arg_count * kPointerSize));
2518 } else {
2519 __ LoadRoot(r4, Heap::kUndefinedValueRootIndex);
2520 }
2521
2522 // r3: the receiver of the enclosing function.
2523 __ ldr(r3, MemOperand(fp, JavaScriptFrameConstants::kFunctionOffset));
2524
2525 // r2: language mode.
2526 __ mov(r2, Operand(Smi::FromInt(language_mode())));
2527
 2528 // r1: the start position of the scope the call resides in.
2529 __ mov(r1, Operand(Smi::FromInt(scope()->start_position())));
2530
Ben Murdochc5610432016-08-08 18:44:38 +01002531 // r0: the source position of the eval call.
2532 __ mov(r0, Operand(Smi::FromInt(expr->position())));
2533
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00002534 // Do the runtime call.
Ben Murdochc5610432016-08-08 18:44:38 +01002535 __ Push(r4, r3, r2, r1, r0);
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00002536 __ CallRuntime(Runtime::kResolvePossiblyDirectEval);
2537}
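// Annotation: for a direct call such as eval("x + 1"), the helper above
// pushes five values -- the first argument (or undefined), the enclosing
// function, the language mode, the scope's start position, and the source
// position of the call -- which Runtime::kResolvePossiblyDirectEval uses to
// compile the eval'd source in the correct context.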
2538
2539
2540// See http://www.ecma-international.org/ecma-262/6.0/#sec-function-calls.
2541void FullCodeGenerator::PushCalleeAndWithBaseObject(Call* expr) {
2542 VariableProxy* callee = expr->expression()->AsVariableProxy();
2543 if (callee->var()->IsLookupSlot()) {
2544 Label slow, done;
2545 SetExpressionPosition(callee);
2546 // Generate code for loading from variables potentially shadowed
2547 // by eval-introduced variables.
2548 EmitDynamicLookupFastCase(callee, NOT_INSIDE_TYPEOF, &slow, &done);
2549
2550 __ bind(&slow);
2551 // Call the runtime to find the function to call (returned in r0)
 2552 // and the object holding it (returned in r1).
Ben Murdoch097c5b22016-05-18 11:27:45 +01002553 __ Push(callee->name());
2554 __ CallRuntime(Runtime::kLoadLookupSlotForCall);
2555 PushOperands(r0, r1); // Function, receiver.
Ben Murdochc5610432016-08-08 18:44:38 +01002556 PrepareForBailoutForId(expr->LookupId(), BailoutState::NO_REGISTERS);
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00002557
2558 // If fast case code has been generated, emit code to push the
2559 // function and receiver and have the slow path jump around this
2560 // code.
2561 if (done.is_linked()) {
2562 Label call;
2563 __ b(&call);
2564 __ bind(&done);
2565 // Push function.
2566 __ push(r0);
2567 // The receiver is implicitly the global receiver. Indicate this
2568 // by passing the hole to the call function stub.
2569 __ LoadRoot(r1, Heap::kUndefinedValueRootIndex);
2570 __ push(r1);
2571 __ bind(&call);
2572 }
2573 } else {
2574 VisitForStackValue(callee);
2575 // refEnv.WithBaseObject()
2576 __ LoadRoot(r2, Heap::kUndefinedValueRootIndex);
Ben Murdoch097c5b22016-05-18 11:27:45 +01002577 PushOperand(r2); // Reserved receiver slot.
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00002578 }
2579}
2580
2581
2582void FullCodeGenerator::EmitPossiblyEvalCall(Call* expr) {
2583 // In a call to eval, we first call
Ben Murdochc5610432016-08-08 18:44:38 +01002584 // Runtime_ResolvePossiblyDirectEval to resolve the function we need
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00002585 // to call. Then we call the resolved function using the given arguments.
2586 ZoneList<Expression*>* args = expr->arguments();
2587 int arg_count = args->length();
2588
2589 PushCalleeAndWithBaseObject(expr);
2590
2591 // Push the arguments.
2592 for (int i = 0; i < arg_count; i++) {
2593 VisitForStackValue(args->at(i));
2594 }
2595
2596 // Push a copy of the function (found below the arguments) and
2597 // resolve eval.
2598 __ ldr(r1, MemOperand(sp, (arg_count + 1) * kPointerSize));
2599 __ push(r1);
Ben Murdochc5610432016-08-08 18:44:38 +01002600 EmitResolvePossiblyDirectEval(expr);
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00002601
2602 // Touch up the stack with the resolved function.
2603 __ str(r0, MemOperand(sp, (arg_count + 1) * kPointerSize));
2604
Ben Murdochc5610432016-08-08 18:44:38 +01002605 PrepareForBailoutForId(expr->EvalId(), BailoutState::NO_REGISTERS);
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00002606
2607 // Record source position for debugger.
2608 SetCallPosition(expr);
2609 __ ldr(r1, MemOperand(sp, (arg_count + 1) * kPointerSize));
2610 __ mov(r0, Operand(arg_count));
Ben Murdoch097c5b22016-05-18 11:27:45 +01002611 __ Call(isolate()->builtins()->Call(ConvertReceiverMode::kAny,
2612 expr->tail_call_mode()),
2613 RelocInfo::CODE_TARGET);
2614 OperandStackDepthDecrement(arg_count + 1);
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00002615 RecordJSReturnSite(expr);
Ben Murdochc5610432016-08-08 18:44:38 +01002616 RestoreContext();
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00002617 context()->DropAndPlug(1, r0);
2618}
2619
2620
2621void FullCodeGenerator::VisitCallNew(CallNew* expr) {
2622 Comment cmnt(masm_, "[ CallNew");
2623 // According to ECMA-262, section 11.2.2, page 44, the function
2624 // expression in new calls must be evaluated before the
2625 // arguments.
2626
2627 // Push constructor on the stack. If it's not a function it's used as
2628 // receiver for CALL_NON_FUNCTION, otherwise the value on the stack is
2629 // ignored.
2630 DCHECK(!expr->expression()->IsSuperPropertyReference());
2631 VisitForStackValue(expr->expression());
2632
2633 // Push the arguments ("left-to-right") on the stack.
2634 ZoneList<Expression*>* args = expr->arguments();
2635 int arg_count = args->length();
2636 for (int i = 0; i < arg_count; i++) {
2637 VisitForStackValue(args->at(i));
2638 }
2639
2640 // Call the construct call builtin that handles allocation and
2641 // constructor invocation.
2642 SetConstructCallPosition(expr);
2643
2644 // Load function and argument count into r1 and r0.
2645 __ mov(r0, Operand(arg_count));
2646 __ ldr(r1, MemOperand(sp, arg_count * kPointerSize));
2647
2648 // Record call targets in unoptimized code.
2649 __ EmitLoadTypeFeedbackVector(r2);
2650 __ mov(r3, Operand(SmiFromSlot(expr->CallNewFeedbackSlot())));
2651
2652 CallConstructStub stub(isolate());
2653 __ Call(stub.GetCode(), RelocInfo::CODE_TARGET);
Ben Murdoch097c5b22016-05-18 11:27:45 +01002654 OperandStackDepthDecrement(arg_count + 1);
Ben Murdochc5610432016-08-08 18:44:38 +01002655 PrepareForBailoutForId(expr->ReturnId(), BailoutState::TOS_REGISTER);
2656 RestoreContext();
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00002657 context()->Plug(r0);
2658}
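// Annotation: the register protocol for CallConstructStub above is r0 =
// argument count, r1 = the constructor being invoked, r2 = the type
// feedback vector, and r3 = the Smi-encoded feedback slot, matching the
// loads emitted just before the call.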
2659
2660
2661void FullCodeGenerator::EmitSuperConstructorCall(Call* expr) {
2662 SuperCallReference* super_call_ref =
2663 expr->expression()->AsSuperCallReference();
2664 DCHECK_NOT_NULL(super_call_ref);
2665
2666 // Push the super constructor target on the stack (may be null,
2667 // but the Construct builtin can deal with that properly).
2668 VisitForAccumulatorValue(super_call_ref->this_function_var());
2669 __ AssertFunction(result_register());
2670 __ ldr(result_register(),
2671 FieldMemOperand(result_register(), HeapObject::kMapOffset));
2672 __ ldr(result_register(),
2673 FieldMemOperand(result_register(), Map::kPrototypeOffset));
Ben Murdoch097c5b22016-05-18 11:27:45 +01002674 PushOperand(result_register());
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00002675
2676 // Push the arguments ("left-to-right") on the stack.
2677 ZoneList<Expression*>* args = expr->arguments();
2678 int arg_count = args->length();
2679 for (int i = 0; i < arg_count; i++) {
2680 VisitForStackValue(args->at(i));
2681 }
2682
2683 // Call the construct call builtin that handles allocation and
2684 // constructor invocation.
2685 SetConstructCallPosition(expr);
2686
2687 // Load new target into r3.
2688 VisitForAccumulatorValue(super_call_ref->new_target_var());
2689 __ mov(r3, result_register());
2690
2691 // Load function and argument count into r1 and r0.
2692 __ mov(r0, Operand(arg_count));
2693 __ ldr(r1, MemOperand(sp, arg_count * kPointerSize));
2694
2695 __ Call(isolate()->builtins()->Construct(), RelocInfo::CODE_TARGET);
Ben Murdoch097c5b22016-05-18 11:27:45 +01002696 OperandStackDepthDecrement(arg_count + 1);
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00002697
2698 RecordJSReturnSite(expr);
Ben Murdochc5610432016-08-08 18:44:38 +01002699 RestoreContext();
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00002700 context()->Plug(r0);
2701}
2702
2703
2704void FullCodeGenerator::EmitIsSmi(CallRuntime* expr) {
2705 ZoneList<Expression*>* args = expr->arguments();
2706 DCHECK(args->length() == 1);
2707
2708 VisitForAccumulatorValue(args->at(0));
2709
2710 Label materialize_true, materialize_false;
2711 Label* if_true = NULL;
2712 Label* if_false = NULL;
2713 Label* fall_through = NULL;
2714 context()->PrepareTest(&materialize_true, &materialize_false,
2715 &if_true, &if_false, &fall_through);
2716
2717 PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
2718 __ SmiTst(r0);
2719 Split(eq, if_true, if_false, fall_through);
2720
2721 context()->Plug(if_true, if_false);
2722}
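// Annotation: SmiTst tests the low tag bit of r0; because kSmiTag == 0, an
// 'eq' result means the value is a smi, so the IsSmi intrinsic above splits
// directly on that condition.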
2723
2724
2725void FullCodeGenerator::EmitIsJSReceiver(CallRuntime* expr) {
2726 ZoneList<Expression*>* args = expr->arguments();
2727 DCHECK(args->length() == 1);
2728
2729 VisitForAccumulatorValue(args->at(0));
2730
2731 Label materialize_true, materialize_false;
2732 Label* if_true = NULL;
2733 Label* if_false = NULL;
2734 Label* fall_through = NULL;
2735 context()->PrepareTest(&materialize_true, &materialize_false,
2736 &if_true, &if_false, &fall_through);
2737
2738 __ JumpIfSmi(r0, if_false);
2739 __ CompareObjectType(r0, r1, r1, FIRST_JS_RECEIVER_TYPE);
2740 PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
2741 Split(ge, if_true, if_false, fall_through);
2742
2743 context()->Plug(if_true, if_false);
2744}
2745
2746
void FullCodeGenerator::EmitIsArray(CallRuntime* expr) {
  ZoneList<Expression*>* args = expr->arguments();
  DCHECK(args->length() == 1);

  VisitForAccumulatorValue(args->at(0));

  Label materialize_true, materialize_false;
  Label* if_true = NULL;
  Label* if_false = NULL;
  Label* fall_through = NULL;
  context()->PrepareTest(&materialize_true, &materialize_false,
                         &if_true, &if_false, &fall_through);

  __ JumpIfSmi(r0, if_false);
  __ CompareObjectType(r0, r1, r1, JS_ARRAY_TYPE);
  PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
  Split(eq, if_true, if_false, fall_through);

  context()->Plug(if_true, if_false);
}


void FullCodeGenerator::EmitIsTypedArray(CallRuntime* expr) {
  ZoneList<Expression*>* args = expr->arguments();
  DCHECK(args->length() == 1);

  VisitForAccumulatorValue(args->at(0));

  Label materialize_true, materialize_false;
  Label* if_true = NULL;
  Label* if_false = NULL;
  Label* fall_through = NULL;
  context()->PrepareTest(&materialize_true, &materialize_false, &if_true,
                         &if_false, &fall_through);

  __ JumpIfSmi(r0, if_false);
  __ CompareObjectType(r0, r1, r1, JS_TYPED_ARRAY_TYPE);
  PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
  Split(eq, if_true, if_false, fall_through);

  context()->Plug(if_true, if_false);
}


void FullCodeGenerator::EmitIsRegExp(CallRuntime* expr) {
  ZoneList<Expression*>* args = expr->arguments();
  DCHECK(args->length() == 1);

  VisitForAccumulatorValue(args->at(0));

  Label materialize_true, materialize_false;
  Label* if_true = NULL;
  Label* if_false = NULL;
  Label* fall_through = NULL;
  context()->PrepareTest(&materialize_true, &materialize_false,
                         &if_true, &if_false, &fall_through);

  __ JumpIfSmi(r0, if_false);
  __ CompareObjectType(r0, r1, r1, JS_REGEXP_TYPE);
  PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
  Split(eq, if_true, if_false, fall_through);

  context()->Plug(if_true, if_false);
}


void FullCodeGenerator::EmitIsJSProxy(CallRuntime* expr) {
  ZoneList<Expression*>* args = expr->arguments();
  DCHECK(args->length() == 1);

  VisitForAccumulatorValue(args->at(0));

  Label materialize_true, materialize_false;
  Label* if_true = NULL;
  Label* if_false = NULL;
  Label* fall_through = NULL;
  context()->PrepareTest(&materialize_true, &materialize_false, &if_true,
                         &if_false, &fall_through);

  __ JumpIfSmi(r0, if_false);
  __ CompareObjectType(r0, r1, r1, JS_PROXY_TYPE);
  PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
  Split(eq, if_true, if_false, fall_through);

  context()->Plug(if_true, if_false);
}


void FullCodeGenerator::EmitClassOf(CallRuntime* expr) {
  ZoneList<Expression*>* args = expr->arguments();
  DCHECK(args->length() == 1);
  Label done, null, function, non_function_constructor;

  VisitForAccumulatorValue(args->at(0));

  // If the object is not a JSReceiver, we return null.
  __ JumpIfSmi(r0, &null);
  STATIC_ASSERT(LAST_JS_RECEIVER_TYPE == LAST_TYPE);
  __ CompareObjectType(r0, r0, r1, FIRST_JS_RECEIVER_TYPE);
  // Map is now in r0.
  __ b(lt, &null);

  // Return 'Function' for JSFunction and JSBoundFunction objects.
  __ cmp(r1, Operand(FIRST_FUNCTION_TYPE));
  STATIC_ASSERT(LAST_FUNCTION_TYPE == LAST_TYPE);
  __ b(hs, &function);

  // Check if the constructor in the map is a JS function.
  Register instance_type = r2;
  __ GetMapConstructor(r0, r0, r1, instance_type);
  __ cmp(instance_type, Operand(JS_FUNCTION_TYPE));
  __ b(ne, &non_function_constructor);

  // r0 now contains the constructor function. Grab the
  // instance class name from there.
  __ ldr(r0, FieldMemOperand(r0, JSFunction::kSharedFunctionInfoOffset));
  __ ldr(r0, FieldMemOperand(r0, SharedFunctionInfo::kInstanceClassNameOffset));
  __ b(&done);

  // Functions have class 'Function'.
  __ bind(&function);
  __ LoadRoot(r0, Heap::kFunction_stringRootIndex);
  __ jmp(&done);

  // Objects with a non-function constructor have class 'Object'.
  __ bind(&non_function_constructor);
  __ LoadRoot(r0, Heap::kObject_stringRootIndex);
  __ jmp(&done);

  // Non-JS objects have class null.
  __ bind(&null);
  __ LoadRoot(r0, Heap::kNullValueRootIndex);

  // All done.
  __ bind(&done);

  context()->Plug(r0);
}


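// Condition-code note for the checks above: after the FIRST_JS_RECEIVER_TYPE
// compare, lt filters out everything below the receiver range, and after the
// FIRST_FUNCTION_TYPE compare, hs (unsigned higher-or-same) catches the whole
// function range in one branch -- valid because LAST_FUNCTION_TYPE ==
// LAST_TYPE, so there is nothing above the range to exclude.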
void FullCodeGenerator::EmitValueOf(CallRuntime* expr) {
  ZoneList<Expression*>* args = expr->arguments();
  DCHECK(args->length() == 1);
  VisitForAccumulatorValue(args->at(0));  // Load the object.

  Label done;
  // If the object is a smi return the object.
  __ JumpIfSmi(r0, &done);
  // If the object is not a value type, return the object.
  __ CompareObjectType(r0, r1, r1, JS_VALUE_TYPE);
  __ ldr(r0, FieldMemOperand(r0, JSValue::kValueOffset), eq);

  __ bind(&done);
  context()->Plug(r0);
}

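// The eq-predicated ldr above is ARM conditional execution: the load runs
// only if the preceding compare set the eq flag, making the "unwrap a
// JSValue wrapper, otherwise keep the object" logic branch-free. E.g. for
// new Number(42) it loads 42 out of the wrapper; for a plain object it
// falls through with r0 untouched.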
void FullCodeGenerator::EmitOneByteSeqStringSetChar(CallRuntime* expr) {
  ZoneList<Expression*>* args = expr->arguments();
  DCHECK_EQ(3, args->length());

  Register string = r0;
  Register index = r1;
  Register value = r2;

  VisitForStackValue(args->at(0));        // index
  VisitForStackValue(args->at(1));        // value
  VisitForAccumulatorValue(args->at(2));  // string
  PopOperands(index, value);

  if (FLAG_debug_code) {
    __ SmiTst(value);
    __ Check(eq, kNonSmiValue);
    __ SmiTst(index);
    __ Check(eq, kNonSmiIndex);
    __ SmiUntag(index, index);
    static const uint32_t one_byte_seq_type = kSeqStringTag | kOneByteStringTag;
    __ EmitSeqStringSetCharCheck(string, index, value, one_byte_seq_type);
    __ SmiTag(index, index);
  }

  __ SmiUntag(value, value);
  __ add(ip,
         string,
         Operand(SeqOneByteString::kHeaderSize - kHeapObjectTag));
  __ strb(value, MemOperand(ip, index, LSR, kSmiTagSize));
  context()->Plug(string);
}

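// Addressing trick above: |index| is still a smi (the character index
// shifted left by kSmiTagSize), so MemOperand(ip, index, LSR, kSmiTagSize)
// shifts it back right by one inside the address computation -- smi
// untagging folded into the store at no extra instruction cost.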
void FullCodeGenerator::EmitTwoByteSeqStringSetChar(CallRuntime* expr) {
  ZoneList<Expression*>* args = expr->arguments();
  DCHECK_EQ(3, args->length());

  Register string = r0;
  Register index = r1;
  Register value = r2;

  VisitForStackValue(args->at(0));        // index
  VisitForStackValue(args->at(1));        // value
  VisitForAccumulatorValue(args->at(2));  // string
  PopOperands(index, value);

  if (FLAG_debug_code) {
    __ SmiTst(value);
    __ Check(eq, kNonSmiValue);
    __ SmiTst(index);
    __ Check(eq, kNonSmiIndex);
    __ SmiUntag(index, index);
    static const uint32_t two_byte_seq_type = kSeqStringTag | kTwoByteStringTag;
    __ EmitSeqStringSetCharCheck(string, index, value, two_byte_seq_type);
    __ SmiTag(index, index);
  }

  __ SmiUntag(value, value);
  __ add(ip,
         string,
         Operand(SeqTwoByteString::kHeaderSize - kHeapObjectTag));
  STATIC_ASSERT(kSmiTagSize == 1 && kSmiTag == 0);
  __ strh(value, MemOperand(ip, index));
  context()->Plug(string);
}

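// The two-byte variant needs no shift: a smi index is the character index
// scaled by two (kSmiTagSize == 1, kSmiTag == 0, per the STATIC_ASSERT
// above), which is exactly the byte offset of a two-byte character, so the
// tagged value serves as the offset directly.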
void FullCodeGenerator::EmitStringCharFromCode(CallRuntime* expr) {
  ZoneList<Expression*>* args = expr->arguments();
  DCHECK(args->length() == 1);
  VisitForAccumulatorValue(args->at(0));

  Label done;
  StringCharFromCodeGenerator generator(r0, r1);
  generator.GenerateFast(masm_);
  __ jmp(&done);

  NopRuntimeCallHelper call_helper;
  generator.GenerateSlow(masm_, call_helper);

  __ bind(&done);
  context()->Plug(r1);
}


void FullCodeGenerator::EmitStringCharCodeAt(CallRuntime* expr) {
  ZoneList<Expression*>* args = expr->arguments();
  DCHECK(args->length() == 2);
  VisitForStackValue(args->at(0));
  VisitForAccumulatorValue(args->at(1));

  Register object = r1;
  Register index = r0;
  Register result = r3;

  PopOperand(object);

  Label need_conversion;
  Label index_out_of_range;
  Label done;
  StringCharCodeAtGenerator generator(object,
                                      index,
                                      result,
                                      &need_conversion,
                                      &need_conversion,
                                      &index_out_of_range,
                                      STRING_INDEX_IS_NUMBER);
  generator.GenerateFast(masm_);
  __ jmp(&done);

  __ bind(&index_out_of_range);
  // When the index is out of range, the spec requires us to return
  // NaN.
  __ LoadRoot(result, Heap::kNanValueRootIndex);
  __ jmp(&done);

  __ bind(&need_conversion);
  // Load the undefined value into the result register, which will
  // trigger conversion.
  __ LoadRoot(result, Heap::kUndefinedValueRootIndex);
  __ jmp(&done);

  NopRuntimeCallHelper call_helper;
  generator.GenerateSlow(masm_, NOT_PART_OF_IC_HANDLER, call_helper);

  __ bind(&done);
  context()->Plug(result);
}

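// JS-level behaviour the two slow paths above implement (a sketch):
//
//   "abc".charCodeAt(10)   // -> NaN  (index_out_of_range)
//   "abc".charCodeAt("1")  // -> 98   (need_conversion: the index is not a
//                          //          smi yet, so the slow path coerces it)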
void FullCodeGenerator::EmitStringCharAt(CallRuntime* expr) {
  ZoneList<Expression*>* args = expr->arguments();
  DCHECK(args->length() == 2);
  VisitForStackValue(args->at(0));
  VisitForAccumulatorValue(args->at(1));

  Register object = r1;
  Register index = r0;
  Register scratch = r3;
  Register result = r0;

  PopOperand(object);

  Label need_conversion;
  Label index_out_of_range;
  Label done;
  StringCharAtGenerator generator(object,
                                  index,
                                  scratch,
                                  result,
                                  &need_conversion,
                                  &need_conversion,
                                  &index_out_of_range,
                                  STRING_INDEX_IS_NUMBER);
  generator.GenerateFast(masm_);
  __ jmp(&done);

  __ bind(&index_out_of_range);
  // When the index is out of range, the spec requires us to return
  // the empty string.
  __ LoadRoot(result, Heap::kempty_stringRootIndex);
  __ jmp(&done);

  __ bind(&need_conversion);
  // Move smi zero into the result register, which will trigger
  // conversion.
  __ mov(result, Operand(Smi::FromInt(0)));
  __ jmp(&done);

  NopRuntimeCallHelper call_helper;
  generator.GenerateSlow(masm_, NOT_PART_OF_IC_HANDLER, call_helper);

  __ bind(&done);
  context()->Plug(result);
}


void FullCodeGenerator::EmitCall(CallRuntime* expr) {
  ZoneList<Expression*>* args = expr->arguments();
  DCHECK_LE(2, args->length());
  // Push target, receiver and arguments onto the stack.
  for (Expression* const arg : *args) {
    VisitForStackValue(arg);
  }
  PrepareForBailoutForId(expr->CallId(), BailoutState::NO_REGISTERS);
  // Move target to r1.
  int const argc = args->length() - 2;
  __ ldr(r1, MemOperand(sp, (argc + 1) * kPointerSize));
  // Call the target.
  __ mov(r0, Operand(argc));
  __ Call(isolate()->builtins()->Call(), RelocInfo::CODE_TARGET);
  OperandStackDepthDecrement(argc + 1);
  RestoreContext();
  // Discard the function left on TOS.
  context()->DropAndPlug(1, r0);
}

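// Operand-stack layout at the Call() above, for %_Call(target, receiver,
// a0 .. a{argc-1}) -- a sketch, offsets in words from sp:
//
//   sp[argc + 1] : target     <- loaded into r1
//   sp[argc]     : receiver
//   sp[argc - 1] : a0
//   ...
//   sp[0]        : a{argc-1}
//
// argc excludes target and receiver, which is why the stack depth drops by
// argc + 1 and one slot (the target) is still left for DropAndPlug.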
void FullCodeGenerator::EmitHasCachedArrayIndex(CallRuntime* expr) {
  ZoneList<Expression*>* args = expr->arguments();
  VisitForAccumulatorValue(args->at(0));

  Label materialize_true, materialize_false;
  Label* if_true = NULL;
  Label* if_false = NULL;
  Label* fall_through = NULL;
  context()->PrepareTest(&materialize_true, &materialize_false,
                         &if_true, &if_false, &fall_through);

  __ ldr(r0, FieldMemOperand(r0, String::kHashFieldOffset));
  __ tst(r0, Operand(String::kContainsCachedArrayIndexMask));
  PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
  Split(eq, if_true, if_false, fall_through);

  context()->Plug(if_true, if_false);
}


void FullCodeGenerator::EmitGetCachedArrayIndex(CallRuntime* expr) {
  ZoneList<Expression*>* args = expr->arguments();
  DCHECK(args->length() == 1);
  VisitForAccumulatorValue(args->at(0));

  __ AssertString(r0);

  __ ldr(r0, FieldMemOperand(r0, String::kHashFieldOffset));
  __ IndexFromHash(r0, r0);

  context()->Plug(r0);
}

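// Background for the two helpers above (hedged -- the field layout lives in
// objects.h): a string's hash field can cache the string's numeric value as
// an array index; kContainsCachedArrayIndexMask tests the bits that say such
// a cached index is present, and IndexFromHash extracts it, so e.g. the
// string "42" used as a property key can skip a full number parse.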
void FullCodeGenerator::EmitGetSuperConstructor(CallRuntime* expr) {
  ZoneList<Expression*>* args = expr->arguments();
  DCHECK_EQ(1, args->length());
  VisitForAccumulatorValue(args->at(0));
  __ AssertFunction(r0);
  __ ldr(r0, FieldMemOperand(r0, HeapObject::kMapOffset));
  __ ldr(r0, FieldMemOperand(r0, Map::kPrototypeOffset));
  context()->Plug(r0);
}

void FullCodeGenerator::EmitDebugIsActive(CallRuntime* expr) {
  DCHECK(expr->arguments()->length() == 0);
  ExternalReference debug_is_active =
      ExternalReference::debug_is_active_address(isolate());
  __ mov(ip, Operand(debug_is_active));
  __ ldrb(r0, MemOperand(ip));
  __ SmiTag(r0);
  context()->Plug(r0);
}


void FullCodeGenerator::EmitCreateIterResultObject(CallRuntime* expr) {
  ZoneList<Expression*>* args = expr->arguments();
  DCHECK_EQ(2, args->length());
  VisitForStackValue(args->at(0));
  VisitForStackValue(args->at(1));

  Label runtime, done;

  __ Allocate(JSIteratorResult::kSize, r0, r2, r3, &runtime,
              NO_ALLOCATION_FLAGS);
  __ LoadNativeContextSlot(Context::ITERATOR_RESULT_MAP_INDEX, r1);
  __ pop(r3);
  __ pop(r2);
  __ LoadRoot(r4, Heap::kEmptyFixedArrayRootIndex);
  __ str(r1, FieldMemOperand(r0, HeapObject::kMapOffset));
  __ str(r4, FieldMemOperand(r0, JSObject::kPropertiesOffset));
  __ str(r4, FieldMemOperand(r0, JSObject::kElementsOffset));
  __ str(r2, FieldMemOperand(r0, JSIteratorResult::kValueOffset));
  __ str(r3, FieldMemOperand(r0, JSIteratorResult::kDoneOffset));
  STATIC_ASSERT(JSIteratorResult::kSize == 5 * kPointerSize);
  __ b(&done);

  __ bind(&runtime);
  CallRuntimeWithOperands(Runtime::kCreateIterResultObject);

  __ bind(&done);
  context()->Plug(r0);
}

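// The fast path above hand-builds the { value, done } iterator result that
// e.g. generators yield: five words -- map, empty properties, empty
// elements, value, done -- matching the STATIC_ASSERT on
// JSIteratorResult::kSize. Only if new-space allocation fails does it fall
// back to the runtime call, which produces the same object.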
void FullCodeGenerator::EmitLoadJSRuntimeFunction(CallRuntime* expr) {
  // Push function.
  __ LoadNativeContextSlot(expr->context_index(), r0);
  PushOperand(r0);

  // Push undefined as the receiver.
  __ LoadRoot(r0, Heap::kUndefinedValueRootIndex);
  PushOperand(r0);
}


void FullCodeGenerator::EmitCallJSRuntimeFunction(CallRuntime* expr) {
  ZoneList<Expression*>* args = expr->arguments();
  int arg_count = args->length();

  SetCallPosition(expr);
  __ ldr(r1, MemOperand(sp, (arg_count + 1) * kPointerSize));
  __ mov(r0, Operand(arg_count));
  __ Call(isolate()->builtins()->Call(ConvertReceiverMode::kNullOrUndefined),
          RelocInfo::CODE_TARGET);
  OperandStackDepthDecrement(arg_count + 1);
  RestoreContext();
}


void FullCodeGenerator::VisitUnaryOperation(UnaryOperation* expr) {
  switch (expr->op()) {
    case Token::DELETE: {
      Comment cmnt(masm_, "[ UnaryOperation (DELETE)");
      Property* property = expr->expression()->AsProperty();
      VariableProxy* proxy = expr->expression()->AsVariableProxy();

      if (property != NULL) {
        VisitForStackValue(property->obj());
        VisitForStackValue(property->key());
        CallRuntimeWithOperands(is_strict(language_mode())
                                    ? Runtime::kDeleteProperty_Strict
                                    : Runtime::kDeleteProperty_Sloppy);
        context()->Plug(r0);
      } else if (proxy != NULL) {
        Variable* var = proxy->var();
        // Delete of an unqualified identifier is disallowed in strict mode but
        // "delete this" is allowed.
        bool is_this = var->HasThisName(isolate());
        DCHECK(is_sloppy(language_mode()) || is_this);
        if (var->IsUnallocatedOrGlobalSlot()) {
          __ LoadGlobalObject(r2);
          __ mov(r1, Operand(var->name()));
          __ Push(r2, r1);
          __ CallRuntime(Runtime::kDeleteProperty_Sloppy);
          context()->Plug(r0);
        } else if (var->IsStackAllocated() || var->IsContextSlot()) {
          // Result of deleting non-global, non-dynamic variables is false.
          // The subexpression does not have side effects.
          context()->Plug(is_this);
        } else {
          // Non-global variable. Call the runtime to try to delete from the
          // context where the variable was introduced.
          __ Push(var->name());
          __ CallRuntime(Runtime::kDeleteLookupSlot);
          context()->Plug(r0);
        }
      } else {
        // Result of deleting non-property, non-variable reference is true.
        // The subexpression may have side effects.
        VisitForEffect(expr->expression());
        context()->Plug(true);
      }
      break;
    }

    case Token::VOID: {
      Comment cmnt(masm_, "[ UnaryOperation (VOID)");
      VisitForEffect(expr->expression());
      context()->Plug(Heap::kUndefinedValueRootIndex);
      break;
    }

    case Token::NOT: {
      Comment cmnt(masm_, "[ UnaryOperation (NOT)");
      if (context()->IsEffect()) {
        // Unary NOT has no side effects so it's only necessary to visit the
        // subexpression.  Match the optimizing compiler by not branching.
        VisitForEffect(expr->expression());
      } else if (context()->IsTest()) {
        const TestContext* test = TestContext::cast(context());
        // The labels are swapped for the recursive call.
        VisitForControl(expr->expression(),
                        test->false_label(),
                        test->true_label(),
                        test->fall_through());
        context()->Plug(test->true_label(), test->false_label());
      } else {
        // We handle value contexts explicitly rather than simply visiting
        // for control and plugging the control flow into the context,
        // because we need to prepare a pair of extra administrative AST ids
        // for the optimizing compiler.
        DCHECK(context()->IsAccumulatorValue() || context()->IsStackValue());
        Label materialize_true, materialize_false, done;
        VisitForControl(expr->expression(),
                        &materialize_false,
                        &materialize_true,
                        &materialize_true);
        if (!context()->IsAccumulatorValue()) OperandStackDepthIncrement(1);
        __ bind(&materialize_true);
        PrepareForBailoutForId(expr->MaterializeTrueId(),
                               BailoutState::NO_REGISTERS);
        __ LoadRoot(r0, Heap::kTrueValueRootIndex);
        if (context()->IsStackValue()) __ push(r0);
        __ jmp(&done);
        __ bind(&materialize_false);
        PrepareForBailoutForId(expr->MaterializeFalseId(),
                               BailoutState::NO_REGISTERS);
        __ LoadRoot(r0, Heap::kFalseValueRootIndex);
        if (context()->IsStackValue()) __ push(r0);
        __ bind(&done);
      }
      break;
    }

    case Token::TYPEOF: {
      Comment cmnt(masm_, "[ UnaryOperation (TYPEOF)");
      {
        AccumulatorValueContext context(this);
        VisitForTypeofValue(expr->expression());
      }
      __ mov(r3, r0);
      TypeofStub typeof_stub(isolate());
      __ CallStub(&typeof_stub);
      context()->Plug(r0);
      break;
    }

    default:
      UNREACHABLE();
  }
}

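// JS-level inputs mapping onto the DELETE cases above (a sketch):
//
//   delete obj.p   // property path: kDeleteProperty_Strict or _Sloppy
//   delete x       // sloppy only; global -> runtime, local slot -> false
//   delete this    // permitted even in strict mode; result is true
//   delete f()     // no reference at all: evaluate for effect, plug true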
void FullCodeGenerator::VisitCountOperation(CountOperation* expr) {
  DCHECK(expr->expression()->IsValidReferenceExpressionOrThis());

  Comment cmnt(masm_, "[ CountOperation");

  Property* prop = expr->expression()->AsProperty();
  LhsKind assign_type = Property::GetAssignType(prop);

  // Evaluate expression and get value.
  if (assign_type == VARIABLE) {
    DCHECK(expr->expression()->AsVariableProxy()->var() != NULL);
    AccumulatorValueContext context(this);
    EmitVariableLoad(expr->expression()->AsVariableProxy());
  } else {
    // Reserve space for result of postfix operation.
    if (expr->is_postfix() && !context()->IsEffect()) {
      __ mov(ip, Operand(Smi::FromInt(0)));
      PushOperand(ip);
    }
    switch (assign_type) {
      case NAMED_PROPERTY: {
        // Put the object both on the stack and in the register.
        VisitForStackValue(prop->obj());
        __ ldr(LoadDescriptor::ReceiverRegister(), MemOperand(sp, 0));
        EmitNamedPropertyLoad(prop);
        break;
      }

      case NAMED_SUPER_PROPERTY: {
        VisitForStackValue(prop->obj()->AsSuperPropertyReference()->this_var());
        VisitForAccumulatorValue(
            prop->obj()->AsSuperPropertyReference()->home_object());
        PushOperand(result_register());
        const Register scratch = r1;
        __ ldr(scratch, MemOperand(sp, kPointerSize));
        PushOperand(scratch);
        PushOperand(result_register());
        EmitNamedSuperPropertyLoad(prop);
        break;
      }

      case KEYED_SUPER_PROPERTY: {
        VisitForStackValue(prop->obj()->AsSuperPropertyReference()->this_var());
        VisitForStackValue(
            prop->obj()->AsSuperPropertyReference()->home_object());
        VisitForAccumulatorValue(prop->key());
        PushOperand(result_register());
        const Register scratch = r1;
        __ ldr(scratch, MemOperand(sp, 2 * kPointerSize));
        PushOperand(scratch);
        __ ldr(scratch, MemOperand(sp, 2 * kPointerSize));
        PushOperand(scratch);
        PushOperand(result_register());
        EmitKeyedSuperPropertyLoad(prop);
        break;
      }

      case KEYED_PROPERTY: {
        VisitForStackValue(prop->obj());
        VisitForStackValue(prop->key());
        __ ldr(LoadDescriptor::ReceiverRegister(),
               MemOperand(sp, 1 * kPointerSize));
        __ ldr(LoadDescriptor::NameRegister(), MemOperand(sp, 0));
        EmitKeyedPropertyLoad(prop);
        break;
      }

      case VARIABLE:
        UNREACHABLE();
    }
  }

  // We need a second deoptimization point after loading the value,
  // because evaluating the property load may have a side effect.
  if (assign_type == VARIABLE) {
    PrepareForBailout(expr->expression(), BailoutState::TOS_REGISTER);
  } else {
    PrepareForBailoutForId(prop->LoadId(), BailoutState::TOS_REGISTER);
  }

  // Inline smi case if we are in a loop.
  Label stub_call, done;
  JumpPatchSite patch_site(masm_);

  int count_value = expr->op() == Token::INC ? 1 : -1;
  if (ShouldInlineSmiCase(expr->op())) {
    Label slow;
    patch_site.EmitJumpIfNotSmi(r0, &slow);

    // Save result for postfix expressions.
    if (expr->is_postfix()) {
      if (!context()->IsEffect()) {
        // Save the result on the stack. If we have a named or keyed property
        // we store the result under the receiver that is currently on top
        // of the stack.
        switch (assign_type) {
          case VARIABLE:
            __ push(r0);
            break;
          case NAMED_PROPERTY:
            __ str(r0, MemOperand(sp, kPointerSize));
            break;
          case NAMED_SUPER_PROPERTY:
            __ str(r0, MemOperand(sp, 2 * kPointerSize));
            break;
          case KEYED_PROPERTY:
            __ str(r0, MemOperand(sp, 2 * kPointerSize));
            break;
          case KEYED_SUPER_PROPERTY:
            __ str(r0, MemOperand(sp, 3 * kPointerSize));
            break;
        }
      }
    }

    __ add(r0, r0, Operand(Smi::FromInt(count_value)), SetCC);
    __ b(vc, &done);
    // Call stub. Undo operation first.
    __ sub(r0, r0, Operand(Smi::FromInt(count_value)));
    __ jmp(&stub_call);
    __ bind(&slow);
  }

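  // Overflow handling in the fast path above: the add executes with SetCC,
  // and smi addition overflows exactly when the V flag is set, so b(vc,
  // &done) keeps the no-overflow result directly while the overflow case
  // undoes the add and falls through to the generic BinaryOpIC below.
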
  // Convert old value into a number.
  ToNumberStub convert_stub(isolate());
  __ CallStub(&convert_stub);
  PrepareForBailoutForId(expr->ToNumberId(), BailoutState::TOS_REGISTER);

  // Save result for postfix expressions.
  if (expr->is_postfix()) {
    if (!context()->IsEffect()) {
      // Save the result on the stack. If we have a named or keyed property
      // we store the result under the receiver that is currently on top
      // of the stack.
      switch (assign_type) {
        case VARIABLE:
          PushOperand(r0);
          break;
        case NAMED_PROPERTY:
          __ str(r0, MemOperand(sp, kPointerSize));
          break;
        case NAMED_SUPER_PROPERTY:
          __ str(r0, MemOperand(sp, 2 * kPointerSize));
          break;
        case KEYED_PROPERTY:
          __ str(r0, MemOperand(sp, 2 * kPointerSize));
          break;
        case KEYED_SUPER_PROPERTY:
          __ str(r0, MemOperand(sp, 3 * kPointerSize));
          break;
      }
    }
  }

  __ bind(&stub_call);
  __ mov(r1, r0);
  __ mov(r0, Operand(Smi::FromInt(count_value)));

  SetExpressionPosition(expr);

  Handle<Code> code = CodeFactory::BinaryOpIC(isolate(), Token::ADD).code();
  CallIC(code, expr->CountBinOpFeedbackId());
  patch_site.EmitPatchInfo();
  __ bind(&done);

  // Store the value returned in r0.
  switch (assign_type) {
    case VARIABLE:
      if (expr->is_postfix()) {
        { EffectContext context(this);
          EmitVariableAssignment(expr->expression()->AsVariableProxy()->var(),
                                 Token::ASSIGN, expr->CountSlot());
          PrepareForBailoutForId(expr->AssignmentId(),
                                 BailoutState::TOS_REGISTER);
          context.Plug(r0);
        }
        // For all contexts except EffectContext we have the result on
        // top of the stack.
        if (!context()->IsEffect()) {
          context()->PlugTOS();
        }
      } else {
        EmitVariableAssignment(expr->expression()->AsVariableProxy()->var(),
                               Token::ASSIGN, expr->CountSlot());
        PrepareForBailoutForId(expr->AssignmentId(),
                               BailoutState::TOS_REGISTER);
        context()->Plug(r0);
      }
      break;
    case NAMED_PROPERTY: {
      __ mov(StoreDescriptor::NameRegister(),
             Operand(prop->key()->AsLiteral()->value()));
      PopOperand(StoreDescriptor::ReceiverRegister());
      EmitLoadStoreICSlot(expr->CountSlot());
      CallStoreIC();
      PrepareForBailoutForId(expr->AssignmentId(), BailoutState::TOS_REGISTER);
      if (expr->is_postfix()) {
        if (!context()->IsEffect()) {
          context()->PlugTOS();
        }
      } else {
        context()->Plug(r0);
      }
      break;
    }
    case NAMED_SUPER_PROPERTY: {
      EmitNamedSuperPropertyStore(prop);
      if (expr->is_postfix()) {
        if (!context()->IsEffect()) {
          context()->PlugTOS();
        }
      } else {
        context()->Plug(r0);
      }
      break;
    }
    case KEYED_SUPER_PROPERTY: {
      EmitKeyedSuperPropertyStore(prop);
      if (expr->is_postfix()) {
        if (!context()->IsEffect()) {
          context()->PlugTOS();
        }
      } else {
        context()->Plug(r0);
      }
      break;
    }
    case KEYED_PROPERTY: {
      PopOperands(StoreDescriptor::ReceiverRegister(),
                  StoreDescriptor::NameRegister());
      Handle<Code> ic =
          CodeFactory::KeyedStoreIC(isolate(), language_mode()).code();
      EmitLoadStoreICSlot(expr->CountSlot());
      CallIC(ic);
      PrepareForBailoutForId(expr->AssignmentId(), BailoutState::TOS_REGISTER);
      if (expr->is_postfix()) {
        if (!context()->IsEffect()) {
          context()->PlugTOS();
        }
      } else {
        context()->Plug(r0);
      }
      break;
    }
  }
}


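// Why the postfix path shuffles the stack (a sketch): for o.x++ in a value
// context the old value is parked under the receiver on the operand stack
// before the increment, so after the store PlugTOS() can return it, while
// prefix ++o.x simply plugs the new value left in r0.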
void FullCodeGenerator::EmitLiteralCompareTypeof(Expression* expr,
                                                 Expression* sub_expr,
                                                 Handle<String> check) {
  Label materialize_true, materialize_false;
  Label* if_true = NULL;
  Label* if_false = NULL;
  Label* fall_through = NULL;
  context()->PrepareTest(&materialize_true, &materialize_false,
                         &if_true, &if_false, &fall_through);

  { AccumulatorValueContext context(this);
    VisitForTypeofValue(sub_expr);
  }
  PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);

  Factory* factory = isolate()->factory();
  if (String::Equals(check, factory->number_string())) {
    __ JumpIfSmi(r0, if_true);
    __ ldr(r0, FieldMemOperand(r0, HeapObject::kMapOffset));
    __ LoadRoot(ip, Heap::kHeapNumberMapRootIndex);
    __ cmp(r0, ip);
    Split(eq, if_true, if_false, fall_through);
  } else if (String::Equals(check, factory->string_string())) {
    __ JumpIfSmi(r0, if_false);
    __ CompareObjectType(r0, r0, r1, FIRST_NONSTRING_TYPE);
    Split(lt, if_true, if_false, fall_through);
  } else if (String::Equals(check, factory->symbol_string())) {
    __ JumpIfSmi(r0, if_false);
    __ CompareObjectType(r0, r0, r1, SYMBOL_TYPE);
    Split(eq, if_true, if_false, fall_through);
  } else if (String::Equals(check, factory->boolean_string())) {
    __ CompareRoot(r0, Heap::kTrueValueRootIndex);
    __ b(eq, if_true);
    __ CompareRoot(r0, Heap::kFalseValueRootIndex);
    Split(eq, if_true, if_false, fall_through);
  } else if (String::Equals(check, factory->undefined_string())) {
    __ CompareRoot(r0, Heap::kNullValueRootIndex);
    __ b(eq, if_false);
    __ JumpIfSmi(r0, if_false);
    // Check for undetectable objects => true.
    __ ldr(r0, FieldMemOperand(r0, HeapObject::kMapOffset));
    __ ldrb(r1, FieldMemOperand(r0, Map::kBitFieldOffset));
    __ tst(r1, Operand(1 << Map::kIsUndetectable));
    Split(ne, if_true, if_false, fall_through);

  } else if (String::Equals(check, factory->function_string())) {
    __ JumpIfSmi(r0, if_false);
    __ ldr(r0, FieldMemOperand(r0, HeapObject::kMapOffset));
    __ ldrb(r1, FieldMemOperand(r0, Map::kBitFieldOffset));
    __ and_(r1, r1,
            Operand((1 << Map::kIsCallable) | (1 << Map::kIsUndetectable)));
    __ cmp(r1, Operand(1 << Map::kIsCallable));
    Split(eq, if_true, if_false, fall_through);
  } else if (String::Equals(check, factory->object_string())) {
    __ JumpIfSmi(r0, if_false);
    __ CompareRoot(r0, Heap::kNullValueRootIndex);
    __ b(eq, if_true);
    STATIC_ASSERT(LAST_JS_RECEIVER_TYPE == LAST_TYPE);
    __ CompareObjectType(r0, r0, r1, FIRST_JS_RECEIVER_TYPE);
    __ b(lt, if_false);
    // Check for callable or undetectable objects => false.
    __ ldrb(r1, FieldMemOperand(r0, Map::kBitFieldOffset));
    __ tst(r1, Operand((1 << Map::kIsCallable) | (1 << Map::kIsUndetectable)));
    Split(eq, if_true, if_false, fall_through);
// clang-format off
#define SIMD128_TYPE(TYPE, Type, type, lane_count, lane_type)   \
  } else if (String::Equals(check, factory->type##_string())) { \
    __ JumpIfSmi(r0, if_false);                                 \
    __ ldr(r0, FieldMemOperand(r0, HeapObject::kMapOffset));    \
    __ CompareRoot(r0, Heap::k##Type##MapRootIndex);            \
    Split(eq, if_true, if_false, fall_through);
  SIMD128_TYPES(SIMD128_TYPE)
#undef SIMD128_TYPE
  // clang-format on
  } else {
    if (if_false != fall_through) __ jmp(if_false);
  }
  context()->Plug(if_true, if_false);
}

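// The branches above reproduce the usual typeof table (a sketch):
//
//   typeof 1          == "number"     typeof Symbol()  == "symbol"
//   typeof ""         == "string"     typeof undefined == "undefined"
//   typeof true       == "boolean"    typeof null      == "object"
//   typeof (() => 0)  == "function"   typeof {}        == "object"
//
// Undetectable objects (e.g. document.all) report "undefined" and, although
// callable, deliberately never report "function".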
void FullCodeGenerator::VisitCompareOperation(CompareOperation* expr) {
  Comment cmnt(masm_, "[ CompareOperation");

  // First we try a fast inlined version of the compare when one of
  // the operands is a literal.
  if (TryLiteralCompare(expr)) return;

  // Always perform the comparison for its control flow.  Pack the result
  // into the expression's context after the comparison is performed.
  Label materialize_true, materialize_false;
  Label* if_true = NULL;
  Label* if_false = NULL;
  Label* fall_through = NULL;
  context()->PrepareTest(&materialize_true, &materialize_false,
                         &if_true, &if_false, &fall_through);

  Token::Value op = expr->op();
  VisitForStackValue(expr->left());
  switch (op) {
    case Token::IN:
      VisitForStackValue(expr->right());
      SetExpressionPosition(expr);
      EmitHasProperty();
      PrepareForBailoutBeforeSplit(expr, false, NULL, NULL);
      __ CompareRoot(r0, Heap::kTrueValueRootIndex);
      Split(eq, if_true, if_false, fall_through);
      break;

    case Token::INSTANCEOF: {
      VisitForAccumulatorValue(expr->right());
      SetExpressionPosition(expr);
      PopOperand(r1);
      InstanceOfStub stub(isolate());
      __ CallStub(&stub);
      PrepareForBailoutBeforeSplit(expr, false, NULL, NULL);
      __ CompareRoot(r0, Heap::kTrueValueRootIndex);
      Split(eq, if_true, if_false, fall_through);
      break;
    }

    default: {
      VisitForAccumulatorValue(expr->right());
      SetExpressionPosition(expr);
      Condition cond = CompareIC::ComputeCondition(op);
      PopOperand(r1);

      bool inline_smi_code = ShouldInlineSmiCase(op);
      JumpPatchSite patch_site(masm_);
      if (inline_smi_code) {
        Label slow_case;
        __ orr(r2, r0, Operand(r1));
        patch_site.EmitJumpIfNotSmi(r2, &slow_case);
        __ cmp(r1, r0);
        Split(cond, if_true, if_false, NULL);
        __ bind(&slow_case);
      }

      Handle<Code> ic = CodeFactory::CompareIC(isolate(), op).code();
      CallIC(ic, expr->CompareOperationFeedbackId());
      patch_site.EmitPatchInfo();
      PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
      __ cmp(r0, Operand::Zero());
      Split(cond, if_true, if_false, fall_through);
    }
  }

  // Convert the result of the comparison into one expected for this
  // expression's context.
  context()->Plug(if_true, if_false);
}

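// Operand order in the default case above: the left operand was pushed and
// is popped into r1, the right operand sits in r0, so cmp(r1, r0) computes
// left - right and the condition from CompareIC::ComputeCondition applies
// unchanged. The orr of both values into r2 is a cheap "are both smis"
// test: the tag bit of the or is clear only if both tag bits are clear.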
void FullCodeGenerator::EmitLiteralCompareNil(CompareOperation* expr,
                                              Expression* sub_expr,
                                              NilValue nil) {
  Label materialize_true, materialize_false;
  Label* if_true = NULL;
  Label* if_false = NULL;
  Label* fall_through = NULL;
  context()->PrepareTest(&materialize_true, &materialize_false,
                         &if_true, &if_false, &fall_through);

  VisitForAccumulatorValue(sub_expr);
  PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
  if (expr->op() == Token::EQ_STRICT) {
    Heap::RootListIndex nil_value = nil == kNullValue ?
        Heap::kNullValueRootIndex :
        Heap::kUndefinedValueRootIndex;
    __ LoadRoot(r1, nil_value);
    __ cmp(r0, r1);
    Split(eq, if_true, if_false, fall_through);
  } else {
    __ JumpIfSmi(r0, if_false);
    __ ldr(r0, FieldMemOperand(r0, HeapObject::kMapOffset));
    __ ldrb(r1, FieldMemOperand(r0, Map::kBitFieldOffset));
    __ tst(r1, Operand(1 << Map::kIsUndetectable));
    Split(ne, if_true, if_false, fall_through);
  }
  context()->Plug(if_true, if_false);
}

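// The split above mirrors JS equality around nil (a sketch):
//
//   x === null   // strict: exact root compare against the null oddball
//   x == null    // non-strict: true for exactly the values whose maps
//                // carry kIsUndetectable -- null, undefined, and
//                // undetectable host objects such as document.all
//
// e.g. null == undefined is true, while null === undefined is false.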
Register FullCodeGenerator::result_register() {
  return r0;
}


Register FullCodeGenerator::context_register() {
  return cp;
}

void FullCodeGenerator::LoadFromFrameField(int frame_offset, Register value) {
  DCHECK_EQ(POINTER_SIZE_ALIGN(frame_offset), frame_offset);
  __ ldr(value, MemOperand(fp, frame_offset));
}

void FullCodeGenerator::StoreToFrameField(int frame_offset, Register value) {
  DCHECK_EQ(POINTER_SIZE_ALIGN(frame_offset), frame_offset);
  __ str(value, MemOperand(fp, frame_offset));
}


void FullCodeGenerator::LoadContextField(Register dst, int context_index) {
  __ ldr(dst, ContextMemOperand(cp, context_index));
}


void FullCodeGenerator::PushFunctionArgumentForContextAllocation() {
  Scope* closure_scope = scope()->ClosureScope();
  if (closure_scope->is_script_scope() ||
      closure_scope->is_module_scope()) {
    // Contexts nested in the native context have a canonical empty function
    // as their closure, not the anonymous closure containing the global
    // code.
    __ LoadNativeContextSlot(Context::CLOSURE_INDEX, ip);
  } else if (closure_scope->is_eval_scope()) {
    // Contexts created by a call to eval have the same closure as the
    // context calling eval, not the anonymous closure containing the eval
    // code.  Fetch it from the context.
    __ ldr(ip, ContextMemOperand(cp, Context::CLOSURE_INDEX));
  } else {
    DCHECK(closure_scope->is_function_scope());
    __ ldr(ip, MemOperand(fp, JavaScriptFrameConstants::kFunctionOffset));
  }
  PushOperand(ip);
}


// ----------------------------------------------------------------------------
// Non-local control flow support.

void FullCodeGenerator::EnterFinallyBlock() {
  DCHECK(!result_register().is(r1));
  // Store pending message while executing finally block.
  ExternalReference pending_message_obj =
      ExternalReference::address_of_pending_message_obj(isolate());
  __ mov(ip, Operand(pending_message_obj));
  __ ldr(r1, MemOperand(ip));
  PushOperand(r1);

  ClearPendingMessage();
}


void FullCodeGenerator::ExitFinallyBlock() {
  DCHECK(!result_register().is(r1));
  // Restore pending message from stack.
  PopOperand(r1);
  ExternalReference pending_message_obj =
      ExternalReference::address_of_pending_message_obj(isolate());
  __ mov(ip, Operand(pending_message_obj));
  __ str(r1, MemOperand(ip));
}


void FullCodeGenerator::ClearPendingMessage() {
  DCHECK(!result_register().is(r1));
  ExternalReference pending_message_obj =
      ExternalReference::address_of_pending_message_obj(isolate());
  __ LoadRoot(r1, Heap::kTheHoleValueRootIndex);
  __ mov(ip, Operand(pending_message_obj));
  __ str(r1, MemOperand(ip));
}


void FullCodeGenerator::DeferredCommands::EmitCommands() {
  DCHECK(!result_register().is(r1));
  __ Pop(result_register());  // Restore the accumulator.
  __ Pop(r1);                 // Get the token.
  for (DeferredCommand cmd : commands_) {
    Label skip;
    __ cmp(r1, Operand(Smi::FromInt(cmd.token)));
    __ b(ne, &skip);
    switch (cmd.command) {
      case kReturn:
        codegen_->EmitUnwindAndReturn();
        break;
      case kThrow:
        __ Push(result_register());
        __ CallRuntime(Runtime::kReThrow);
        break;
      case kContinue:
        codegen_->EmitContinue(cmd.target);
        break;
      case kBreak:
        codegen_->EmitBreak(cmd.target);
        break;
    }
    __ bind(&skip);
  }
}

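// Sketch of what the token dispatch above services:
//
//   for (;;) { try { break; } finally { cleanup(); } }
//
// The break deposits a token and a kBreak command, control runs the finally
// block, and EmitCommands() then compares the token against each deferred
// command to resume the interrupted break (or return/throw/continue).
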
#undef __


static Address GetInterruptImmediateLoadAddress(Address pc) {
  Address load_address = pc - 2 * Assembler::kInstrSize;
  if (!FLAG_enable_embedded_constant_pool) {
    DCHECK(Assembler::IsLdrPcImmediateOffset(Memory::int32_at(load_address)));
  } else if (Assembler::IsLdrPpRegOffset(Memory::int32_at(load_address))) {
    // This is an extended constant pool lookup.
    if (CpuFeatures::IsSupported(ARMv7)) {
      load_address -= 2 * Assembler::kInstrSize;
      DCHECK(Assembler::IsMovW(Memory::int32_at(load_address)));
      DCHECK(Assembler::IsMovT(
          Memory::int32_at(load_address + Assembler::kInstrSize)));
    } else {
      load_address -= 4 * Assembler::kInstrSize;
      DCHECK(Assembler::IsMovImmed(Memory::int32_at(load_address)));
      DCHECK(Assembler::IsOrrImmed(
          Memory::int32_at(load_address + Assembler::kInstrSize)));
      DCHECK(Assembler::IsOrrImmed(
          Memory::int32_at(load_address + 2 * Assembler::kInstrSize)));
      DCHECK(Assembler::IsOrrImmed(
          Memory::int32_at(load_address + 3 * Assembler::kInstrSize)));
    }
  } else if (CpuFeatures::IsSupported(ARMv7) &&
             Assembler::IsMovT(Memory::int32_at(load_address))) {
    // This is a movw / movt immediate load.
    load_address -= Assembler::kInstrSize;
    DCHECK(Assembler::IsMovW(Memory::int32_at(load_address)));
  } else if (!CpuFeatures::IsSupported(ARMv7) &&
             Assembler::IsOrrImmed(Memory::int32_at(load_address))) {
    // This is a mov / orr immediate load.
    load_address -= 3 * Assembler::kInstrSize;
    DCHECK(Assembler::IsMovImmed(Memory::int32_at(load_address)));
    DCHECK(Assembler::IsOrrImmed(
        Memory::int32_at(load_address + Assembler::kInstrSize)));
    DCHECK(Assembler::IsOrrImmed(
        Memory::int32_at(load_address + 2 * Assembler::kInstrSize)));
  } else {
    // This is a small constant pool lookup.
    DCHECK(Assembler::IsLdrPpImmediateOffset(Memory::int32_at(load_address)));
  }
  return load_address;
}


void BackEdgeTable::PatchAt(Code* unoptimized_code,
                            Address pc,
                            BackEdgeState target_state,
                            Code* replacement_code) {
  Address pc_immediate_load_address = GetInterruptImmediateLoadAddress(pc);
  Address branch_address = pc_immediate_load_address - Assembler::kInstrSize;
  Isolate* isolate = unoptimized_code->GetIsolate();
  CodePatcher patcher(isolate, branch_address, 1);
  switch (target_state) {
    case INTERRUPT:
    {
      // <decrement profiling counter>
      // bpl ok
      // ; load interrupt stub address into ip - either of (for ARMv7):
      // ; <small cp load>      |  <extended cp load> |  <immediate load>
      // ldr ip, [pc/pp, #imm]  |  movw ip, #imm      |  movw ip, #imm
      //                        |  movt ip, #imm      |  movw ip, #imm
      //                        |  ldr  ip, [pp, ip]
      // ; or (for ARMv6):
      // ; <small cp load>      |  <extended cp load> |  <immediate load>
      // ldr ip, [pc/pp, #imm]  |  mov ip, #imm       |  mov ip, #imm
      //                        |  orr ip, ip, #imm   |  orr ip, ip, #imm
      //                        |  orr ip, ip, #imm   |  orr ip, ip, #imm
      //                        |  orr ip, ip, #imm   |  orr ip, ip, #imm
      // blx ip
      // <reset profiling counter>
      // ok-label

      // Calculate the branch offset to the ok-label - this is the difference
      // between the branch address and |pc| (which points at <blx ip>) plus
      // kProfileCounterResetSequenceLength instructions.
      int branch_offset = pc - Instruction::kPCReadOffset - branch_address +
                          kProfileCounterResetSequenceLength;
      patcher.masm()->b(branch_offset, pl);
      break;
    }
    case ON_STACK_REPLACEMENT:
      // <decrement profiling counter>
      // mov r0, r0 (NOP)
      // ; load on-stack replacement address into ip - either of (for ARMv7):
      // ; <small cp load>      |  <extended cp load> |  <immediate load>
      // ldr ip, [pc/pp, #imm]  |  movw ip, #imm      |  movw ip, #imm
      //                        |  movt ip, #imm      |  movw ip, #imm
      //                        |  ldr  ip, [pp, ip]
      // ; or (for ARMv6):
      // ; <small cp load>      |  <extended cp load> |  <immediate load>
      // ldr ip, [pc/pp, #imm]  |  mov ip, #imm       |  mov ip, #imm
      //                        |  orr ip, ip, #imm   |  orr ip, ip, #imm
      //                        |  orr ip, ip, #imm   |  orr ip, ip, #imm
      //                        |  orr ip, ip, #imm   |  orr ip, ip, #imm
      // blx ip
      // <reset profiling counter>
      // ok-label
      patcher.masm()->nop();
      break;
  }

  // Replace the call address.
  Assembler::set_target_address_at(isolate, pc_immediate_load_address,
                                   unoptimized_code, replacement_code->entry());

  unoptimized_code->GetHeap()->incremental_marking()->RecordCodeTargetPatch(
      unoptimized_code, pc_immediate_load_address, replacement_code);
}

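// Note on the patching scheme above: only two words ever change -- the
// conditional branch slot (bpl <-> nop) and the address the ip-load
// resolves to. The sequence length is identical in both states, so back
// edge offsets recorded in the BackEdgeTable stay valid across patches.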
BackEdgeTable::BackEdgeState BackEdgeTable::GetBackEdgeState(
    Isolate* isolate,
    Code* unoptimized_code,
    Address pc) {
  DCHECK(Assembler::IsBlxIp(Memory::int32_at(pc - Assembler::kInstrSize)));

  Address pc_immediate_load_address = GetInterruptImmediateLoadAddress(pc);
  Address branch_address = pc_immediate_load_address - Assembler::kInstrSize;
#ifdef DEBUG
  Address interrupt_address = Assembler::target_address_at(
      pc_immediate_load_address, unoptimized_code);
#endif

  if (Assembler::IsBranch(Assembler::instr_at(branch_address))) {
    DCHECK(interrupt_address ==
           isolate->builtins()->InterruptCheck()->entry());
    return INTERRUPT;
  }

  DCHECK(Assembler::IsNop(Assembler::instr_at(branch_address)));

  DCHECK(interrupt_address ==
         isolate->builtins()->OnStackReplacement()->entry());
  return ON_STACK_REPLACEMENT;
}


}  // namespace internal
}  // namespace v8

#endif  // V8_TARGET_ARCH_ARM