// Copyright 2012 the V8 project authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.

#if V8_TARGET_ARCH_ARM

#include "src/ast/scopes.h"
#include "src/code-factory.h"
#include "src/code-stubs.h"
#include "src/codegen.h"
#include "src/debug/debug.h"
#include "src/full-codegen/full-codegen.h"
#include "src/ic/ic.h"
#include "src/parsing/parser.h"

#include "src/arm/code-stubs-arm.h"
#include "src/arm/macro-assembler-arm.h"

namespace v8 {
namespace internal {

#define __ ACCESS_MASM(masm())

// A patch site is a location in the code which it is possible to patch. This
// class has a number of methods to emit the code which is patchable and the
// method EmitPatchInfo to record a marker back to the patchable code. This
// marker is a cmp rx, #yyy instruction, and x * 0x00000fff + yyy (raw 12 bit
// immediate value is used) is the delta from the pc to the first instruction of
// the patchable code.
class JumpPatchSite BASE_EMBEDDED {
 public:
  explicit JumpPatchSite(MacroAssembler* masm) : masm_(masm) {
#ifdef DEBUG
    info_emitted_ = false;
#endif
  }

  ~JumpPatchSite() {
    DCHECK(patch_site_.is_bound() == info_emitted_);
  }

  // When initially emitting this ensure that a jump is always generated to skip
  // the inlined smi code.
  void EmitJumpIfNotSmi(Register reg, Label* target) {
    DCHECK(!patch_site_.is_bound() && !info_emitted_);
    Assembler::BlockConstPoolScope block_const_pool(masm_);
    __ bind(&patch_site_);
    __ cmp(reg, Operand(reg));
    __ b(eq, target);  // Always taken before patched.
  }

  // When initially emitting this ensure that a jump is never generated to skip
  // the inlined smi code.
  void EmitJumpIfSmi(Register reg, Label* target) {
    DCHECK(!patch_site_.is_bound() && !info_emitted_);
    Assembler::BlockConstPoolScope block_const_pool(masm_);
    __ bind(&patch_site_);
    __ cmp(reg, Operand(reg));
    __ b(ne, target);  // Never taken before patched.
  }

  void EmitPatchInfo() {
    // Block literal pool emission whilst recording patch site information.
    Assembler::BlockConstPoolScope block_const_pool(masm_);
    if (patch_site_.is_bound()) {
      int delta_to_patch_site = masm_->InstructionsGeneratedSince(&patch_site_);
      Register reg;
      reg.set_code(delta_to_patch_site / kOff12Mask);
      __ cmp_raw_immediate(reg, delta_to_patch_site % kOff12Mask);
#ifdef DEBUG
      info_emitted_ = true;
#endif
    } else {
      __ nop();  // Signals no inlined code.
    }
  }

 private:
  MacroAssembler* masm() { return masm_; }
  MacroAssembler* masm_;
  Label patch_site_;
#ifdef DEBUG
  bool info_emitted_;
#endif
};
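
// As a concrete illustration of EmitPatchInfo above: a delta of 42
// instructions is emitted as 'cmp r0, #42', since 42 / kOff12Mask == 0
// selects register r0 and 42 % kOff12Mask == 42 is the raw 12-bit
// immediate, allowing the patcher to recover the delta from the marker.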


// Generate code for a JS function. On entry to the function the receiver
// and arguments have been pushed on the stack left to right. The actual
// argument count matches the formal parameter count expected by the
// function.
//
// The live registers are:
//   o r1: the JS function object being called (i.e., ourselves)
//   o r3: the new target value
//   o cp: our context
//   o pp: our caller's constant pool pointer (if enabled)
//   o fp: our caller's frame pointer
//   o sp: stack pointer
//   o lr: return address
//
// The function builds a JS frame. Please see JavaScriptFrameConstants in
// frames-arm.h for its layout.
void FullCodeGenerator::Generate() {
  CompilationInfo* info = info_;
  profiling_counter_ = isolate()->factory()->NewCell(
      Handle<Smi>(Smi::FromInt(FLAG_interrupt_budget), isolate()));
  SetFunctionPosition(literal());
  Comment cmnt(masm_, "[ function compiled by full code generator");

  ProfileEntryHookStub::MaybeCallEntryHook(masm_);

  if (FLAG_debug_code && info->ExpectsJSReceiverAsReceiver()) {
    int receiver_offset = info->scope()->num_parameters() * kPointerSize;
    __ ldr(r2, MemOperand(sp, receiver_offset));
    __ AssertNotSmi(r2);
    __ CompareObjectType(r2, r2, no_reg, FIRST_JS_RECEIVER_TYPE);
    __ Assert(ge, kSloppyFunctionExpectsJSReceiverReceiver);
  }

  // Open a frame scope to indicate that there is a frame on the stack. The
  // MANUAL indicates that the scope shouldn't actually generate code to set up
  // the frame (that is done below).
  FrameScope frame_scope(masm_, StackFrame::MANUAL);

  info->set_prologue_offset(masm_->pc_offset());
  __ Prologue(info->GeneratePreagedPrologue());

  { Comment cmnt(masm_, "[ Allocate locals");
    int locals_count = info->scope()->num_stack_slots();
    // Generators allocate locals, if any, in context slots.
    DCHECK(!IsGeneratorFunction(info->literal()->kind()) || locals_count == 0);
    OperandStackDepthIncrement(locals_count);
    if (locals_count > 0) {
      if (locals_count >= 128) {
        Label ok;
        __ sub(r9, sp, Operand(locals_count * kPointerSize));
        __ LoadRoot(r2, Heap::kRealStackLimitRootIndex);
        __ cmp(r9, Operand(r2));
        __ b(hs, &ok);
        __ CallRuntime(Runtime::kThrowStackOverflow);
        __ bind(&ok);
      }
      __ LoadRoot(r9, Heap::kUndefinedValueRootIndex);
      int kMaxPushes = FLAG_optimize_for_size ? 4 : 32;
      if (locals_count >= kMaxPushes) {
        int loop_iterations = locals_count / kMaxPushes;
        __ mov(r2, Operand(loop_iterations));
        Label loop_header;
        __ bind(&loop_header);
        // Do pushes.
        for (int i = 0; i < kMaxPushes; i++) {
          __ push(r9);
        }
        // Continue loop if not done.
        __ sub(r2, r2, Operand(1), SetCC);
        __ b(&loop_header, ne);
      }
      int remaining = locals_count % kMaxPushes;
      // Emit the remaining pushes.
      for (int i = 0; i < remaining; i++) {
        __ push(r9);
      }
    }
  }

  bool function_in_register_r1 = true;

  // Possibly allocate a local context.
  if (info->scope()->num_heap_slots() > 0) {
    // Argument to NewContext is the function, which is still in r1.
    Comment cmnt(masm_, "[ Allocate context");
    bool need_write_barrier = true;
    int slots = info->scope()->num_heap_slots() - Context::MIN_CONTEXT_SLOTS;
    if (info->scope()->is_script_scope()) {
      __ push(r1);
      __ Push(info->scope()->GetScopeInfo(info->isolate()));
      __ CallRuntime(Runtime::kNewScriptContext);
      PrepareForBailoutForId(BailoutId::ScriptContext(), TOS_REG);
      // The new target value is not used, clobbering is safe.
      DCHECK_NULL(info->scope()->new_target_var());
    } else {
      if (info->scope()->new_target_var() != nullptr) {
        __ push(r3);  // Preserve new target.
      }
      if (slots <= FastNewContextStub::kMaximumSlots) {
        FastNewContextStub stub(isolate(), slots);
        __ CallStub(&stub);
        // Result of FastNewContextStub is always in new space.
        need_write_barrier = false;
      } else {
        __ push(r1);
        __ CallRuntime(Runtime::kNewFunctionContext);
      }
      if (info->scope()->new_target_var() != nullptr) {
        __ pop(r3);  // Restore new target.
      }
    }
    function_in_register_r1 = false;
    // Context is returned in r0. It replaces the context passed to us.
    // It's saved in the stack and kept live in cp.
    __ mov(cp, r0);
    __ str(r0, MemOperand(fp, StandardFrameConstants::kContextOffset));
    // Copy any necessary parameters into the context.
    int num_parameters = info->scope()->num_parameters();
    int first_parameter = info->scope()->has_this_declaration() ? -1 : 0;
    for (int i = first_parameter; i < num_parameters; i++) {
      Variable* var = (i == -1) ? scope()->receiver() : scope()->parameter(i);
      if (var->IsContextSlot()) {
        int parameter_offset = StandardFrameConstants::kCallerSPOffset +
            (num_parameters - 1 - i) * kPointerSize;
        // Load parameter from stack.
        __ ldr(r0, MemOperand(fp, parameter_offset));
        // Store it in the context.
        MemOperand target = ContextMemOperand(cp, var->index());
        __ str(r0, target);

        // Update the write barrier.
        if (need_write_barrier) {
          __ RecordWriteContextSlot(cp, target.offset(), r0, r2,
                                    kLRHasBeenSaved, kDontSaveFPRegs);
        } else if (FLAG_debug_code) {
          Label done;
          __ JumpIfInNewSpace(cp, r0, &done);
          __ Abort(kExpectedNewSpaceObject);
          __ bind(&done);
        }
      }
    }
  }

  // Register holding this function and new target are both trashed in case we
  // bailout here. But since that can happen only when new target is not used
  // and we allocate a context, the value of |function_in_register| is correct.
  PrepareForBailoutForId(BailoutId::FunctionContext(), NO_REGISTERS);

  // Possibly set up a local binding to the this function which is used in
  // derived constructors with super calls.
  Variable* this_function_var = scope()->this_function_var();
  if (this_function_var != nullptr) {
    Comment cmnt(masm_, "[ This function");
    if (!function_in_register_r1) {
      __ ldr(r1, MemOperand(fp, JavaScriptFrameConstants::kFunctionOffset));
      // The write barrier clobbers register again, keep it marked as such.
    }
    SetVar(this_function_var, r1, r0, r2);
  }

  // Possibly set up a local binding to the new target value.
  Variable* new_target_var = scope()->new_target_var();
  if (new_target_var != nullptr) {
    Comment cmnt(masm_, "[ new.target");
    SetVar(new_target_var, r3, r0, r2);
  }

  // Possibly allocate RestParameters
  int rest_index;
  Variable* rest_param = scope()->rest_parameter(&rest_index);
  if (rest_param) {
    Comment cmnt(masm_, "[ Allocate rest parameter array");
    if (!function_in_register_r1) {
      __ ldr(r1, MemOperand(fp, JavaScriptFrameConstants::kFunctionOffset));
    }
    FastNewRestParameterStub stub(isolate());
    __ CallStub(&stub);
    function_in_register_r1 = false;
    SetVar(rest_param, r0, r1, r2);
  }

  Variable* arguments = scope()->arguments();
  if (arguments != NULL) {
    // Function uses arguments object.
    Comment cmnt(masm_, "[ Allocate arguments object");
    if (!function_in_register_r1) {
      // Load this again, if it's used by the local context below.
      __ ldr(r1, MemOperand(fp, JavaScriptFrameConstants::kFunctionOffset));
    }
    if (is_strict(language_mode()) || !has_simple_parameters()) {
      FastNewStrictArgumentsStub stub(isolate());
      __ CallStub(&stub);
    } else if (literal()->has_duplicate_parameters()) {
      __ Push(r1);
      __ CallRuntime(Runtime::kNewSloppyArguments_Generic);
    } else {
      FastNewSloppyArgumentsStub stub(isolate());
      __ CallStub(&stub);
    }

    SetVar(arguments, r0, r1, r2);
  }

  if (FLAG_trace) {
    __ CallRuntime(Runtime::kTraceEnter);
  }

  // Visit the declarations and body unless there is an illegal
  // redeclaration.
  if (scope()->HasIllegalRedeclaration()) {
    Comment cmnt(masm_, "[ Declarations");
    VisitForEffect(scope()->GetIllegalRedeclaration());

  } else {
    PrepareForBailoutForId(BailoutId::FunctionEntry(), NO_REGISTERS);
    { Comment cmnt(masm_, "[ Declarations");
      VisitDeclarations(scope()->declarations());
    }

    // Assert that the declarations do not use ICs. Otherwise the debugger
    // won't be able to redirect a PC at an IC to the correct IC in newly
    // recompiled code.
    DCHECK_EQ(0, ic_total_count_);

    { Comment cmnt(masm_, "[ Stack check");
      PrepareForBailoutForId(BailoutId::Declarations(), NO_REGISTERS);
      Label ok;
      __ LoadRoot(ip, Heap::kStackLimitRootIndex);
      __ cmp(sp, Operand(ip));
      __ b(hs, &ok);
      Handle<Code> stack_check = isolate()->builtins()->StackCheck();
      PredictableCodeSizeScope predictable(masm_);
      predictable.ExpectSize(
          masm_->CallSize(stack_check, RelocInfo::CODE_TARGET));
      __ Call(stack_check, RelocInfo::CODE_TARGET);
      __ bind(&ok);
    }

    { Comment cmnt(masm_, "[ Body");
      DCHECK(loop_depth() == 0);
      VisitStatements(literal()->body());
      DCHECK(loop_depth() == 0);
    }
  }

  // Always emit a 'return undefined' in case control fell off the end of
  // the body.
  { Comment cmnt(masm_, "[ return <undefined>;");
    __ LoadRoot(r0, Heap::kUndefinedValueRootIndex);
  }
  EmitReturnSequence();

  // Force emit the constant pool, so it doesn't get emitted in the middle
  // of the back edge table.
  masm()->CheckConstPool(true, false);
}


void FullCodeGenerator::ClearAccumulator() {
  __ mov(r0, Operand(Smi::FromInt(0)));
}


void FullCodeGenerator::EmitProfilingCounterDecrement(int delta) {
  __ mov(r2, Operand(profiling_counter_));
  __ ldr(r3, FieldMemOperand(r2, Cell::kValueOffset));
  __ sub(r3, r3, Operand(Smi::FromInt(delta)), SetCC);
  __ str(r3, FieldMemOperand(r2, Cell::kValueOffset));
}


#ifdef CAN_USE_ARMV7_INSTRUCTIONS
static const int kProfileCounterResetSequenceLength = 5 * Assembler::kInstrSize;
#else
static const int kProfileCounterResetSequenceLength = 7 * Assembler::kInstrSize;
#endif


void FullCodeGenerator::EmitProfilingCounterReset() {
  Assembler::BlockConstPoolScope block_const_pool(masm_);
  PredictableCodeSizeScope predictable_code_size_scope(
      masm_, kProfileCounterResetSequenceLength);
  Label start;
  __ bind(&start);
  int reset_value = FLAG_interrupt_budget;
  __ mov(r2, Operand(profiling_counter_));
  // The mov instruction above can be either 1 to 3 (for ARMv7) or 1 to 5
  // instructions (for ARMv6) depending upon whether it is an extended constant
  // pool - insert nop to compensate.
  int expected_instr_count =
      (kProfileCounterResetSequenceLength / Assembler::kInstrSize) - 2;
  DCHECK(masm_->InstructionsGeneratedSince(&start) <= expected_instr_count);
  while (masm_->InstructionsGeneratedSince(&start) != expected_instr_count) {
    __ nop();
  }
  __ mov(r3, Operand(Smi::FromInt(reset_value)));
  __ str(r3, FieldMemOperand(r2, Cell::kValueOffset));
}
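
// Note: EmitProfilingCounterDecrement and EmitProfilingCounterReset drive the
// interrupt checks below: back edges and returns subtract a code-size-derived
// weight from the counter cell, and once the result goes negative the
// generated code calls the InterruptCheck builtin and resets the counter to
// FLAG_interrupt_budget.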


void FullCodeGenerator::EmitBackEdgeBookkeeping(IterationStatement* stmt,
                                                Label* back_edge_target) {
  Comment cmnt(masm_, "[ Back edge bookkeeping");
  // Block literal pools whilst emitting back edge code.
  Assembler::BlockConstPoolScope block_const_pool(masm_);
  Label ok;

  DCHECK(back_edge_target->is_bound());
  int distance = masm_->SizeOfCodeGeneratedSince(back_edge_target);
  int weight = Min(kMaxBackEdgeWeight,
                   Max(1, distance / kCodeSizeMultiplier));
  EmitProfilingCounterDecrement(weight);
  __ b(pl, &ok);
  __ Call(isolate()->builtins()->InterruptCheck(), RelocInfo::CODE_TARGET);

  // Record a mapping of this PC offset to the OSR id. This is used to find
  // the AST id from the unoptimized code in order to use it as a key into
  // the deoptimization input data found in the optimized code.
  RecordBackEdge(stmt->OsrEntryId());

  EmitProfilingCounterReset();

  __ bind(&ok);
  PrepareForBailoutForId(stmt->EntryId(), NO_REGISTERS);
  // Record a mapping of the OSR id to this PC. This is used if the OSR
  // entry becomes the target of a bailout. We don't expect it to be, but
  // we want it to work if it is.
  PrepareForBailoutForId(stmt->OsrEntryId(), NO_REGISTERS);
}

void FullCodeGenerator::EmitProfilingCounterHandlingForReturnSequence(
    bool is_tail_call) {
  // Pretend that the exit is a backwards jump to the entry.
  int weight = 1;
  if (info_->ShouldSelfOptimize()) {
    weight = FLAG_interrupt_budget / FLAG_self_opt_count;
  } else {
    int distance = masm_->pc_offset();
    weight = Min(kMaxBackEdgeWeight, Max(1, distance / kCodeSizeMultiplier));
  }
  EmitProfilingCounterDecrement(weight);
  Label ok;
  __ b(pl, &ok);
  // Don't need to save result register if we are going to do a tail call.
  if (!is_tail_call) {
    __ push(r0);
  }
  __ Call(isolate()->builtins()->InterruptCheck(), RelocInfo::CODE_TARGET);
  if (!is_tail_call) {
    __ pop(r0);
  }
  EmitProfilingCounterReset();
  __ bind(&ok);
}

void FullCodeGenerator::EmitReturnSequence() {
  Comment cmnt(masm_, "[ Return sequence");
  if (return_label_.is_bound()) {
    __ b(&return_label_);
  } else {
    __ bind(&return_label_);
    if (FLAG_trace) {
      // Push the return value on the stack as the parameter.
      // Runtime::TraceExit returns its parameter in r0.
      __ push(r0);
      __ CallRuntime(Runtime::kTraceExit);
    }
    EmitProfilingCounterHandlingForReturnSequence(false);

    // Make sure that the constant pool is not emitted inside of the return
    // sequence.
    { Assembler::BlockConstPoolScope block_const_pool(masm_);
      int32_t arg_count = info_->scope()->num_parameters() + 1;
      int32_t sp_delta = arg_count * kPointerSize;
      SetReturnPosition(literal());
      // TODO(svenpanne) The code below is sometimes 4 words, sometimes 5!
      PredictableCodeSizeScope predictable(masm_, -1);
      __ LeaveFrame(StackFrame::JAVA_SCRIPT);
      { ConstantPoolUnavailableScope constant_pool_unavailable(masm_);
        __ add(sp, sp, Operand(sp_delta));
        __ Jump(lr);
      }
    }
  }
}


void FullCodeGenerator::StackValueContext::Plug(Variable* var) const {
  DCHECK(var->IsStackAllocated() || var->IsContextSlot());
  codegen()->GetVar(result_register(), var);
  codegen()->PushOperand(result_register());
}


void FullCodeGenerator::EffectContext::Plug(Heap::RootListIndex index) const {
}


void FullCodeGenerator::AccumulatorValueContext::Plug(
    Heap::RootListIndex index) const {
  __ LoadRoot(result_register(), index);
}


void FullCodeGenerator::StackValueContext::Plug(
    Heap::RootListIndex index) const {
  __ LoadRoot(result_register(), index);
  codegen()->PushOperand(result_register());
}


void FullCodeGenerator::TestContext::Plug(Heap::RootListIndex index) const {
  codegen()->PrepareForBailoutBeforeSplit(condition(),
                                          true,
                                          true_label_,
                                          false_label_);
  if (index == Heap::kUndefinedValueRootIndex ||
      index == Heap::kNullValueRootIndex ||
      index == Heap::kFalseValueRootIndex) {
    if (false_label_ != fall_through_) __ b(false_label_);
  } else if (index == Heap::kTrueValueRootIndex) {
    if (true_label_ != fall_through_) __ b(true_label_);
  } else {
    __ LoadRoot(result_register(), index);
    codegen()->DoTest(this);
  }
}


void FullCodeGenerator::EffectContext::Plug(Handle<Object> lit) const {
}


void FullCodeGenerator::AccumulatorValueContext::Plug(
    Handle<Object> lit) const {
  __ mov(result_register(), Operand(lit));
}


void FullCodeGenerator::StackValueContext::Plug(Handle<Object> lit) const {
  // Immediates cannot be pushed directly.
  __ mov(result_register(), Operand(lit));
  codegen()->PushOperand(result_register());
}


void FullCodeGenerator::TestContext::Plug(Handle<Object> lit) const {
  codegen()->PrepareForBailoutBeforeSplit(condition(),
                                          true,
                                          true_label_,
                                          false_label_);
  DCHECK(lit->IsNull() || lit->IsUndefined() || !lit->IsUndetectableObject());
  if (lit->IsUndefined() || lit->IsNull() || lit->IsFalse()) {
    if (false_label_ != fall_through_) __ b(false_label_);
  } else if (lit->IsTrue() || lit->IsJSObject()) {
    if (true_label_ != fall_through_) __ b(true_label_);
  } else if (lit->IsString()) {
    if (String::cast(*lit)->length() == 0) {
      if (false_label_ != fall_through_) __ b(false_label_);
    } else {
      if (true_label_ != fall_through_) __ b(true_label_);
    }
  } else if (lit->IsSmi()) {
    if (Smi::cast(*lit)->value() == 0) {
      if (false_label_ != fall_through_) __ b(false_label_);
    } else {
      if (true_label_ != fall_through_) __ b(true_label_);
    }
  } else {
    // For simplicity we always test the accumulator register.
    __ mov(result_register(), Operand(lit));
    codegen()->DoTest(this);
  }
}


void FullCodeGenerator::StackValueContext::DropAndPlug(int count,
                                                       Register reg) const {
  DCHECK(count > 0);
  if (count > 1) codegen()->DropOperands(count - 1);
  __ str(reg, MemOperand(sp, 0));
}


void FullCodeGenerator::EffectContext::Plug(Label* materialize_true,
                                            Label* materialize_false) const {
  DCHECK(materialize_true == materialize_false);
  __ bind(materialize_true);
}


void FullCodeGenerator::AccumulatorValueContext::Plug(
    Label* materialize_true,
    Label* materialize_false) const {
  Label done;
  __ bind(materialize_true);
  __ LoadRoot(result_register(), Heap::kTrueValueRootIndex);
  __ jmp(&done);
  __ bind(materialize_false);
  __ LoadRoot(result_register(), Heap::kFalseValueRootIndex);
  __ bind(&done);
}


void FullCodeGenerator::StackValueContext::Plug(
    Label* materialize_true,
    Label* materialize_false) const {
  Label done;
  __ bind(materialize_true);
  __ LoadRoot(ip, Heap::kTrueValueRootIndex);
  __ jmp(&done);
  __ bind(materialize_false);
  __ LoadRoot(ip, Heap::kFalseValueRootIndex);
  __ bind(&done);
  codegen()->PushOperand(ip);
}


void FullCodeGenerator::TestContext::Plug(Label* materialize_true,
                                          Label* materialize_false) const {
  DCHECK(materialize_true == true_label_);
  DCHECK(materialize_false == false_label_);
}


void FullCodeGenerator::AccumulatorValueContext::Plug(bool flag) const {
  Heap::RootListIndex value_root_index =
      flag ? Heap::kTrueValueRootIndex : Heap::kFalseValueRootIndex;
  __ LoadRoot(result_register(), value_root_index);
}


void FullCodeGenerator::StackValueContext::Plug(bool flag) const {
  Heap::RootListIndex value_root_index =
      flag ? Heap::kTrueValueRootIndex : Heap::kFalseValueRootIndex;
  __ LoadRoot(ip, value_root_index);
  codegen()->PushOperand(ip);
}


void FullCodeGenerator::TestContext::Plug(bool flag) const {
  codegen()->PrepareForBailoutBeforeSplit(condition(),
                                          true,
                                          true_label_,
                                          false_label_);
  if (flag) {
    if (true_label_ != fall_through_) __ b(true_label_);
  } else {
    if (false_label_ != fall_through_) __ b(false_label_);
  }
}


void FullCodeGenerator::DoTest(Expression* condition,
                               Label* if_true,
                               Label* if_false,
                               Label* fall_through) {
  Handle<Code> ic = ToBooleanStub::GetUninitialized(isolate());
  CallIC(ic, condition->test_id());
  __ CompareRoot(result_register(), Heap::kTrueValueRootIndex);
  Split(eq, if_true, if_false, fall_through);
}


void FullCodeGenerator::Split(Condition cond,
                              Label* if_true,
                              Label* if_false,
                              Label* fall_through) {
  if (if_false == fall_through) {
    __ b(cond, if_true);
  } else if (if_true == fall_through) {
    __ b(NegateCondition(cond), if_false);
  } else {
    __ b(cond, if_true);
    __ b(if_false);
  }
}


MemOperand FullCodeGenerator::StackOperand(Variable* var) {
  DCHECK(var->IsStackAllocated());
  // Offset is negative because higher indexes are at lower addresses.
  int offset = -var->index() * kPointerSize;
  // Adjust by a (parameter or local) base offset.
  if (var->IsParameter()) {
    offset += (info_->scope()->num_parameters() + 1) * kPointerSize;
  } else {
    offset += JavaScriptFrameConstants::kLocal0Offset;
  }
  return MemOperand(fp, offset);
}


MemOperand FullCodeGenerator::VarOperand(Variable* var, Register scratch) {
  DCHECK(var->IsContextSlot() || var->IsStackAllocated());
  if (var->IsContextSlot()) {
    int context_chain_length = scope()->ContextChainLength(var->scope());
    __ LoadContext(scratch, context_chain_length);
    return ContextMemOperand(scratch, var->index());
  } else {
    return StackOperand(var);
  }
}


void FullCodeGenerator::GetVar(Register dest, Variable* var) {
  // Use destination as scratch.
  MemOperand location = VarOperand(var, dest);
  __ ldr(dest, location);
}


void FullCodeGenerator::SetVar(Variable* var,
                               Register src,
                               Register scratch0,
                               Register scratch1) {
  DCHECK(var->IsContextSlot() || var->IsStackAllocated());
  DCHECK(!scratch0.is(src));
  DCHECK(!scratch0.is(scratch1));
  DCHECK(!scratch1.is(src));
  MemOperand location = VarOperand(var, scratch0);
  __ str(src, location);

  // Emit the write barrier code if the location is in the heap.
  if (var->IsContextSlot()) {
    __ RecordWriteContextSlot(scratch0,
                              location.offset(),
                              src,
                              scratch1,
                              kLRHasBeenSaved,
                              kDontSaveFPRegs);
  }
}


void FullCodeGenerator::PrepareForBailoutBeforeSplit(Expression* expr,
                                                     bool should_normalize,
                                                     Label* if_true,
                                                     Label* if_false) {
  // Only prepare for bailouts before splits if we're in a test
  // context. Otherwise, we let the Visit function deal with the
  // preparation to avoid preparing with the same AST id twice.
  if (!context()->IsTest()) return;

  Label skip;
  if (should_normalize) __ b(&skip);
  PrepareForBailout(expr, TOS_REG);
  if (should_normalize) {
    __ LoadRoot(ip, Heap::kTrueValueRootIndex);
    __ cmp(r0, ip);
    Split(eq, if_true, if_false, NULL);
    __ bind(&skip);
  }
}


void FullCodeGenerator::EmitDebugCheckDeclarationContext(Variable* variable) {
  // The variable in the declaration always resides in the current function
  // context.
  DCHECK_EQ(0, scope()->ContextChainLength(variable->scope()));
  if (FLAG_debug_code) {
    // Check that we're not inside a with or catch context.
    __ ldr(r1, FieldMemOperand(cp, HeapObject::kMapOffset));
    __ CompareRoot(r1, Heap::kWithContextMapRootIndex);
    __ Check(ne, kDeclarationInWithContext);
    __ CompareRoot(r1, Heap::kCatchContextMapRootIndex);
    __ Check(ne, kDeclarationInCatchContext);
  }
}


void FullCodeGenerator::VisitVariableDeclaration(
    VariableDeclaration* declaration) {
  // If it was not possible to allocate the variable at compile time, we
  // need to "declare" it at runtime to make sure it actually exists in the
  // local context.
  VariableProxy* proxy = declaration->proxy();
  VariableMode mode = declaration->mode();
  Variable* variable = proxy->var();
  bool hole_init = mode == LET || mode == CONST || mode == CONST_LEGACY;
  switch (variable->location()) {
    case VariableLocation::GLOBAL:
    case VariableLocation::UNALLOCATED:
      globals_->Add(variable->name(), zone());
      globals_->Add(variable->binding_needs_init()
                        ? isolate()->factory()->the_hole_value()
                        : isolate()->factory()->undefined_value(),
                    zone());
      break;

    case VariableLocation::PARAMETER:
    case VariableLocation::LOCAL:
      if (hole_init) {
        Comment cmnt(masm_, "[ VariableDeclaration");
        __ LoadRoot(r0, Heap::kTheHoleValueRootIndex);
        __ str(r0, StackOperand(variable));
      }
      break;

    case VariableLocation::CONTEXT:
      if (hole_init) {
        Comment cmnt(masm_, "[ VariableDeclaration");
        EmitDebugCheckDeclarationContext(variable);
        __ LoadRoot(r0, Heap::kTheHoleValueRootIndex);
        __ str(r0, ContextMemOperand(cp, variable->index()));
        // No write barrier since the_hole_value is in old space.
        PrepareForBailoutForId(proxy->id(), NO_REGISTERS);
      }
      break;

    case VariableLocation::LOOKUP: {
      Comment cmnt(masm_, "[ VariableDeclaration");
      __ mov(r2, Operand(variable->name()));
      // Declaration nodes are always introduced in one of four modes.
      DCHECK(IsDeclaredVariableMode(mode));
      // Push initial value, if any.
      // Note: For variables we must not push an initial value (such as
      // 'undefined') because we may have a (legal) redeclaration and we
      // must not destroy the current value.
      if (hole_init) {
        __ LoadRoot(r0, Heap::kTheHoleValueRootIndex);
      } else {
        __ mov(r0, Operand(Smi::FromInt(0)));  // Indicates no initial value.
      }
      __ Push(r2, r0);
      __ Push(Smi::FromInt(variable->DeclarationPropertyAttributes()));
      __ CallRuntime(Runtime::kDeclareLookupSlot);
      break;
    }
  }
}


void FullCodeGenerator::VisitFunctionDeclaration(
    FunctionDeclaration* declaration) {
  VariableProxy* proxy = declaration->proxy();
  Variable* variable = proxy->var();
  switch (variable->location()) {
    case VariableLocation::GLOBAL:
    case VariableLocation::UNALLOCATED: {
      globals_->Add(variable->name(), zone());
      Handle<SharedFunctionInfo> function =
          Compiler::GetSharedFunctionInfo(declaration->fun(), script(), info_);
      // Check for stack-overflow exception.
      if (function.is_null()) return SetStackOverflow();
      globals_->Add(function, zone());
      break;
    }

    case VariableLocation::PARAMETER:
    case VariableLocation::LOCAL: {
      Comment cmnt(masm_, "[ FunctionDeclaration");
      VisitForAccumulatorValue(declaration->fun());
      __ str(result_register(), StackOperand(variable));
      break;
    }

    case VariableLocation::CONTEXT: {
      Comment cmnt(masm_, "[ FunctionDeclaration");
      EmitDebugCheckDeclarationContext(variable);
      VisitForAccumulatorValue(declaration->fun());
      __ str(result_register(), ContextMemOperand(cp, variable->index()));
      int offset = Context::SlotOffset(variable->index());
      // We know that we have written a function, which is not a smi.
      __ RecordWriteContextSlot(cp,
                                offset,
                                result_register(),
                                r2,
                                kLRHasBeenSaved,
                                kDontSaveFPRegs,
                                EMIT_REMEMBERED_SET,
                                OMIT_SMI_CHECK);
      PrepareForBailoutForId(proxy->id(), NO_REGISTERS);
      break;
    }

    case VariableLocation::LOOKUP: {
      Comment cmnt(masm_, "[ FunctionDeclaration");
      __ mov(r2, Operand(variable->name()));
      PushOperand(r2);
      // Push initial value for function declaration.
      VisitForStackValue(declaration->fun());
      PushOperand(Smi::FromInt(variable->DeclarationPropertyAttributes()));
      CallRuntimeWithOperands(Runtime::kDeclareLookupSlot);
      break;
    }
  }
}


void FullCodeGenerator::DeclareGlobals(Handle<FixedArray> pairs) {
  // Call the runtime to declare the globals.
  __ mov(r1, Operand(pairs));
  __ mov(r0, Operand(Smi::FromInt(DeclareGlobalsFlags())));
  __ Push(r1, r0);
  __ CallRuntime(Runtime::kDeclareGlobals);
  // Return value is ignored.
}


void FullCodeGenerator::DeclareModules(Handle<FixedArray> descriptions) {
  // Call the runtime to declare the modules.
  __ Push(descriptions);
  __ CallRuntime(Runtime::kDeclareModules);
  // Return value is ignored.
}


void FullCodeGenerator::VisitSwitchStatement(SwitchStatement* stmt) {
  Comment cmnt(masm_, "[ SwitchStatement");
  Breakable nested_statement(this, stmt);
  SetStatementPosition(stmt);

  // Keep the switch value on the stack until a case matches.
  VisitForStackValue(stmt->tag());
  PrepareForBailoutForId(stmt->EntryId(), NO_REGISTERS);

  ZoneList<CaseClause*>* clauses = stmt->cases();
  CaseClause* default_clause = NULL;  // Can occur anywhere in the list.

  Label next_test;  // Recycled for each test.
  // Compile all the tests with branches to their bodies.
  for (int i = 0; i < clauses->length(); i++) {
    CaseClause* clause = clauses->at(i);
    clause->body_target()->Unuse();

    // The default is not a test, but remember it as final fall through.
    if (clause->is_default()) {
      default_clause = clause;
      continue;
    }

    Comment cmnt(masm_, "[ Case comparison");
    __ bind(&next_test);
    next_test.Unuse();

    // Compile the label expression.
    VisitForAccumulatorValue(clause->label());

    // Perform the comparison as if via '==='.
    __ ldr(r1, MemOperand(sp, 0));  // Switch value.
    bool inline_smi_code = ShouldInlineSmiCase(Token::EQ_STRICT);
    JumpPatchSite patch_site(masm_);
    if (inline_smi_code) {
      Label slow_case;
      __ orr(r2, r1, r0);
      patch_site.EmitJumpIfNotSmi(r2, &slow_case);

      __ cmp(r1, r0);
      __ b(ne, &next_test);
      __ Drop(1);  // Switch value is no longer needed.
      __ b(clause->body_target());
      __ bind(&slow_case);
    }

    // Record position before stub call for type feedback.
    SetExpressionPosition(clause);
    Handle<Code> ic =
        CodeFactory::CompareIC(isolate(), Token::EQ_STRICT).code();
    CallIC(ic, clause->CompareId());
    patch_site.EmitPatchInfo();

    Label skip;
    __ b(&skip);
    PrepareForBailout(clause, TOS_REG);
    __ LoadRoot(ip, Heap::kTrueValueRootIndex);
    __ cmp(r0, ip);
    __ b(ne, &next_test);
    __ Drop(1);
    __ jmp(clause->body_target());
    __ bind(&skip);

    __ cmp(r0, Operand::Zero());
    __ b(ne, &next_test);
    __ Drop(1);  // Switch value is no longer needed.
    __ b(clause->body_target());
  }

  // Discard the test value and jump to the default if present, otherwise to
  // the end of the statement.
  __ bind(&next_test);
  DropOperands(1);  // Switch value is no longer needed.
  if (default_clause == NULL) {
    __ b(nested_statement.break_label());
  } else {
    __ b(default_clause->body_target());
  }

  // Compile all the case bodies.
  for (int i = 0; i < clauses->length(); i++) {
    Comment cmnt(masm_, "[ Case body");
    CaseClause* clause = clauses->at(i);
    __ bind(clause->body_target());
    PrepareForBailoutForId(clause->EntryId(), NO_REGISTERS);
    VisitStatements(clause->statements());
  }

  __ bind(nested_statement.break_label());
  PrepareForBailoutForId(stmt->ExitId(), NO_REGISTERS);
}


void FullCodeGenerator::VisitForInStatement(ForInStatement* stmt) {
  Comment cmnt(masm_, "[ ForInStatement");
  SetStatementPosition(stmt, SKIP_BREAK);

  FeedbackVectorSlot slot = stmt->ForInFeedbackSlot();

  Label loop, exit;
  ForIn loop_statement(this, stmt);
  increment_loop_depth();

  // Get the object to enumerate over.
  SetExpressionAsStatementPosition(stmt->enumerable());
  VisitForAccumulatorValue(stmt->enumerable());
  OperandStackDepthIncrement(ForIn::kElementCount);

  // If the object is null or undefined, skip over the loop, otherwise convert
  // it to a JS receiver.  See ECMA-262 version 5, section 12.6.4.
  Label convert, done_convert;
  __ JumpIfSmi(r0, &convert);
  __ CompareObjectType(r0, r1, r1, FIRST_JS_RECEIVER_TYPE);
  __ b(ge, &done_convert);
  __ CompareRoot(r0, Heap::kNullValueRootIndex);
  __ b(eq, &exit);
  __ CompareRoot(r0, Heap::kUndefinedValueRootIndex);
  __ b(eq, &exit);
  __ bind(&convert);
  ToObjectStub stub(isolate());
  __ CallStub(&stub);
  __ bind(&done_convert);
  PrepareForBailoutForId(stmt->ToObjectId(), TOS_REG);
  __ push(r0);

  // Check cache validity in generated code. This is a fast case for
  // the JSObject::IsSimpleEnum cache validity checks. If we cannot
  // guarantee cache validity, call the runtime system to check cache
  // validity or get the property names in a fixed array.
  // Note: Proxies never have an enum cache, so will always take the
  // slow path.
  Label call_runtime;
  __ CheckEnumCache(&call_runtime);

  // The enum cache is valid.  Load the map of the object being
  // iterated over and use the cache for the iteration.
  Label use_cache;
  __ ldr(r0, FieldMemOperand(r0, HeapObject::kMapOffset));
  __ b(&use_cache);

  // Get the set of properties to enumerate.
  __ bind(&call_runtime);
  __ push(r0);  // Duplicate the enumerable object on the stack.
  __ CallRuntime(Runtime::kForInEnumerate);
  PrepareForBailoutForId(stmt->EnumId(), TOS_REG);

  // If we got a map from the runtime call, we can do a fast
  // modification check. Otherwise, we got a fixed array, and we have
  // to do a slow check.
  Label fixed_array;
  __ ldr(r2, FieldMemOperand(r0, HeapObject::kMapOffset));
  __ LoadRoot(ip, Heap::kMetaMapRootIndex);
  __ cmp(r2, ip);
  __ b(ne, &fixed_array);

  // We got a map in register r0. Get the enumeration cache from it.
  Label no_descriptors;
  __ bind(&use_cache);

  __ EnumLength(r1, r0);
  __ cmp(r1, Operand(Smi::FromInt(0)));
  __ b(eq, &no_descriptors);

  __ LoadInstanceDescriptors(r0, r2);
  __ ldr(r2, FieldMemOperand(r2, DescriptorArray::kEnumCacheOffset));
  __ ldr(r2, FieldMemOperand(r2, DescriptorArray::kEnumCacheBridgeCacheOffset));

  // Set up the four remaining stack slots.
  __ push(r0);  // Map.
  __ mov(r0, Operand(Smi::FromInt(0)));
  // Push enumeration cache, enumeration cache length (as smi) and zero.
  __ Push(r2, r1, r0);
  __ jmp(&loop);

  __ bind(&no_descriptors);
  __ Drop(1);
  __ jmp(&exit);

  // We got a fixed array in register r0. Iterate through that.
  __ bind(&fixed_array);

  int const vector_index = SmiFromSlot(slot)->value();
  __ EmitLoadTypeFeedbackVector(r1);
  __ mov(r2, Operand(TypeFeedbackVector::MegamorphicSentinel(isolate())));
  __ str(r2, FieldMemOperand(r1, FixedArray::OffsetOfElementAt(vector_index)));
  __ mov(r1, Operand(Smi::FromInt(1)));  // Smi(1) indicates slow check
  __ Push(r1, r0);  // Smi and array
  __ ldr(r1, FieldMemOperand(r0, FixedArray::kLengthOffset));
  __ Push(r1);  // Fixed array length (as smi).
  PrepareForBailoutForId(stmt->PrepareId(), NO_REGISTERS);
  __ mov(r0, Operand(Smi::FromInt(0)));
  __ Push(r0);  // Initial index.

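  // At this point the operand stack holds, from the top down: the current
  // index (smi zero), the cache or array length, the enum cache or fixed
  // array of keys, the map or Smi(1) slow-mode marker, and the enumerable
  // object itself; the loop below reads these five slots at sp + 0..4
  // (in pointer-sized units).
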
  // Generate code for doing the condition check.
  __ bind(&loop);
  SetExpressionAsStatementPosition(stmt->each());

  // Load the current count to r0, load the length to r1.
  __ Ldrd(r0, r1, MemOperand(sp, 0 * kPointerSize));
  __ cmp(r0, r1);  // Compare to the array length.
  __ b(hs, loop_statement.break_label());

  // Get the current entry of the array into register r3.
  __ ldr(r2, MemOperand(sp, 2 * kPointerSize));
  __ add(r2, r2, Operand(FixedArray::kHeaderSize - kHeapObjectTag));
  __ ldr(r3, MemOperand::PointerAddressFromSmiKey(r2, r0));

  // Get the expected map from the stack or a smi in the
  // permanent slow case into register r2.
  __ ldr(r2, MemOperand(sp, 3 * kPointerSize));

  // Check if the expected map still matches that of the enumerable.
  // If not, we may have to filter the key.
  Label update_each;
  __ ldr(r1, MemOperand(sp, 4 * kPointerSize));
  __ ldr(r4, FieldMemOperand(r1, HeapObject::kMapOffset));
  __ cmp(r4, Operand(r2));
  __ b(eq, &update_each);

  // We might get here from TurboFan or Crankshaft when something in the
  // for-in loop body deopts and only now notice in fullcodegen that we can
  // no longer use the enum cache, i.e. we left fast mode. So better record
  // this information here, in case we later OSR back into this loop or
  // reoptimize the whole function w/o rerunning the loop with the slow
  // mode object in fullcodegen (which would result in a deopt loop).
  __ EmitLoadTypeFeedbackVector(r0);
  __ mov(r2, Operand(TypeFeedbackVector::MegamorphicSentinel(isolate())));
  __ str(r2, FieldMemOperand(r0, FixedArray::OffsetOfElementAt(vector_index)));

  // Convert the entry to a string or (smi) 0 if it isn't a property
  // any more. If the property has been removed while iterating, we
  // just skip it.
  __ push(r1);  // Enumerable.
  __ push(r3);  // Current entry.
  __ CallRuntime(Runtime::kForInFilter);
  PrepareForBailoutForId(stmt->FilterId(), TOS_REG);
  __ mov(r3, Operand(r0));
  __ LoadRoot(ip, Heap::kUndefinedValueRootIndex);
  __ cmp(r0, ip);
  __ b(eq, loop_statement.continue_label());

  // Update the 'each' property or variable from the possibly filtered
  // entry in register r3.
  __ bind(&update_each);
  __ mov(result_register(), r3);
  // Perform the assignment as if via '='.
  { EffectContext context(this);
    EmitAssignment(stmt->each(), stmt->EachFeedbackSlot());
    PrepareForBailoutForId(stmt->AssignmentId(), NO_REGISTERS);
  }

  // Both Crankshaft and Turbofan expect BodyId to be right before stmt->body().
  PrepareForBailoutForId(stmt->BodyId(), NO_REGISTERS);
  // Generate code for the body of the loop.
  Visit(stmt->body());

  // Generate code for the going to the next element by incrementing
  // the index (smi) stored on top of the stack.
  __ bind(loop_statement.continue_label());
  __ pop(r0);
  __ add(r0, r0, Operand(Smi::FromInt(1)));
  __ push(r0);

  EmitBackEdgeBookkeeping(stmt, &loop);
  __ b(&loop);

  // Remove the pointers stored on the stack.
  __ bind(loop_statement.break_label());
  DropOperands(5);

  // Exit and decrement the loop depth.
  PrepareForBailoutForId(stmt->ExitId(), NO_REGISTERS);
  __ bind(&exit);
  decrement_loop_depth();
}


void FullCodeGenerator::EmitNewClosure(Handle<SharedFunctionInfo> info,
                                       bool pretenure) {
  // Use the fast case closure allocation code that allocates in new
  // space for nested functions that don't need literals cloning. If
  // we're running with the --always-opt or the --prepare-always-opt
  // flag, we need to use the runtime function so that the new function
  // we are creating here gets a chance to have its code optimized and
  // doesn't just get a copy of the existing unoptimized code.
  if (!FLAG_always_opt &&
      !FLAG_prepare_always_opt &&
      !pretenure &&
      scope()->is_function_scope() &&
      info->num_literals() == 0) {
    FastNewClosureStub stub(isolate(), info->language_mode(), info->kind());
    __ mov(r2, Operand(info));
    __ CallStub(&stub);
  } else {
    __ Push(info);
    __ CallRuntime(pretenure ? Runtime::kNewClosure_Tenured
                             : Runtime::kNewClosure);
  }
  context()->Plug(r0);
}


void FullCodeGenerator::EmitSetHomeObject(Expression* initializer, int offset,
                                          FeedbackVectorSlot slot) {
  DCHECK(NeedsHomeObject(initializer));
  __ ldr(StoreDescriptor::ReceiverRegister(), MemOperand(sp));
  __ mov(StoreDescriptor::NameRegister(),
         Operand(isolate()->factory()->home_object_symbol()));
  __ ldr(StoreDescriptor::ValueRegister(),
         MemOperand(sp, offset * kPointerSize));
  EmitLoadStoreICSlot(slot);
  CallStoreIC();
}


void FullCodeGenerator::EmitSetHomeObjectAccumulator(Expression* initializer,
                                                     int offset,
                                                     FeedbackVectorSlot slot) {
  DCHECK(NeedsHomeObject(initializer));
  __ Move(StoreDescriptor::ReceiverRegister(), r0);
  __ mov(StoreDescriptor::NameRegister(),
         Operand(isolate()->factory()->home_object_symbol()));
  __ ldr(StoreDescriptor::ValueRegister(),
         MemOperand(sp, offset * kPointerSize));
  EmitLoadStoreICSlot(slot);
  CallStoreIC();
}


void FullCodeGenerator::EmitLoadGlobalCheckExtensions(VariableProxy* proxy,
                                                      TypeofMode typeof_mode,
                                                      Label* slow) {
  Register current = cp;
  Register next = r1;
  Register temp = r2;

  Scope* s = scope();
  while (s != NULL) {
    if (s->num_heap_slots() > 0) {
      if (s->calls_sloppy_eval()) {
        // Check that extension is "the hole".
        __ ldr(temp, ContextMemOperand(current, Context::EXTENSION_INDEX));
        __ JumpIfNotRoot(temp, Heap::kTheHoleValueRootIndex, slow);
      }
      // Load next context in chain.
      __ ldr(next, ContextMemOperand(current, Context::PREVIOUS_INDEX));
      // Walk the rest of the chain without clobbering cp.
      current = next;
    }
    // If no outer scope calls eval, we do not need to check more
    // context extensions.
    if (!s->outer_scope_calls_sloppy_eval() || s->is_eval_scope()) break;
    s = s->outer_scope();
  }

  if (s->is_eval_scope()) {
    Label loop, fast;
    if (!current.is(next)) {
      __ Move(next, current);
    }
    __ bind(&loop);
    // Terminate at native context.
    __ ldr(temp, FieldMemOperand(next, HeapObject::kMapOffset));
    __ LoadRoot(ip, Heap::kNativeContextMapRootIndex);
    __ cmp(temp, ip);
    __ b(eq, &fast);
    // Check that extension is "the hole".
    __ ldr(temp, ContextMemOperand(next, Context::EXTENSION_INDEX));
    __ JumpIfNotRoot(temp, Heap::kTheHoleValueRootIndex, slow);
    // Load next context in chain.
    __ ldr(next, ContextMemOperand(next, Context::PREVIOUS_INDEX));
    __ b(&loop);
    __ bind(&fast);
  }

  // All extension objects were empty and it is safe to use a normal global
  // load machinery.
  EmitGlobalVariableLoad(proxy, typeof_mode);
}


MemOperand FullCodeGenerator::ContextSlotOperandCheckExtensions(Variable* var,
                                                                Label* slow) {
  DCHECK(var->IsContextSlot());
  Register context = cp;
  Register next = r3;
  Register temp = r4;

  for (Scope* s = scope(); s != var->scope(); s = s->outer_scope()) {
    if (s->num_heap_slots() > 0) {
      if (s->calls_sloppy_eval()) {
        // Check that extension is "the hole".
        __ ldr(temp, ContextMemOperand(context, Context::EXTENSION_INDEX));
        __ JumpIfNotRoot(temp, Heap::kTheHoleValueRootIndex, slow);
      }
      __ ldr(next, ContextMemOperand(context, Context::PREVIOUS_INDEX));
      // Walk the rest of the chain without clobbering cp.
      context = next;
    }
  }
  // Check that last extension is "the hole".
  __ ldr(temp, ContextMemOperand(context, Context::EXTENSION_INDEX));
  __ JumpIfNotRoot(temp, Heap::kTheHoleValueRootIndex, slow);

  // This function is used only for loads, not stores, so it's safe to
  // return a cp-based operand (the write barrier cannot be allowed to
  // destroy the cp register).
1305 return ContextMemOperand(context, var->index());
1306}
1307
1308
1309void FullCodeGenerator::EmitDynamicLookupFastCase(VariableProxy* proxy,
1310 TypeofMode typeof_mode,
1311 Label* slow, Label* done) {
1312 // Generate fast-case code for variables that might be shadowed by
1313 // eval-introduced variables. Eval is used a lot without
1314 // introducing variables. In those cases, we do not want to
1315 // perform a runtime call for all variables in the scope
1316 // containing the eval.
1317 Variable* var = proxy->var();
1318 if (var->mode() == DYNAMIC_GLOBAL) {
1319 EmitLoadGlobalCheckExtensions(proxy, typeof_mode, slow);
1320 __ jmp(done);
1321 } else if (var->mode() == DYNAMIC_LOCAL) {
1322 Variable* local = var->local_if_not_shadowed();
1323 __ ldr(r0, ContextSlotOperandCheckExtensions(local, slow));
1324 if (local->mode() == LET || local->mode() == CONST ||
1325 local->mode() == CONST_LEGACY) {
1326 __ CompareRoot(r0, Heap::kTheHoleValueRootIndex);
1327 if (local->mode() == CONST_LEGACY) {
1328 __ LoadRoot(r0, Heap::kUndefinedValueRootIndex, eq);
1329 } else { // LET || CONST
1330 __ b(ne, done);
1331 __ mov(r0, Operand(var->name()));
1332 __ push(r0);
1333 __ CallRuntime(Runtime::kThrowReferenceError);
1334 }
1335 }
1336 __ jmp(done);
1337 }
1338}
1339
1340
1341void FullCodeGenerator::EmitGlobalVariableLoad(VariableProxy* proxy,
1342 TypeofMode typeof_mode) {
1343 Variable* var = proxy->var();
1344 DCHECK(var->IsUnallocatedOrGlobalSlot() ||
1345 (var->IsLookupSlot() && var->mode() == DYNAMIC_GLOBAL));
1346 __ LoadGlobalObject(LoadDescriptor::ReceiverRegister());
1347 __ mov(LoadDescriptor::NameRegister(), Operand(var->name()));
1348 __ mov(LoadDescriptor::SlotRegister(),
1349 Operand(SmiFromSlot(proxy->VariableFeedbackSlot())));
1350 CallLoadIC(typeof_mode);
1351}
1352
1353
1354void FullCodeGenerator::EmitVariableLoad(VariableProxy* proxy,
1355 TypeofMode typeof_mode) {
1356 // Record position before possible IC call.
1357 SetExpressionPosition(proxy);
1358 PrepareForBailoutForId(proxy->BeforeId(), NO_REGISTERS);
1359 Variable* var = proxy->var();
1360
1361 // Three cases: global variables, lookup variables, and all other types of
1362 // variables.
1363 switch (var->location()) {
1364 case VariableLocation::GLOBAL:
1365 case VariableLocation::UNALLOCATED: {
1366 Comment cmnt(masm_, "[ Global variable");
1367 EmitGlobalVariableLoad(proxy, typeof_mode);
1368 context()->Plug(r0);
1369 break;
1370 }
1371
1372 case VariableLocation::PARAMETER:
1373 case VariableLocation::LOCAL:
1374 case VariableLocation::CONTEXT: {
1375 DCHECK_EQ(NOT_INSIDE_TYPEOF, typeof_mode);
1376 Comment cmnt(masm_, var->IsContextSlot() ? "[ Context variable"
1377 : "[ Stack variable");
1378 if (NeedsHoleCheckForLoad(proxy)) {
1379 // Let and const need a read barrier.
1380 GetVar(r0, var);
1381 __ CompareRoot(r0, Heap::kTheHoleValueRootIndex);
1382 if (var->mode() == LET || var->mode() == CONST) {
1383 // Throw a reference error when using an uninitialized let/const
1384 // binding in harmony mode.
1385 Label done;
1386 __ b(ne, &done);
1387 __ mov(r0, Operand(var->name()));
1388 __ push(r0);
1389 __ CallRuntime(Runtime::kThrowReferenceError);
1390 __ bind(&done);
1391 } else {
1392 // Uninitialized legacy const bindings are unholed.
1393 DCHECK(var->mode() == CONST_LEGACY);
1394 __ LoadRoot(r0, Heap::kUndefinedValueRootIndex, eq);
1395 }
1396 context()->Plug(r0);
1397 break;
1398 }
1399 context()->Plug(var);
1400 break;
1401 }
1402
1403 case VariableLocation::LOOKUP: {
1404 Comment cmnt(masm_, "[ Lookup variable");
1405 Label done, slow;
1406 // Generate code for loading from variables potentially shadowed
1407 // by eval-introduced variables.
1408 EmitDynamicLookupFastCase(proxy, typeof_mode, &slow, &done);
1409 __ bind(&slow);
Ben Murdoch097c5b22016-05-18 11:27:45 +01001410 __ Push(var->name());
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00001411 Runtime::FunctionId function_id =
1412 typeof_mode == NOT_INSIDE_TYPEOF
1413 ? Runtime::kLoadLookupSlot
Ben Murdoch097c5b22016-05-18 11:27:45 +01001414 : Runtime::kLoadLookupSlotInsideTypeof;
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00001415 __ CallRuntime(function_id);
1416 __ bind(&done);
1417 context()->Plug(r0);
1418 }
1419 }
1420}
1421
1422
1423void FullCodeGenerator::VisitRegExpLiteral(RegExpLiteral* expr) {
1424 Comment cmnt(masm_, "[ RegExpLiteral");
1425 __ ldr(r3, MemOperand(fp, JavaScriptFrameConstants::kFunctionOffset));
1426 __ mov(r2, Operand(Smi::FromInt(expr->literal_index())));
1427 __ mov(r1, Operand(expr->pattern()));
1428 __ mov(r0, Operand(Smi::FromInt(expr->flags())));
1429 FastCloneRegExpStub stub(isolate());
1430 __ CallStub(&stub);
1431 context()->Plug(r0);
1432}
1433
1434
1435void FullCodeGenerator::EmitAccessor(ObjectLiteralProperty* property) {
1436 Expression* expression = (property == NULL) ? NULL : property->value();
1437 if (expression == NULL) {
1438 __ LoadRoot(r1, Heap::kNullValueRootIndex);
Ben Murdoch097c5b22016-05-18 11:27:45 +01001439 PushOperand(r1);
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00001440 } else {
1441 VisitForStackValue(expression);
1442 if (NeedsHomeObject(expression)) {
1443 DCHECK(property->kind() == ObjectLiteral::Property::GETTER ||
1444 property->kind() == ObjectLiteral::Property::SETTER);
1445 int offset = property->kind() == ObjectLiteral::Property::GETTER ? 2 : 3;
1446 EmitSetHomeObject(expression, offset, property->GetSlot());
1447 }
1448 }
1449}
1450
1451
1452void FullCodeGenerator::VisitObjectLiteral(ObjectLiteral* expr) {
1453 Comment cmnt(masm_, "[ ObjectLiteral");
1454
1455 Handle<FixedArray> constant_properties = expr->constant_properties();
1456 __ ldr(r3, MemOperand(fp, JavaScriptFrameConstants::kFunctionOffset));
1457 __ mov(r2, Operand(Smi::FromInt(expr->literal_index())));
1458 __ mov(r1, Operand(constant_properties));
1459 int flags = expr->ComputeFlags();
1460 __ mov(r0, Operand(Smi::FromInt(flags)));
1461 if (MustCreateObjectLiteralWithRuntime(expr)) {
1462 __ Push(r3, r2, r1, r0);
1463 __ CallRuntime(Runtime::kCreateObjectLiteral);
1464 } else {
1465 FastCloneShallowObjectStub stub(isolate(), expr->properties_count());
1466 __ CallStub(&stub);
1467 }
1468 PrepareForBailoutForId(expr->CreateLiteralId(), TOS_REG);
1469
1470 // If result_saved is true the result is on top of the stack. If
1471 // result_saved is false the result is in r0.
1472 bool result_saved = false;
1473
1474 AccessorTable accessor_table(zone());
1475 int property_index = 0;
1476 for (; property_index < expr->properties()->length(); property_index++) {
1477 ObjectLiteral::Property* property = expr->properties()->at(property_index);
1478 if (property->is_computed_name()) break;
1479 if (property->IsCompileTimeValue()) continue;
1480
1481 Literal* key = property->key()->AsLiteral();
1482 Expression* value = property->value();
1483 if (!result_saved) {
Ben Murdoch097c5b22016-05-18 11:27:45 +01001484 PushOperand(r0); // Save result on stack
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00001485 result_saved = true;
1486 }
1487 switch (property->kind()) {
1488 case ObjectLiteral::Property::CONSTANT:
1489 UNREACHABLE();
1490 case ObjectLiteral::Property::MATERIALIZED_LITERAL:
1491 DCHECK(!CompileTimeValue::IsCompileTimeValue(property->value()));
1492 // Fall through.
1493 case ObjectLiteral::Property::COMPUTED:
1494 // It is safe to use [[Put]] here because the boilerplate already
1495 // contains computed properties with an uninitialized value.
1496 if (key->value()->IsInternalizedString()) {
1497 if (property->emit_store()) {
1498 VisitForAccumulatorValue(value);
1499 DCHECK(StoreDescriptor::ValueRegister().is(r0));
1500 __ mov(StoreDescriptor::NameRegister(), Operand(key->value()));
1501 __ ldr(StoreDescriptor::ReceiverRegister(), MemOperand(sp));
1502 EmitLoadStoreICSlot(property->GetSlot(0));
1503 CallStoreIC();
1504 PrepareForBailoutForId(key->id(), NO_REGISTERS);
1505
1506 if (NeedsHomeObject(value)) {
1507 EmitSetHomeObjectAccumulator(value, 0, property->GetSlot(1));
1508 }
1509 } else {
1510 VisitForEffect(value);
1511 }
1512 break;
1513 }
1514 // Duplicate receiver on stack.
1515 __ ldr(r0, MemOperand(sp));
Ben Murdoch097c5b22016-05-18 11:27:45 +01001516 PushOperand(r0);
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00001517 VisitForStackValue(key);
1518 VisitForStackValue(value);
1519 if (property->emit_store()) {
1520 if (NeedsHomeObject(value)) {
1521 EmitSetHomeObject(value, 2, property->GetSlot());
1522 }
1523 __ mov(r0, Operand(Smi::FromInt(SLOPPY))); // PropertyAttributes
Ben Murdoch097c5b22016-05-18 11:27:45 +01001524 PushOperand(r0);
1525 CallRuntimeWithOperands(Runtime::kSetProperty);
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00001526 } else {
Ben Murdoch097c5b22016-05-18 11:27:45 +01001527 DropOperands(3);
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00001528 }
1529 break;
1530 case ObjectLiteral::Property::PROTOTYPE:
1531 // Duplicate receiver on stack.
1532 __ ldr(r0, MemOperand(sp));
Ben Murdoch097c5b22016-05-18 11:27:45 +01001533 PushOperand(r0);
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00001534 VisitForStackValue(value);
1535 DCHECK(property->emit_store());
Ben Murdoch097c5b22016-05-18 11:27:45 +01001536 CallRuntimeWithOperands(Runtime::kInternalSetPrototype);
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00001537 PrepareForBailoutForId(expr->GetIdForPropertySet(property_index),
1538 NO_REGISTERS);
1539 break;
1540
1541 case ObjectLiteral::Property::GETTER:
1542 if (property->emit_store()) {
1543 accessor_table.lookup(key)->second->getter = property;
1544 }
1545 break;
1546 case ObjectLiteral::Property::SETTER:
1547 if (property->emit_store()) {
1548 accessor_table.lookup(key)->second->setter = property;
1549 }
1550 break;
1551 }
1552 }
1553
1554 // Emit code to define accessors, using only a single call to the runtime for
1555 // each pair of corresponding getters and setters.
1556 for (AccessorTable::Iterator it = accessor_table.begin();
1557 it != accessor_table.end();
1558 ++it) {
1559 __ ldr(r0, MemOperand(sp)); // Duplicate receiver.
Ben Murdoch097c5b22016-05-18 11:27:45 +01001560 PushOperand(r0);
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00001561 VisitForStackValue(it->first);
1562 EmitAccessor(it->second->getter);
1563 EmitAccessor(it->second->setter);
1564 __ mov(r0, Operand(Smi::FromInt(NONE)));
Ben Murdoch097c5b22016-05-18 11:27:45 +01001565 PushOperand(r0);
1566 CallRuntimeWithOperands(Runtime::kDefineAccessorPropertyUnchecked);
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00001567 }
1568
1569 // Object literals have two parts. The "static" part on the left contains no
1570 // computed property names, and so we can compute its map ahead of time; see
1571 // runtime.cc::CreateObjectLiteralBoilerplate. The second "dynamic" part
1572 // starts with the first computed property name, and continues with all
1573 // properties to its right. All the code from above initializes the static
1574 // component of the object literal, and arranges for the map of the result to
1575 // reflect the static order in which the keys appear. For the dynamic
1576 // properties, we compile them into a series of "SetOwnProperty" runtime
1577 // calls. This will preserve insertion order.
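  // For illustration: in a literal like { a: 1, [f()]: 2, b: 3 } the property
  // 'a' belongs to the static part handled above, while [f()] and every
  // property to its right (including 'b') is defined by the loop below.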
1578 for (; property_index < expr->properties()->length(); property_index++) {
1579 ObjectLiteral::Property* property = expr->properties()->at(property_index);
1580
1581 Expression* value = property->value();
1582 if (!result_saved) {
Ben Murdoch097c5b22016-05-18 11:27:45 +01001583 PushOperand(r0); // Save result on the stack
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00001584 result_saved = true;
1585 }
1586
1587 __ ldr(r0, MemOperand(sp)); // Duplicate receiver.
Ben Murdoch097c5b22016-05-18 11:27:45 +01001588 PushOperand(r0);
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00001589
1590 if (property->kind() == ObjectLiteral::Property::PROTOTYPE) {
1591 DCHECK(!property->is_computed_name());
1592 VisitForStackValue(value);
1593 DCHECK(property->emit_store());
Ben Murdoch097c5b22016-05-18 11:27:45 +01001594 CallRuntimeWithOperands(Runtime::kInternalSetPrototype);
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00001595 PrepareForBailoutForId(expr->GetIdForPropertySet(property_index),
1596 NO_REGISTERS);
1597 } else {
1598 EmitPropertyKey(property, expr->GetIdForPropertyName(property_index));
1599 VisitForStackValue(value);
1600 if (NeedsHomeObject(value)) {
1601 EmitSetHomeObject(value, 2, property->GetSlot());
1602 }
1603
1604 switch (property->kind()) {
1605 case ObjectLiteral::Property::CONSTANT:
1606 case ObjectLiteral::Property::MATERIALIZED_LITERAL:
1607 case ObjectLiteral::Property::COMPUTED:
1608 if (property->emit_store()) {
Ben Murdoch097c5b22016-05-18 11:27:45 +01001609 PushOperand(Smi::FromInt(NONE));
1610 PushOperand(Smi::FromInt(property->NeedsSetFunctionName()));
1611 CallRuntimeWithOperands(Runtime::kDefineDataPropertyInLiteral);
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00001612 } else {
Ben Murdoch097c5b22016-05-18 11:27:45 +01001613 DropOperands(3);
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00001614 }
1615 break;
1616
1617 case ObjectLiteral::Property::PROTOTYPE:
1618 UNREACHABLE();
1619 break;
1620
1621 case ObjectLiteral::Property::GETTER:
Ben Murdoch097c5b22016-05-18 11:27:45 +01001622 PushOperand(Smi::FromInt(NONE));
1623 CallRuntimeWithOperands(Runtime::kDefineGetterPropertyUnchecked);
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00001624 break;
1625
1626 case ObjectLiteral::Property::SETTER:
Ben Murdoch097c5b22016-05-18 11:27:45 +01001627 PushOperand(Smi::FromInt(NONE));
1628 CallRuntimeWithOperands(Runtime::kDefineSetterPropertyUnchecked);
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00001629 break;
1630 }
1631 }
1632 }
1633
1634 if (expr->has_function()) {
1635 DCHECK(result_saved);
1636 __ ldr(r0, MemOperand(sp));
1637 __ push(r0);
1638 __ CallRuntime(Runtime::kToFastProperties);
1639 }
1640
1641 if (result_saved) {
1642 context()->PlugTOS();
1643 } else {
1644 context()->Plug(r0);
1645 }
1646}
1647
1648
1649void FullCodeGenerator::VisitArrayLiteral(ArrayLiteral* expr) {
1650 Comment cmnt(masm_, "[ ArrayLiteral");
1651
1652 Handle<FixedArray> constant_elements = expr->constant_elements();
1653 bool has_fast_elements =
1654 IsFastObjectElementsKind(expr->constant_elements_kind());
1655 Handle<FixedArrayBase> constant_elements_values(
1656 FixedArrayBase::cast(constant_elements->get(1)));
1657
1658 AllocationSiteMode allocation_site_mode = TRACK_ALLOCATION_SITE;
1659 if (has_fast_elements && !FLAG_allocation_site_pretenuring) {
1660 // If the only customer of allocation sites is transitioning, then
1661 // we can turn it off if we don't have anywhere else to transition to.
1662 allocation_site_mode = DONT_TRACK_ALLOCATION_SITE;
1663 }
1664
1665 __ ldr(r3, MemOperand(fp, JavaScriptFrameConstants::kFunctionOffset));
1666 __ mov(r2, Operand(Smi::FromInt(expr->literal_index())));
1667 __ mov(r1, Operand(constant_elements));
1668 if (MustCreateArrayLiteralWithRuntime(expr)) {
1669 __ mov(r0, Operand(Smi::FromInt(expr->ComputeFlags())));
1670 __ Push(r3, r2, r1, r0);
1671 __ CallRuntime(Runtime::kCreateArrayLiteral);
1672 } else {
1673 FastCloneShallowArrayStub stub(isolate(), allocation_site_mode);
1674 __ CallStub(&stub);
1675 }
1676 PrepareForBailoutForId(expr->CreateLiteralId(), TOS_REG);
1677
1678 bool result_saved = false; // Is the result saved to the stack?
1679 ZoneList<Expression*>* subexprs = expr->values();
1680 int length = subexprs->length();
1681
1682 // Emit code to evaluate all the non-constant subexpressions and to store
1683 // them into the newly cloned array.
1684 int array_index = 0;
1685 for (; array_index < length; array_index++) {
1686 Expression* subexpr = subexprs->at(array_index);
Ben Murdoch097c5b22016-05-18 11:27:45 +01001687 DCHECK(!subexpr->IsSpread());
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00001688
1689 // If the subexpression is a literal or a simple materialized literal it
1690 // is already set in the cloned array.
1691 if (CompileTimeValue::IsCompileTimeValue(subexpr)) continue;
1692
1693 if (!result_saved) {
Ben Murdoch097c5b22016-05-18 11:27:45 +01001694 PushOperand(r0);
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00001695 result_saved = true;
1696 }
1697 VisitForAccumulatorValue(subexpr);
1698
1699 __ mov(StoreDescriptor::NameRegister(), Operand(Smi::FromInt(array_index)));
1700 __ ldr(StoreDescriptor::ReceiverRegister(), MemOperand(sp, 0));
1701 EmitLoadStoreICSlot(expr->LiteralFeedbackSlot());
1702 Handle<Code> ic =
1703 CodeFactory::KeyedStoreIC(isolate(), language_mode()).code();
1704 CallIC(ic);
1705
1706 PrepareForBailoutForId(expr->GetIdForElement(array_index), NO_REGISTERS);
1707 }
1708
1709 // In case the array literal contains spread expressions it has two parts.
1710 // The first part is the "static" array, which has a literal index and is
1711 // handled above. The second part starts at the first spread expression
1712 // (inclusive); these elements get appended to the array one by one. Note
1713 // that the number of elements an iterable produces is unknown ahead of time.
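  // For illustration (per the comment above): in [1, 2, ...it, 3] the
  // elements 1 and 2 belong to the "static" part stored via keyed store ICs
  // above, while everything from the spread onward is appended element by
  // element via Runtime::kAppendElement below.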
1714 if (array_index < length && result_saved) {
Ben Murdoch097c5b22016-05-18 11:27:45 +01001715 PopOperand(r0);
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00001716 result_saved = false;
1717 }
1718 for (; array_index < length; array_index++) {
1719 Expression* subexpr = subexprs->at(array_index);
1720
Ben Murdoch097c5b22016-05-18 11:27:45 +01001721 PushOperand(r0);
1722 DCHECK(!subexpr->IsSpread());
1723 VisitForStackValue(subexpr);
1724 CallRuntimeWithOperands(Runtime::kAppendElement);
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00001725
1726 PrepareForBailoutForId(expr->GetIdForElement(array_index), NO_REGISTERS);
1727 }
1728
1729 if (result_saved) {
1730 context()->PlugTOS();
1731 } else {
1732 context()->Plug(r0);
1733 }
1734}
1735
1736
1737void FullCodeGenerator::VisitAssignment(Assignment* expr) {
1738 DCHECK(expr->target()->IsValidReferenceExpressionOrThis());
1739
1740 Comment cmnt(masm_, "[ Assignment");
1741 SetExpressionPosition(expr, INSERT_BREAK);
1742
1743 Property* property = expr->target()->AsProperty();
1744 LhsKind assign_type = Property::GetAssignType(property);
1745
1746 // Evaluate LHS expression.
1747 switch (assign_type) {
1748 case VARIABLE:
1749 // Nothing to do here.
1750 break;
1751 case NAMED_PROPERTY:
1752 if (expr->is_compound()) {
1753 // We need the receiver both on the stack and in the register.
1754 VisitForStackValue(property->obj());
1755 __ ldr(LoadDescriptor::ReceiverRegister(), MemOperand(sp, 0));
1756 } else {
1757 VisitForStackValue(property->obj());
1758 }
1759 break;
1760 case NAMED_SUPER_PROPERTY:
1761 VisitForStackValue(
1762 property->obj()->AsSuperPropertyReference()->this_var());
1763 VisitForAccumulatorValue(
1764 property->obj()->AsSuperPropertyReference()->home_object());
Ben Murdoch097c5b22016-05-18 11:27:45 +01001765 PushOperand(result_register());
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00001766 if (expr->is_compound()) {
1767 const Register scratch = r1;
1768 __ ldr(scratch, MemOperand(sp, kPointerSize));
Ben Murdoch097c5b22016-05-18 11:27:45 +01001769 PushOperand(scratch);
1770 PushOperand(result_register());
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00001771 }
1772 break;
1773 case KEYED_SUPER_PROPERTY:
1774 VisitForStackValue(
1775 property->obj()->AsSuperPropertyReference()->this_var());
1776 VisitForStackValue(
1777 property->obj()->AsSuperPropertyReference()->home_object());
1778 VisitForAccumulatorValue(property->key());
Ben Murdoch097c5b22016-05-18 11:27:45 +01001779 PushOperand(result_register());
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00001780 if (expr->is_compound()) {
1781 const Register scratch = r1;
1782 __ ldr(scratch, MemOperand(sp, 2 * kPointerSize));
Ben Murdoch097c5b22016-05-18 11:27:45 +01001783 PushOperand(scratch);
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00001784 __ ldr(scratch, MemOperand(sp, 2 * kPointerSize));
Ben Murdoch097c5b22016-05-18 11:27:45 +01001785 PushOperand(scratch);
1786 PushOperand(result_register());
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00001787 }
1788 break;
1789 case KEYED_PROPERTY:
1790 if (expr->is_compound()) {
1791 VisitForStackValue(property->obj());
1792 VisitForStackValue(property->key());
1793 __ ldr(LoadDescriptor::ReceiverRegister(),
1794 MemOperand(sp, 1 * kPointerSize));
1795 __ ldr(LoadDescriptor::NameRegister(), MemOperand(sp, 0));
1796 } else {
1797 VisitForStackValue(property->obj());
1798 VisitForStackValue(property->key());
1799 }
1800 break;
1801 }
1802
1803 // For compound assignments we need another deoptimization point after the
1804 // variable/property load.
1805 if (expr->is_compound()) {
1806 { AccumulatorValueContext context(this);
1807 switch (assign_type) {
1808 case VARIABLE:
1809 EmitVariableLoad(expr->target()->AsVariableProxy());
1810 PrepareForBailout(expr->target(), TOS_REG);
1811 break;
1812 case NAMED_PROPERTY:
1813 EmitNamedPropertyLoad(property);
1814 PrepareForBailoutForId(property->LoadId(), TOS_REG);
1815 break;
1816 case NAMED_SUPER_PROPERTY:
1817 EmitNamedSuperPropertyLoad(property);
1818 PrepareForBailoutForId(property->LoadId(), TOS_REG);
1819 break;
1820 case KEYED_SUPER_PROPERTY:
1821 EmitKeyedSuperPropertyLoad(property);
1822 PrepareForBailoutForId(property->LoadId(), TOS_REG);
1823 break;
1824 case KEYED_PROPERTY:
1825 EmitKeyedPropertyLoad(property);
1826 PrepareForBailoutForId(property->LoadId(), TOS_REG);
1827 break;
1828 }
1829 }
1830
1831 Token::Value op = expr->binary_op();
Ben Murdoch097c5b22016-05-18 11:27:45 +01001832 PushOperand(r0); // Left operand goes on the stack.
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00001833 VisitForAccumulatorValue(expr->value());
1834
1835 AccumulatorValueContext context(this);
1836 if (ShouldInlineSmiCase(op)) {
1837 EmitInlineSmiBinaryOp(expr->binary_operation(),
1838 op,
1839 expr->target(),
1840 expr->value());
1841 } else {
1842 EmitBinaryOp(expr->binary_operation(), op);
1843 }
1844
1845 // Deoptimization point in case the binary operation may have side effects.
1846 PrepareForBailout(expr->binary_operation(), TOS_REG);
1847 } else {
1848 VisitForAccumulatorValue(expr->value());
1849 }
1850
1851 SetExpressionPosition(expr);
1852
1853 // Store the value.
1854 switch (assign_type) {
1855 case VARIABLE:
1856 EmitVariableAssignment(expr->target()->AsVariableProxy()->var(),
1857 expr->op(), expr->AssignmentSlot());
1858 PrepareForBailoutForId(expr->AssignmentId(), TOS_REG);
1859 context()->Plug(r0);
1860 break;
1861 case NAMED_PROPERTY:
1862 EmitNamedPropertyAssignment(expr);
1863 break;
1864 case NAMED_SUPER_PROPERTY:
1865 EmitNamedSuperPropertyStore(property);
1866 context()->Plug(r0);
1867 break;
1868 case KEYED_SUPER_PROPERTY:
1869 EmitKeyedSuperPropertyStore(property);
1870 context()->Plug(r0);
1871 break;
1872 case KEYED_PROPERTY:
1873 EmitKeyedPropertyAssignment(expr);
1874 break;
1875 }
1876}
1877
1878
1879void FullCodeGenerator::VisitYield(Yield* expr) {
1880 Comment cmnt(masm_, "[ Yield");
1881 SetExpressionPosition(expr);
1882
1883 // Evaluate yielded value first; the initial iterator definition depends on
1884 // this. It stays on the stack while we update the iterator.
1885 VisitForStackValue(expr->expression());
1886
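  // A rough map of the yield kinds handled here: kInitial is the implicit
  // yield at generator entry, kSuspend is an ordinary 'yield <expr>', and
  // kFinal corresponds to the generator returning; kDelegating ('yield*') is
  // lowered before reaching full-codegen, hence the UNREACHABLE below.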
1887 switch (expr->yield_kind()) {
1888 case Yield::kSuspend:
1889 // Pop value from top-of-stack slot; box result into result register.
1890 EmitCreateIteratorResult(false);
1891 __ push(result_register());
1892 // Fall through.
1893 case Yield::kInitial: {
1894 Label suspend, continuation, post_runtime, resume;
1895
1896 __ jmp(&suspend);
1897 __ bind(&continuation);
Ben Murdoch097c5b22016-05-18 11:27:45 +01001898 // When we arrive here, the stack top is the resume mode and
1899 // result_register() holds the input value (the argument given to the
1900 // respective resume operation).
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00001901 __ RecordGeneratorContinuation();
Ben Murdoch097c5b22016-05-18 11:27:45 +01001902 __ pop(r1);
1903 __ cmp(r1, Operand(Smi::FromInt(JSGeneratorObject::RETURN)));
1904 __ b(ne, &resume);
1905 __ push(result_register());
1906 EmitCreateIteratorResult(true);
1907 EmitUnwindAndReturn();
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00001908
1909 __ bind(&suspend);
1910 VisitForAccumulatorValue(expr->generator_object());
1911 DCHECK(continuation.pos() > 0 && Smi::IsValid(continuation.pos()));
1912 __ mov(r1, Operand(Smi::FromInt(continuation.pos())));
1913 __ str(r1, FieldMemOperand(r0, JSGeneratorObject::kContinuationOffset));
1914 __ str(cp, FieldMemOperand(r0, JSGeneratorObject::kContextOffset));
1915 __ mov(r1, cp);
1916 __ RecordWriteField(r0, JSGeneratorObject::kContextOffset, r1, r2,
1917 kLRHasBeenSaved, kDontSaveFPRegs);
1918 __ add(r1, fp, Operand(StandardFrameConstants::kExpressionsOffset));
1919 __ cmp(sp, r1);
1920 __ b(eq, &post_runtime);
1921 __ push(r0); // generator object
1922 __ CallRuntime(Runtime::kSuspendJSGeneratorObject, 1);
1923 __ ldr(cp, MemOperand(fp, StandardFrameConstants::kContextOffset));
1924 __ bind(&post_runtime);
Ben Murdoch097c5b22016-05-18 11:27:45 +01001925 PopOperand(result_register());
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00001926 EmitReturnSequence();
1927
1928 __ bind(&resume);
1929 context()->Plug(result_register());
1930 break;
1931 }
1932
1933 case Yield::kFinal: {
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00001934 // Pop value from top-of-stack slot, box result into result register.
Ben Murdoch097c5b22016-05-18 11:27:45 +01001935 OperandStackDepthDecrement(1);
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00001936 EmitCreateIteratorResult(true);
Ben Murdoch097c5b22016-05-18 11:27:45 +01001937 EmitUnwindAndReturn();
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00001938 break;
1939 }
1940
Ben Murdoch097c5b22016-05-18 11:27:45 +01001941 case Yield::kDelegating:
1942 UNREACHABLE();
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00001943 }
1944}
1945
1946
1947void FullCodeGenerator::EmitGeneratorResume(Expression *generator,
1948 Expression *value,
1949 JSGeneratorObject::ResumeMode resume_mode) {
1950 // The value stays in r0, and is ultimately read by the resumed generator, as
1951 // if CallRuntime(Runtime::kSuspendJSGeneratorObject) returned it. Or it
1952 // is read to throw the value when the resumed generator is already closed.
1953 // r1 will hold the generator object until the activation has been resumed.
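  // (Context: resume_mode distinguishes generator.next(value),
  // generator.return(value) and generator.throw(value); the resume value is
  // what ends up in r0 below.)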
1954 VisitForStackValue(generator);
1955 VisitForAccumulatorValue(value);
Ben Murdoch097c5b22016-05-18 11:27:45 +01001956 PopOperand(r1);
1957
1958 // Store input value into generator object.
1959 __ str(result_register(),
1960 FieldMemOperand(r1, JSGeneratorObject::kInputOffset));
1961 __ mov(r2, result_register());
1962 __ RecordWriteField(r1, JSGeneratorObject::kInputOffset, r2, r3,
1963 kLRHasBeenSaved, kDontSaveFPRegs);
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00001964
1965 // Load suspended function and context.
1966 __ ldr(cp, FieldMemOperand(r1, JSGeneratorObject::kContextOffset));
1967 __ ldr(r4, FieldMemOperand(r1, JSGeneratorObject::kFunctionOffset));
1968
1969 // Load receiver and store as the first argument.
1970 __ ldr(r2, FieldMemOperand(r1, JSGeneratorObject::kReceiverOffset));
1971 __ push(r2);
1972
1973 // Push holes for the rest of the arguments to the generator function.
1974 __ ldr(r3, FieldMemOperand(r4, JSFunction::kSharedFunctionInfoOffset));
1975 __ ldr(r3,
1976 FieldMemOperand(r3, SharedFunctionInfo::kFormalParameterCountOffset));
1977 __ LoadRoot(r2, Heap::kTheHoleValueRootIndex);
1978 Label push_argument_holes, push_frame;
1979 __ bind(&push_argument_holes);
1980 __ sub(r3, r3, Operand(Smi::FromInt(1)), SetCC);
1981 __ b(mi, &push_frame);
1982 __ push(r2);
1983 __ jmp(&push_argument_holes);
1984
1985 // Enter a new JavaScript frame, and initialize its slots as they were when
1986 // the generator was suspended.
1987 Label resume_frame, done;
1988 __ bind(&push_frame);
1989 __ bl(&resume_frame);
1990 __ jmp(&done);
1991 __ bind(&resume_frame);
1992 // lr = return address.
1993 // fp = caller's frame pointer.
1994 // pp = caller's constant pool (if FLAG_enable_embedded_constant_pool),
1995 // cp = callee's context,
1996 // r4 = callee's JS function.
1997 __ PushFixedFrame(r4);
1998 // Adjust FP to point to saved FP.
1999 __ add(fp, sp, Operand(StandardFrameConstants::kFixedFrameSizeFromFp));
2000
2001 // Load the operand stack size.
2002 __ ldr(r3, FieldMemOperand(r1, JSGeneratorObject::kOperandStackOffset));
2003 __ ldr(r3, FieldMemOperand(r3, FixedArray::kLengthOffset));
2004 __ SmiUntag(r3);
2005
2006 // If we are sending a value and there is no operand stack, we can jump back
2007 // in directly.
2008 if (resume_mode == JSGeneratorObject::NEXT) {
2009 Label slow_resume;
2010 __ cmp(r3, Operand(0));
2011 __ b(ne, &slow_resume);
2012 __ ldr(r3, FieldMemOperand(r4, JSFunction::kCodeEntryOffset));
2013
2014 { ConstantPoolUnavailableScope constant_pool_unavailable(masm_);
2015 if (FLAG_enable_embedded_constant_pool) {
2016 // Load the new code object's constant pool pointer.
2017 __ LoadConstantPoolPointerRegisterFromCodeTargetAddress(r3);
2018 }
2019
2020 __ ldr(r2, FieldMemOperand(r1, JSGeneratorObject::kContinuationOffset));
2021 __ SmiUntag(r2);
2022 __ add(r3, r3, r2);
2023 __ mov(r2, Operand(Smi::FromInt(JSGeneratorObject::kGeneratorExecuting)));
2024 __ str(r2, FieldMemOperand(r1, JSGeneratorObject::kContinuationOffset));
Ben Murdoch097c5b22016-05-18 11:27:45 +01002025 __ Push(Smi::FromInt(resume_mode)); // Consumed in continuation.
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00002026 __ Jump(r3);
2027 }
2028 __ bind(&slow_resume);
2029 }
2030
2031 // Otherwise, we push holes for the operand stack and call the runtime to fix
2032 // up the stack and the handlers.
2033 Label push_operand_holes, call_resume;
2034 __ bind(&push_operand_holes);
2035 __ sub(r3, r3, Operand(1), SetCC);
2036 __ b(mi, &call_resume);
2037 __ push(r2);
2038 __ b(&push_operand_holes);
2039 __ bind(&call_resume);
Ben Murdoch097c5b22016-05-18 11:27:45 +01002040 __ Push(Smi::FromInt(resume_mode)); // Consumed in continuation.
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00002041 DCHECK(!result_register().is(r1));
2042 __ Push(r1, result_register());
2043 __ Push(Smi::FromInt(resume_mode));
2044 __ CallRuntime(Runtime::kResumeJSGeneratorObject);
2045 // Not reached: the runtime call returns elsewhere.
2046 __ stop("not-reached");
2047
2048 __ bind(&done);
2049 context()->Plug(result_register());
2050}
2051
Ben Murdoch097c5b22016-05-18 11:27:45 +01002052void FullCodeGenerator::PushOperands(Register reg1, Register reg2) {
2053 OperandStackDepthIncrement(2);
2054 __ Push(reg1, reg2);
2055}
2056
2057void FullCodeGenerator::PopOperands(Register reg1, Register reg2) {
2058 OperandStackDepthDecrement(2);
2059 __ Pop(reg1, reg2);
2060}
2061
2062void FullCodeGenerator::EmitOperandStackDepthCheck() {
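  // Debug-only sanity check: the distance between fp and sp must equal the
  // fixed frame size plus the bytes occupied by the operand stack that
  // full-codegen tracks in operand_stack_depth_.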
2063 if (FLAG_debug_code) {
2064 int expected_diff = StandardFrameConstants::kFixedFrameSizeFromFp +
2065 operand_stack_depth_ * kPointerSize;
2066 __ sub(r0, fp, sp);
2067 __ cmp(r0, Operand(expected_diff));
2068 __ Assert(eq, kUnexpectedStackDepth);
2069 }
2070}
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00002071
2072void FullCodeGenerator::EmitCreateIteratorResult(bool done) {
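  // Builds a JSIteratorResult ({value, done}) in r0: the value is popped from
  // the operand stack and |done| selects the true/false root for the done
  // field. Allocation falls back to the runtime when new space is full.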
2073 Label allocate, done_allocate;
2074
2075 __ Allocate(JSIteratorResult::kSize, r0, r2, r3, &allocate, TAG_OBJECT);
2076 __ b(&done_allocate);
2077
2078 __ bind(&allocate);
2079 __ Push(Smi::FromInt(JSIteratorResult::kSize));
2080 __ CallRuntime(Runtime::kAllocateInNewSpace);
2081
2082 __ bind(&done_allocate);
2083 __ LoadNativeContextSlot(Context::ITERATOR_RESULT_MAP_INDEX, r1);
2084 __ pop(r2);
2085 __ LoadRoot(r3,
2086 done ? Heap::kTrueValueRootIndex : Heap::kFalseValueRootIndex);
2087 __ LoadRoot(r4, Heap::kEmptyFixedArrayRootIndex);
2088 __ str(r1, FieldMemOperand(r0, HeapObject::kMapOffset));
2089 __ str(r4, FieldMemOperand(r0, JSObject::kPropertiesOffset));
2090 __ str(r4, FieldMemOperand(r0, JSObject::kElementsOffset));
2091 __ str(r2, FieldMemOperand(r0, JSIteratorResult::kValueOffset));
2092 __ str(r3, FieldMemOperand(r0, JSIteratorResult::kDoneOffset));
2093}
2094
2095
2096void FullCodeGenerator::EmitNamedPropertyLoad(Property* prop) {
2097 SetExpressionPosition(prop);
2098 Literal* key = prop->key()->AsLiteral();
2099 DCHECK(!prop->IsSuperAccess());
2100
2101 __ mov(LoadDescriptor::NameRegister(), Operand(key->value()));
2102 __ mov(LoadDescriptor::SlotRegister(),
2103 Operand(SmiFromSlot(prop->PropertyFeedbackSlot())));
Ben Murdoch097c5b22016-05-18 11:27:45 +01002104 CallLoadIC(NOT_INSIDE_TYPEOF);
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00002105}
2106
2107
2108void FullCodeGenerator::EmitInlineSmiBinaryOp(BinaryOperation* expr,
2109 Token::Value op,
2110 Expression* left_expr,
2111 Expression* right_expr) {
2112 Label done, smi_case, stub_call;
2113
2114 Register scratch1 = r2;
2115 Register scratch2 = r3;
2116
2117 // Get the arguments.
2118 Register left = r1;
2119 Register right = r0;
Ben Murdoch097c5b22016-05-18 11:27:45 +01002120 PopOperand(left);
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00002121
2122 // Perform combined smi check on both operands.
2123 __ orr(scratch1, left, Operand(right));
2124 STATIC_ASSERT(kSmiTag == 0);
2125 JumpPatchSite patch_site(masm_);
2126 patch_site.EmitJumpIfSmi(scratch1, &smi_case);
2127
2128 __ bind(&stub_call);
Ben Murdoch097c5b22016-05-18 11:27:45 +01002129 Handle<Code> code = CodeFactory::BinaryOpIC(isolate(), op).code();
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00002130 CallIC(code, expr->BinaryOperationFeedbackId());
2131 patch_site.EmitPatchInfo();
2132 __ jmp(&done);
2133
2134 __ bind(&smi_case);
2135 // Smi case. This code works the same way as the smi-smi case in the
2136 // type-recording binary operation stub (the BinaryOpIC called above).
2137 switch (op) {
2138 case Token::SAR:
2139 __ GetLeastBitsFromSmi(scratch1, right, 5);
2140 __ mov(right, Operand(left, ASR, scratch1));
2141 __ bic(right, right, Operand(kSmiTagMask));
2142 break;
2143 case Token::SHL: {
2144 __ SmiUntag(scratch1, left);
2145 __ GetLeastBitsFromSmi(scratch2, right, 5);
2146 __ mov(scratch1, Operand(scratch1, LSL, scratch2));
2147 __ TrySmiTag(right, scratch1, &stub_call);
2148 break;
2149 }
2150 case Token::SHR: {
2151 __ SmiUntag(scratch1, left);
2152 __ GetLeastBitsFromSmi(scratch2, right, 5);
2153 __ mov(scratch1, Operand(scratch1, LSR, scratch2));
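      // The unsigned result only fits in a smi if the top two bits are clear:
      // it must be non-negative and small enough to survive the smi tag.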
2154 __ tst(scratch1, Operand(0xc0000000));
2155 __ b(ne, &stub_call);
2156 __ SmiTag(right, scratch1);
2157 break;
2158 }
2159 case Token::ADD:
2160 __ add(scratch1, left, Operand(right), SetCC);
2161 __ b(vs, &stub_call);
2162 __ mov(right, scratch1);
2163 break;
2164 case Token::SUB:
2165 __ sub(scratch1, left, Operand(right), SetCC);
2166 __ b(vs, &stub_call);
2167 __ mov(right, scratch1);
2168 break;
2169 case Token::MUL: {
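      // 'left' is still a tagged smi here, so smull produces a 64-bit product
      // that is already smi-tagged. It fits in a smi only if the high word
      // equals the sign extension of the low word. A zero product additionally
      // needs a sign check on the operands so that a -0 result falls back to
      // the stub.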
2170 __ SmiUntag(ip, right);
2171 __ smull(scratch1, scratch2, left, ip);
2172 __ mov(ip, Operand(scratch1, ASR, 31));
2173 __ cmp(ip, Operand(scratch2));
2174 __ b(ne, &stub_call);
2175 __ cmp(scratch1, Operand::Zero());
2176 __ mov(right, Operand(scratch1), LeaveCC, ne);
2177 __ b(ne, &done);
2178 __ add(scratch2, right, Operand(left), SetCC);
2179 __ mov(right, Operand(Smi::FromInt(0)), LeaveCC, pl);
2180 __ b(mi, &stub_call);
2181 break;
2182 }
2183 case Token::BIT_OR:
2184 __ orr(right, left, Operand(right));
2185 break;
2186 case Token::BIT_AND:
2187 __ and_(right, left, Operand(right));
2188 break;
2189 case Token::BIT_XOR:
2190 __ eor(right, left, Operand(right));
2191 break;
2192 default:
2193 UNREACHABLE();
2194 }
2195
2196 __ bind(&done);
2197 context()->Plug(r0);
2198}
2199
2200
2201void FullCodeGenerator::EmitClassDefineProperties(ClassLiteral* lit) {
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00002202 for (int i = 0; i < lit->properties()->length(); i++) {
2203 ObjectLiteral::Property* property = lit->properties()->at(i);
2204 Expression* value = property->value();
2205
Ben Murdoch097c5b22016-05-18 11:27:45 +01002206 Register scratch = r1;
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00002207 if (property->is_static()) {
2208 __ ldr(scratch, MemOperand(sp, kPointerSize)); // constructor
2209 } else {
2210 __ ldr(scratch, MemOperand(sp, 0)); // prototype
2211 }
Ben Murdoch097c5b22016-05-18 11:27:45 +01002212 PushOperand(scratch);
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00002213 EmitPropertyKey(property, lit->GetIdForProperty(i));
2214
2215 // The static prototype property is read only. We handle the non computed
2216 // property name case in the parser. Since this is the only case where we
2217 // need to check for an own read only property we special case this so we do
2218 // not need to do this for every property.
2219 if (property->is_static() && property->is_computed_name()) {
2220 __ CallRuntime(Runtime::kThrowIfStaticPrototype);
2221 __ push(r0);
2222 }
2223
2224 VisitForStackValue(value);
2225 if (NeedsHomeObject(value)) {
2226 EmitSetHomeObject(value, 2, property->GetSlot());
2227 }
2228
2229 switch (property->kind()) {
2230 case ObjectLiteral::Property::CONSTANT:
2231 case ObjectLiteral::Property::MATERIALIZED_LITERAL:
2232 case ObjectLiteral::Property::PROTOTYPE:
2233 UNREACHABLE();
2234 case ObjectLiteral::Property::COMPUTED:
Ben Murdoch097c5b22016-05-18 11:27:45 +01002235 PushOperand(Smi::FromInt(DONT_ENUM));
2236 PushOperand(Smi::FromInt(property->NeedsSetFunctionName()));
2237 CallRuntimeWithOperands(Runtime::kDefineDataPropertyInLiteral);
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00002238 break;
2239
2240 case ObjectLiteral::Property::GETTER:
Ben Murdoch097c5b22016-05-18 11:27:45 +01002241 PushOperand(Smi::FromInt(DONT_ENUM));
2242 CallRuntimeWithOperands(Runtime::kDefineGetterPropertyUnchecked);
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00002243 break;
2244
2245 case ObjectLiteral::Property::SETTER:
Ben Murdoch097c5b22016-05-18 11:27:45 +01002246 PushOperand(Smi::FromInt(DONT_ENUM));
2247 CallRuntimeWithOperands(Runtime::kDefineSetterPropertyUnchecked);
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00002248 break;
2249
2250 default:
2251 UNREACHABLE();
2252 }
2253 }
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00002254}
2255
2256
2257void FullCodeGenerator::EmitBinaryOp(BinaryOperation* expr, Token::Value op) {
Ben Murdoch097c5b22016-05-18 11:27:45 +01002258 PopOperand(r1);
2259 Handle<Code> code = CodeFactory::BinaryOpIC(isolate(), op).code();
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00002260 JumpPatchSite patch_site(masm_); // unbound, signals no inlined smi code.
2261 CallIC(code, expr->BinaryOperationFeedbackId());
2262 patch_site.EmitPatchInfo();
2263 context()->Plug(r0);
2264}
2265
2266
2267void FullCodeGenerator::EmitAssignment(Expression* expr,
2268 FeedbackVectorSlot slot) {
2269 DCHECK(expr->IsValidReferenceExpressionOrThis());
2270
2271 Property* prop = expr->AsProperty();
2272 LhsKind assign_type = Property::GetAssignType(prop);
2273
2274 switch (assign_type) {
2275 case VARIABLE: {
2276 Variable* var = expr->AsVariableProxy()->var();
2277 EffectContext context(this);
2278 EmitVariableAssignment(var, Token::ASSIGN, slot);
2279 break;
2280 }
2281 case NAMED_PROPERTY: {
Ben Murdoch097c5b22016-05-18 11:27:45 +01002282 PushOperand(r0); // Preserve value.
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00002283 VisitForAccumulatorValue(prop->obj());
2284 __ Move(StoreDescriptor::ReceiverRegister(), r0);
Ben Murdoch097c5b22016-05-18 11:27:45 +01002285 PopOperand(StoreDescriptor::ValueRegister()); // Restore value.
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00002286 __ mov(StoreDescriptor::NameRegister(),
2287 Operand(prop->key()->AsLiteral()->value()));
2288 EmitLoadStoreICSlot(slot);
2289 CallStoreIC();
2290 break;
2291 }
2292 case NAMED_SUPER_PROPERTY: {
Ben Murdoch097c5b22016-05-18 11:27:45 +01002293 PushOperand(r0);
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00002294 VisitForStackValue(prop->obj()->AsSuperPropertyReference()->this_var());
2295 VisitForAccumulatorValue(
2296 prop->obj()->AsSuperPropertyReference()->home_object());
2297 // stack: value, this; r0: home_object
2298 Register scratch = r2;
2299 Register scratch2 = r3;
2300 __ mov(scratch, result_register()); // home_object
2301 __ ldr(r0, MemOperand(sp, kPointerSize)); // value
2302 __ ldr(scratch2, MemOperand(sp, 0)); // this
2303 __ str(scratch2, MemOperand(sp, kPointerSize)); // this
2304 __ str(scratch, MemOperand(sp, 0)); // home_object
2305 // stack: this, home_object; r0: value
2306 EmitNamedSuperPropertyStore(prop);
2307 break;
2308 }
2309 case KEYED_SUPER_PROPERTY: {
Ben Murdoch097c5b22016-05-18 11:27:45 +01002310 PushOperand(r0);
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00002311 VisitForStackValue(prop->obj()->AsSuperPropertyReference()->this_var());
2312 VisitForStackValue(
2313 prop->obj()->AsSuperPropertyReference()->home_object());
2314 VisitForAccumulatorValue(prop->key());
2315 Register scratch = r2;
2316 Register scratch2 = r3;
2317 __ ldr(scratch2, MemOperand(sp, 2 * kPointerSize)); // value
2318 // stack: value, this, home_object; r0: key, r3: value
2319 __ ldr(scratch, MemOperand(sp, kPointerSize)); // this
2320 __ str(scratch, MemOperand(sp, 2 * kPointerSize));
2321 __ ldr(scratch, MemOperand(sp, 0)); // home_object
2322 __ str(scratch, MemOperand(sp, kPointerSize));
2323 __ str(r0, MemOperand(sp, 0));
2324 __ Move(r0, scratch2);
2325 // stack: this, home_object, key; r0: value.
2326 EmitKeyedSuperPropertyStore(prop);
2327 break;
2328 }
2329 case KEYED_PROPERTY: {
Ben Murdoch097c5b22016-05-18 11:27:45 +01002330 PushOperand(r0); // Preserve value.
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00002331 VisitForStackValue(prop->obj());
2332 VisitForAccumulatorValue(prop->key());
2333 __ Move(StoreDescriptor::NameRegister(), r0);
Ben Murdoch097c5b22016-05-18 11:27:45 +01002334 PopOperands(StoreDescriptor::ValueRegister(),
2335 StoreDescriptor::ReceiverRegister());
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00002336 EmitLoadStoreICSlot(slot);
2337 Handle<Code> ic =
2338 CodeFactory::KeyedStoreIC(isolate(), language_mode()).code();
2339 CallIC(ic);
2340 break;
2341 }
2342 }
2343 context()->Plug(r0);
2344}
2345
2346
2347void FullCodeGenerator::EmitStoreToStackLocalOrContextSlot(
2348 Variable* var, MemOperand location) {
2349 __ str(result_register(), location);
2350 if (var->IsContextSlot()) {
2351 // RecordWrite may destroy all its register arguments.
2352 __ mov(r3, result_register());
2353 int offset = Context::SlotOffset(var->index());
2354 __ RecordWriteContextSlot(
2355 r1, offset, r3, r2, kLRHasBeenSaved, kDontSaveFPRegs);
2356 }
2357}
2358
2359
2360void FullCodeGenerator::EmitVariableAssignment(Variable* var, Token::Value op,
2361 FeedbackVectorSlot slot) {
2362 if (var->IsUnallocated()) {
2363 // Global var, const, or let.
2364 __ mov(StoreDescriptor::NameRegister(), Operand(var->name()));
2365 __ LoadGlobalObject(StoreDescriptor::ReceiverRegister());
2366 EmitLoadStoreICSlot(slot);
2367 CallStoreIC();
2368
2369 } else if (var->mode() == LET && op != Token::INIT) {
2370 // Non-initializing assignment to let variable needs a write barrier.
2371 DCHECK(!var->IsLookupSlot());
2372 DCHECK(var->IsStackAllocated() || var->IsContextSlot());
2373 Label assign;
2374 MemOperand location = VarOperand(var, r1);
2375 __ ldr(r3, location);
2376 __ CompareRoot(r3, Heap::kTheHoleValueRootIndex);
2377 __ b(ne, &assign);
2378 __ mov(r3, Operand(var->name()));
2379 __ push(r3);
2380 __ CallRuntime(Runtime::kThrowReferenceError);
2381 // Perform the assignment.
2382 __ bind(&assign);
2383 EmitStoreToStackLocalOrContextSlot(var, location);
2384
2385 } else if (var->mode() == CONST && op != Token::INIT) {
2386 // Assignment to const variable needs a write barrier.
2387 DCHECK(!var->IsLookupSlot());
2388 DCHECK(var->IsStackAllocated() || var->IsContextSlot());
2389 Label const_error;
2390 MemOperand location = VarOperand(var, r1);
2391 __ ldr(r3, location);
2392 __ CompareRoot(r3, Heap::kTheHoleValueRootIndex);
2393 __ b(ne, &const_error);
2394 __ mov(r3, Operand(var->name()));
2395 __ push(r3);
2396 __ CallRuntime(Runtime::kThrowReferenceError);
2397 __ bind(&const_error);
2398 __ CallRuntime(Runtime::kThrowConstAssignError);
2399
2400 } else if (var->is_this() && var->mode() == CONST && op == Token::INIT) {
2401 // Initializing assignment to const {this} needs a write barrier.
2402 DCHECK(var->IsStackAllocated() || var->IsContextSlot());
2403 Label uninitialized_this;
2404 MemOperand location = VarOperand(var, r1);
2405 __ ldr(r3, location);
2406 __ CompareRoot(r3, Heap::kTheHoleValueRootIndex);
2407 __ b(eq, &uninitialized_this);
2408 __ mov(r0, Operand(var->name()));
2409 __ Push(r0);
2410 __ CallRuntime(Runtime::kThrowReferenceError);
2411 __ bind(&uninitialized_this);
2412 EmitStoreToStackLocalOrContextSlot(var, location);
2413
2414 } else if (!var->is_const_mode() ||
2415 (var->mode() == CONST && op == Token::INIT)) {
2416 if (var->IsLookupSlot()) {
2417 // Assignment to var.
Ben Murdoch097c5b22016-05-18 11:27:45 +01002418 __ Push(var->name());
2419 __ Push(r0);
2420 __ CallRuntime(is_strict(language_mode())
2421 ? Runtime::kStoreLookupSlot_Strict
2422 : Runtime::kStoreLookupSlot_Sloppy);
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00002423 } else {
2424 // Assignment to var or initializing assignment to let/const in harmony
2425 // mode.
2426 DCHECK((var->IsStackAllocated() || var->IsContextSlot()));
2427 MemOperand location = VarOperand(var, r1);
Ben Murdoch097c5b22016-05-18 11:27:45 +01002428 if (FLAG_debug_code && var->mode() == LET && op == Token::INIT) {
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00002429 // Check for an uninitialized let binding.
2430 __ ldr(r2, location);
2431 __ CompareRoot(r2, Heap::kTheHoleValueRootIndex);
2432 __ Check(eq, kLetBindingReInitialization);
2433 }
2434 EmitStoreToStackLocalOrContextSlot(var, location);
2435 }
2436
2437 } else if (var->mode() == CONST_LEGACY && op == Token::INIT) {
2438 // Const initializers need a write barrier.
2439 DCHECK(!var->IsParameter()); // No const parameters.
2440 if (var->IsLookupSlot()) {
2441 __ push(r0);
2442 __ mov(r0, Operand(var->name()));
2443 __ Push(cp, r0); // Context and name.
2444 __ CallRuntime(Runtime::kInitializeLegacyConstLookupSlot);
2445 } else {
2446 DCHECK(var->IsStackAllocated() || var->IsContextSlot());
2447 Label skip;
2448 MemOperand location = VarOperand(var, r1);
2449 __ ldr(r2, location);
2450 __ CompareRoot(r2, Heap::kTheHoleValueRootIndex);
2451 __ b(ne, &skip);
2452 EmitStoreToStackLocalOrContextSlot(var, location);
2453 __ bind(&skip);
2454 }
2455
2456 } else {
2457 DCHECK(var->mode() == CONST_LEGACY && op != Token::INIT);
2458 if (is_strict(language_mode())) {
2459 __ CallRuntime(Runtime::kThrowConstAssignError);
2460 }
2461 // Silently ignore store in sloppy mode.
2462 }
2463}
2464
2465
2466void FullCodeGenerator::EmitNamedPropertyAssignment(Assignment* expr) {
2467 // Assignment to a property, using a named store IC.
2468 Property* prop = expr->target()->AsProperty();
2469 DCHECK(prop != NULL);
2470 DCHECK(prop->key()->IsLiteral());
2471
2472 __ mov(StoreDescriptor::NameRegister(),
2473 Operand(prop->key()->AsLiteral()->value()));
Ben Murdoch097c5b22016-05-18 11:27:45 +01002474 PopOperand(StoreDescriptor::ReceiverRegister());
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00002475 EmitLoadStoreICSlot(expr->AssignmentSlot());
2476 CallStoreIC();
2477
2478 PrepareForBailoutForId(expr->AssignmentId(), TOS_REG);
2479 context()->Plug(r0);
2480}
2481
2482
2483void FullCodeGenerator::EmitNamedSuperPropertyStore(Property* prop) {
2484 // Assignment to named property of super.
2485 // r0 : value
2486 // stack : receiver ('this'), home_object
2487 DCHECK(prop != NULL);
2488 Literal* key = prop->key()->AsLiteral();
2489 DCHECK(key != NULL);
2490
Ben Murdoch097c5b22016-05-18 11:27:45 +01002491 PushOperand(key->value());
2492 PushOperand(r0);
2493 CallRuntimeWithOperands(is_strict(language_mode())
2494 ? Runtime::kStoreToSuper_Strict
2495 : Runtime::kStoreToSuper_Sloppy);
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00002496}
2497
2498
2499void FullCodeGenerator::EmitKeyedSuperPropertyStore(Property* prop) {
2500 // Assignment to named property of super.
2501 // r0 : value
2502 // stack : receiver ('this'), home_object, key
2503 DCHECK(prop != NULL);
2504
Ben Murdoch097c5b22016-05-18 11:27:45 +01002505 PushOperand(r0);
2506 CallRuntimeWithOperands(is_strict(language_mode())
2507 ? Runtime::kStoreKeyedToSuper_Strict
2508 : Runtime::kStoreKeyedToSuper_Sloppy);
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00002509}
2510
2511
2512void FullCodeGenerator::EmitKeyedPropertyAssignment(Assignment* expr) {
2513 // Assignment to a property, using a keyed store IC.
Ben Murdoch097c5b22016-05-18 11:27:45 +01002514 PopOperands(StoreDescriptor::ReceiverRegister(),
2515 StoreDescriptor::NameRegister());
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00002516 DCHECK(StoreDescriptor::ValueRegister().is(r0));
2517
2518 Handle<Code> ic =
2519 CodeFactory::KeyedStoreIC(isolate(), language_mode()).code();
2520 EmitLoadStoreICSlot(expr->AssignmentSlot());
2521 CallIC(ic);
2522
2523 PrepareForBailoutForId(expr->AssignmentId(), TOS_REG);
2524 context()->Plug(r0);
2525}
2526
2527
2528void FullCodeGenerator::VisitProperty(Property* expr) {
2529 Comment cmnt(masm_, "[ Property");
2530 SetExpressionPosition(expr);
2531
2532 Expression* key = expr->key();
2533
2534 if (key->IsPropertyName()) {
2535 if (!expr->IsSuperAccess()) {
2536 VisitForAccumulatorValue(expr->obj());
2537 __ Move(LoadDescriptor::ReceiverRegister(), r0);
2538 EmitNamedPropertyLoad(expr);
2539 } else {
2540 VisitForStackValue(expr->obj()->AsSuperPropertyReference()->this_var());
2541 VisitForStackValue(
2542 expr->obj()->AsSuperPropertyReference()->home_object());
2543 EmitNamedSuperPropertyLoad(expr);
2544 }
2545 } else {
2546 if (!expr->IsSuperAccess()) {
2547 VisitForStackValue(expr->obj());
2548 VisitForAccumulatorValue(expr->key());
2549 __ Move(LoadDescriptor::NameRegister(), r0);
Ben Murdoch097c5b22016-05-18 11:27:45 +01002550 PopOperand(LoadDescriptor::ReceiverRegister());
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00002551 EmitKeyedPropertyLoad(expr);
2552 } else {
2553 VisitForStackValue(expr->obj()->AsSuperPropertyReference()->this_var());
2554 VisitForStackValue(
2555 expr->obj()->AsSuperPropertyReference()->home_object());
2556 VisitForStackValue(expr->key());
2557 EmitKeyedSuperPropertyLoad(expr);
2558 }
2559 }
2560 PrepareForBailoutForId(expr->LoadId(), TOS_REG);
2561 context()->Plug(r0);
2562}
2563
2564
2565void FullCodeGenerator::CallIC(Handle<Code> code,
2566 TypeFeedbackId ast_id) {
2567 ic_total_count_++;
2568 // All calls must have a predictable size in full-codegen code to ensure that
2569 // the debugger can patch them correctly.
2570 __ Call(code, RelocInfo::CODE_TARGET, ast_id, al,
2571 NEVER_INLINE_TARGET_ADDRESS);
2572}
2573
2574
2575// Code common for calls using the IC.
2576void FullCodeGenerator::EmitCallWithLoadIC(Call* expr) {
2577 Expression* callee = expr->expression();
2578
2579 // Get the target function.
2580 ConvertReceiverMode convert_mode;
2581 if (callee->IsVariableProxy()) {
2582 { StackValueContext context(this);
2583 EmitVariableLoad(callee->AsVariableProxy());
2584 PrepareForBailout(callee, NO_REGISTERS);
2585 }
2586 // Push undefined as receiver. This is patched in the method prologue if it
2587 // is a sloppy mode method.
2588 __ LoadRoot(ip, Heap::kUndefinedValueRootIndex);
Ben Murdoch097c5b22016-05-18 11:27:45 +01002589 PushOperand(ip);
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00002590 convert_mode = ConvertReceiverMode::kNullOrUndefined;
2591 } else {
2592 // Load the function from the receiver.
2593 DCHECK(callee->IsProperty());
2594 DCHECK(!callee->AsProperty()->IsSuperAccess());
2595 __ ldr(LoadDescriptor::ReceiverRegister(), MemOperand(sp, 0));
2596 EmitNamedPropertyLoad(callee->AsProperty());
2597 PrepareForBailoutForId(callee->AsProperty()->LoadId(), TOS_REG);
2598 // Push the target function under the receiver.
2599 __ ldr(ip, MemOperand(sp, 0));
Ben Murdoch097c5b22016-05-18 11:27:45 +01002600 PushOperand(ip);
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00002601 __ str(r0, MemOperand(sp, kPointerSize));
2602 convert_mode = ConvertReceiverMode::kNotNullOrUndefined;
2603 }
2604
2605 EmitCall(expr, convert_mode);
2606}
2607
2608
2609void FullCodeGenerator::EmitSuperCallWithLoadIC(Call* expr) {
2610 Expression* callee = expr->expression();
2611 DCHECK(callee->IsProperty());
2612 Property* prop = callee->AsProperty();
2613 DCHECK(prop->IsSuperAccess());
2614 SetExpressionPosition(prop);
2615
2616 Literal* key = prop->key()->AsLiteral();
2617 DCHECK(!key->value()->IsSmi());
2618 // Load the function from the receiver.
2619 const Register scratch = r1;
2620 SuperPropertyReference* super_ref = prop->obj()->AsSuperPropertyReference();
2621 VisitForStackValue(super_ref->home_object());
2622 VisitForAccumulatorValue(super_ref->this_var());
Ben Murdoch097c5b22016-05-18 11:27:45 +01002623 PushOperand(r0);
2624 PushOperand(r0);
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00002625 __ ldr(scratch, MemOperand(sp, kPointerSize * 2));
Ben Murdoch097c5b22016-05-18 11:27:45 +01002626 PushOperand(scratch);
2627 PushOperand(key->value());
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00002628
2629 // Stack here:
2630 // - home_object
2631 // - this (receiver)
2632 // - this (receiver) <-- LoadFromSuper will pop here and below.
2633 // - home_object
2634 // - key
Ben Murdoch097c5b22016-05-18 11:27:45 +01002635 CallRuntimeWithOperands(Runtime::kLoadFromSuper);
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00002636
2637 // Replace home_object with target function.
2638 __ str(r0, MemOperand(sp, kPointerSize));
2639
2640 // Stack here:
2641 // - target function
2642 // - this (receiver)
2643 EmitCall(expr);
2644}
2645
2646
2647// Code common for calls using the IC.
2648void FullCodeGenerator::EmitKeyedCallWithLoadIC(Call* expr,
2649 Expression* key) {
2650 // Load the key.
2651 VisitForAccumulatorValue(key);
2652
2653 Expression* callee = expr->expression();
2654
2655 // Load the function from the receiver.
2656 DCHECK(callee->IsProperty());
2657 __ ldr(LoadDescriptor::ReceiverRegister(), MemOperand(sp, 0));
2658 __ Move(LoadDescriptor::NameRegister(), r0);
2659 EmitKeyedPropertyLoad(callee->AsProperty());
2660 PrepareForBailoutForId(callee->AsProperty()->LoadId(), TOS_REG);
2661
2662 // Push the target function under the receiver.
2663 __ ldr(ip, MemOperand(sp, 0));
Ben Murdoch097c5b22016-05-18 11:27:45 +01002664 PushOperand(ip);
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00002665 __ str(r0, MemOperand(sp, kPointerSize));
2666
2667 EmitCall(expr, ConvertReceiverMode::kNotNullOrUndefined);
2668}
2669
2670
2671void FullCodeGenerator::EmitKeyedSuperCallWithLoadIC(Call* expr) {
2672 Expression* callee = expr->expression();
2673 DCHECK(callee->IsProperty());
2674 Property* prop = callee->AsProperty();
2675 DCHECK(prop->IsSuperAccess());
2676
2677 SetExpressionPosition(prop);
2678 // Load the function from the receiver.
2679 const Register scratch = r1;
2680 SuperPropertyReference* super_ref = prop->obj()->AsSuperPropertyReference();
2681 VisitForStackValue(super_ref->home_object());
2682 VisitForAccumulatorValue(super_ref->this_var());
Ben Murdoch097c5b22016-05-18 11:27:45 +01002683 PushOperand(r0);
2684 PushOperand(r0);
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00002685 __ ldr(scratch, MemOperand(sp, kPointerSize * 2));
Ben Murdoch097c5b22016-05-18 11:27:45 +01002686 PushOperand(scratch);
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00002687 VisitForStackValue(prop->key());
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00002688
2689 // Stack here:
2690 // - home_object
2691 // - this (receiver)
2692 // - this (receiver) <-- LoadKeyedFromSuper will pop here and below.
2693 // - home_object
2694 // - key
Ben Murdoch097c5b22016-05-18 11:27:45 +01002695 CallRuntimeWithOperands(Runtime::kLoadKeyedFromSuper);
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00002696
2697 // Replace home_object with target function.
2698 __ str(r0, MemOperand(sp, kPointerSize));
2699
2700 // Stack here:
2701 // - target function
2702 // - this (receiver)
2703 EmitCall(expr);
2704}
2705
2706
2707void FullCodeGenerator::EmitCall(Call* expr, ConvertReceiverMode mode) {
2708 // Load the arguments.
2709 ZoneList<Expression*>* args = expr->arguments();
2710 int arg_count = args->length();
2711 for (int i = 0; i < arg_count; i++) {
2712 VisitForStackValue(args->at(i));
2713 }
2714
2715 PrepareForBailoutForId(expr->CallId(), NO_REGISTERS);
2716 SetCallPosition(expr);
Ben Murdoch097c5b22016-05-18 11:27:45 +01002717 if (expr->tail_call_mode() == TailCallMode::kAllow) {
2718 if (FLAG_trace) {
2719 __ CallRuntime(Runtime::kTraceTailCall);
2720 }
2721 // Update profiling counters before the tail call since we will
2722 // not return to this function.
2723 EmitProfilingCounterHandlingForReturnSequence(true);
2724 }
2725 Handle<Code> ic =
2726 CodeFactory::CallIC(isolate(), arg_count, mode, expr->tail_call_mode())
2727 .code();
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00002728 __ mov(r3, Operand(SmiFromSlot(expr->CallFeedbackICSlot())));
2729 __ ldr(r1, MemOperand(sp, (arg_count + 1) * kPointerSize));
2730 // Don't assign a type feedback id to the IC, since type feedback is provided
2731 // by the vector above.
2732 CallIC(ic);
Ben Murdoch097c5b22016-05-18 11:27:45 +01002733 OperandStackDepthDecrement(arg_count + 1);
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00002734
2735 RecordJSReturnSite(expr);
2736 // Restore context register.
2737 __ ldr(cp, MemOperand(fp, StandardFrameConstants::kContextOffset));
2738 context()->DropAndPlug(1, r0);
2739}
2740
2741
2742void FullCodeGenerator::EmitResolvePossiblyDirectEval(int arg_count) {
2743 // r4: copy of the first argument or undefined if it doesn't exist.
2744 if (arg_count > 0) {
2745 __ ldr(r4, MemOperand(sp, arg_count * kPointerSize));
2746 } else {
2747 __ LoadRoot(r4, Heap::kUndefinedValueRootIndex);
2748 }
2749
2750 // r3: the receiver of the enclosing function.
2751 __ ldr(r3, MemOperand(fp, JavaScriptFrameConstants::kFunctionOffset));
2752
2753 // r2: language mode.
2754 __ mov(r2, Operand(Smi::FromInt(language_mode())));
2755
2756 // r1: the start position of the scope the call resides in.
2757 __ mov(r1, Operand(Smi::FromInt(scope()->start_position())));
2758
2759 // Do the runtime call.
2760 __ Push(r4, r3, r2, r1);
2761 __ CallRuntime(Runtime::kResolvePossiblyDirectEval);
2762}
2763
2764
2765// See http://www.ecma-international.org/ecma-262/6.0/#sec-function-calls.
2766void FullCodeGenerator::PushCalleeAndWithBaseObject(Call* expr) {
2767 VariableProxy* callee = expr->expression()->AsVariableProxy();
2768 if (callee->var()->IsLookupSlot()) {
2769 Label slow, done;
2770 SetExpressionPosition(callee);
2771 // Generate code for loading from variables potentially shadowed
2772 // by eval-introduced variables.
2773 EmitDynamicLookupFastCase(callee, NOT_INSIDE_TYPEOF, &slow, &done);
2774
2775 __ bind(&slow);
2776 // Call the runtime to find the function to call (returned in r0)
2777 // and the object holding it (returned in r1).
Ben Murdoch097c5b22016-05-18 11:27:45 +01002778 __ Push(callee->name());
2779 __ CallRuntime(Runtime::kLoadLookupSlotForCall);
2780 PushOperands(r0, r1); // Function, receiver.
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00002781 PrepareForBailoutForId(expr->LookupId(), NO_REGISTERS);
2782
2783 // If fast case code has been generated, emit code to push the
2784 // function and receiver and have the slow path jump around this
2785 // code.
2786 if (done.is_linked()) {
2787 Label call;
2788 __ b(&call);
2789 __ bind(&done);
2790 // Push function.
2791 __ push(r0);
2792 // The receiver is implicitly the global receiver. Indicate this
2793 // by passing the hole to the call function stub.
2794 __ LoadRoot(r1, Heap::kUndefinedValueRootIndex);
2795 __ push(r1);
2796 __ bind(&call);
2797 }
2798 } else {
2799 VisitForStackValue(callee);
2800 // refEnv.WithBaseObject()
2801 __ LoadRoot(r2, Heap::kUndefinedValueRootIndex);
Ben Murdoch097c5b22016-05-18 11:27:45 +01002802 PushOperand(r2); // Reserved receiver slot.
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00002803 }
2804}
2805
2806
2807void FullCodeGenerator::EmitPossiblyEvalCall(Call* expr) {
2808 // In a call to eval, we first call Runtime::kResolvePossiblyDirectEval to
2809 // resolve the function we need to call. Then we call the resolved function
2810 // using the given arguments.
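  // Roughly: for a direct 'eval(src)' call the runtime compiles 'src' in the
  // current scope and returns the resulting function; otherwise the original
  // callee is returned and called like any other function.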
2811 ZoneList<Expression*>* args = expr->arguments();
2812 int arg_count = args->length();
2813
2814 PushCalleeAndWithBaseObject(expr);
2815
2816 // Push the arguments.
2817 for (int i = 0; i < arg_count; i++) {
2818 VisitForStackValue(args->at(i));
2819 }
2820
2821 // Push a copy of the function (found below the arguments) and
2822 // resolve eval.
2823 __ ldr(r1, MemOperand(sp, (arg_count + 1) * kPointerSize));
2824 __ push(r1);
2825 EmitResolvePossiblyDirectEval(arg_count);
2826
2827 // Touch up the stack with the resolved function.
2828 __ str(r0, MemOperand(sp, (arg_count + 1) * kPointerSize));
2829
2830 PrepareForBailoutForId(expr->EvalId(), NO_REGISTERS);
2831
2832 // Record source position for debugger.
2833 SetCallPosition(expr);
2834 __ ldr(r1, MemOperand(sp, (arg_count + 1) * kPointerSize));
2835 __ mov(r0, Operand(arg_count));
Ben Murdoch097c5b22016-05-18 11:27:45 +01002836 __ Call(isolate()->builtins()->Call(ConvertReceiverMode::kAny,
2837 expr->tail_call_mode()),
2838 RelocInfo::CODE_TARGET);
2839 OperandStackDepthDecrement(arg_count + 1);
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00002840 RecordJSReturnSite(expr);
2841 // Restore context register.
2842 __ ldr(cp, MemOperand(fp, StandardFrameConstants::kContextOffset));
2843 context()->DropAndPlug(1, r0);
2844}
2845
2846
2847void FullCodeGenerator::VisitCallNew(CallNew* expr) {
2848 Comment cmnt(masm_, "[ CallNew");
2849 // According to ECMA-262, section 11.2.2, page 44, the function
2850 // expression in new calls must be evaluated before the
2851 // arguments.
2852
2853 // Push constructor on the stack. If it's not a function it's used as
2854 // receiver for CALL_NON_FUNCTION, otherwise the value on the stack is
2855 // ignored.
2856 DCHECK(!expr->expression()->IsSuperPropertyReference());
2857 VisitForStackValue(expr->expression());
2858
2859 // Push the arguments ("left-to-right") on the stack.
2860 ZoneList<Expression*>* args = expr->arguments();
2861 int arg_count = args->length();
2862 for (int i = 0; i < arg_count; i++) {
2863 VisitForStackValue(args->at(i));
2864 }
2865
2866 // Call the construct call builtin that handles allocation and
2867 // constructor invocation.
2868 SetConstructCallPosition(expr);
2869
2870 // Load function and argument count into r1 and r0.
2871 __ mov(r0, Operand(arg_count));
2872 __ ldr(r1, MemOperand(sp, arg_count * kPointerSize));
2873
2874 // Record call targets in unoptimized code.
2875 __ EmitLoadTypeFeedbackVector(r2);
2876 __ mov(r3, Operand(SmiFromSlot(expr->CallNewFeedbackSlot())));
2877
2878 CallConstructStub stub(isolate());
2879 __ Call(stub.GetCode(), RelocInfo::CODE_TARGET);
Ben Murdoch097c5b22016-05-18 11:27:45 +01002880 OperandStackDepthDecrement(arg_count + 1);
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00002881 PrepareForBailoutForId(expr->ReturnId(), TOS_REG);
2882 // Restore context register.
2883 __ ldr(cp, MemOperand(fp, StandardFrameConstants::kContextOffset));
2884 context()->Plug(r0);
2885}
2886
2887
2888void FullCodeGenerator::EmitSuperConstructorCall(Call* expr) {
2889 SuperCallReference* super_call_ref =
2890 expr->expression()->AsSuperCallReference();
2891 DCHECK_NOT_NULL(super_call_ref);
2892
2893 // Push the super constructor target on the stack (may be null,
2894 // but the Construct builtin can deal with that properly).
2895 VisitForAccumulatorValue(super_call_ref->this_function_var());
2896 __ AssertFunction(result_register());
2897 __ ldr(result_register(),
2898 FieldMemOperand(result_register(), HeapObject::kMapOffset));
2899 __ ldr(result_register(),
2900 FieldMemOperand(result_register(), Map::kPrototypeOffset));
Ben Murdoch097c5b22016-05-18 11:27:45 +01002901 PushOperand(result_register());
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00002902
2903 // Push the arguments ("left-to-right") on the stack.
2904 ZoneList<Expression*>* args = expr->arguments();
2905 int arg_count = args->length();
2906 for (int i = 0; i < arg_count; i++) {
2907 VisitForStackValue(args->at(i));
2908 }
2909
2910 // Call the construct call builtin that handles allocation and
2911 // constructor invocation.
2912 SetConstructCallPosition(expr);
2913
2914 // Load new target into r3.
2915 VisitForAccumulatorValue(super_call_ref->new_target_var());
2916 __ mov(r3, result_register());
2917
2918 // Load function and argument count into r1 and r0.
2919 __ mov(r0, Operand(arg_count));
2920 __ ldr(r1, MemOperand(sp, arg_count * kPointerSize));
2921
2922 __ Call(isolate()->builtins()->Construct(), RelocInfo::CODE_TARGET);
Ben Murdoch097c5b22016-05-18 11:27:45 +01002923 OperandStackDepthDecrement(arg_count + 1);
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00002924
2925 RecordJSReturnSite(expr);
2926
2927 // Restore context register.
2928 __ ldr(cp, MemOperand(fp, StandardFrameConstants::kContextOffset));
2929 context()->Plug(r0);
2930}
2931
2932
2933void FullCodeGenerator::EmitIsSmi(CallRuntime* expr) {
2934 ZoneList<Expression*>* args = expr->arguments();
2935 DCHECK(args->length() == 1);
2936
2937 VisitForAccumulatorValue(args->at(0));
2938
2939 Label materialize_true, materialize_false;
2940 Label* if_true = NULL;
2941 Label* if_false = NULL;
2942 Label* fall_through = NULL;
2943 context()->PrepareTest(&materialize_true, &materialize_false,
2944 &if_true, &if_false, &fall_through);
2945
2946 PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
2947 __ SmiTst(r0);
2948 Split(eq, if_true, if_false, fall_through);
2949
2950 context()->Plug(if_true, if_false);
2951}
2952
2953
2954void FullCodeGenerator::EmitIsJSReceiver(CallRuntime* expr) {
2955 ZoneList<Expression*>* args = expr->arguments();
2956 DCHECK(args->length() == 1);
2957
2958 VisitForAccumulatorValue(args->at(0));
2959
2960 Label materialize_true, materialize_false;
2961 Label* if_true = NULL;
2962 Label* if_false = NULL;
2963 Label* fall_through = NULL;
2964 context()->PrepareTest(&materialize_true, &materialize_false,
2965 &if_true, &if_false, &fall_through);
2966
2967 __ JumpIfSmi(r0, if_false);
2968 __ CompareObjectType(r0, r1, r1, FIRST_JS_RECEIVER_TYPE);
2969 PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
2970 Split(ge, if_true, if_false, fall_through);
2971
2972 context()->Plug(if_true, if_false);
2973}
2974
2975
2976void FullCodeGenerator::EmitIsArray(CallRuntime* expr) {
2977 ZoneList<Expression*>* args = expr->arguments();
2978 DCHECK(args->length() == 1);
2979
2980 VisitForAccumulatorValue(args->at(0));
2981
2982 Label materialize_true, materialize_false;
2983 Label* if_true = NULL;
2984 Label* if_false = NULL;
2985 Label* fall_through = NULL;
2986 context()->PrepareTest(&materialize_true, &materialize_false,
2987 &if_true, &if_false, &fall_through);
2988
2989 __ JumpIfSmi(r0, if_false);
2990 __ CompareObjectType(r0, r1, r1, JS_ARRAY_TYPE);
2991 PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
2992 Split(eq, if_true, if_false, fall_through);
2993
2994 context()->Plug(if_true, if_false);
2995}
2996
2997
2998void FullCodeGenerator::EmitIsTypedArray(CallRuntime* expr) {
2999 ZoneList<Expression*>* args = expr->arguments();
3000 DCHECK(args->length() == 1);
3001
3002 VisitForAccumulatorValue(args->at(0));
3003
3004 Label materialize_true, materialize_false;
3005 Label* if_true = NULL;
3006 Label* if_false = NULL;
3007 Label* fall_through = NULL;
3008 context()->PrepareTest(&materialize_true, &materialize_false, &if_true,
3009 &if_false, &fall_through);
3010
3011 __ JumpIfSmi(r0, if_false);
3012 __ CompareObjectType(r0, r1, r1, JS_TYPED_ARRAY_TYPE);
3013 PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
3014 Split(eq, if_true, if_false, fall_through);
3015
3016 context()->Plug(if_true, if_false);
3017}
3018
3019
3020void FullCodeGenerator::EmitIsRegExp(CallRuntime* expr) {
3021 ZoneList<Expression*>* args = expr->arguments();
3022 DCHECK(args->length() == 1);
3023
3024 VisitForAccumulatorValue(args->at(0));
3025
3026 Label materialize_true, materialize_false;
3027 Label* if_true = NULL;
3028 Label* if_false = NULL;
3029 Label* fall_through = NULL;
3030 context()->PrepareTest(&materialize_true, &materialize_false,
3031 &if_true, &if_false, &fall_through);
3032
3033 __ JumpIfSmi(r0, if_false);
3034 __ CompareObjectType(r0, r1, r1, JS_REGEXP_TYPE);
3035 PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
3036 Split(eq, if_true, if_false, fall_through);
3037
3038 context()->Plug(if_true, if_false);
3039}
3040
3041
3042void FullCodeGenerator::EmitIsJSProxy(CallRuntime* expr) {
3043 ZoneList<Expression*>* args = expr->arguments();
3044 DCHECK(args->length() == 1);
3045
3046 VisitForAccumulatorValue(args->at(0));
3047
3048 Label materialize_true, materialize_false;
3049 Label* if_true = NULL;
3050 Label* if_false = NULL;
3051 Label* fall_through = NULL;
3052 context()->PrepareTest(&materialize_true, &materialize_false, &if_true,
3053 &if_false, &fall_through);
3054
3055 __ JumpIfSmi(r0, if_false);
3056 __ CompareObjectType(r0, r1, r1, JS_PROXY_TYPE);
3057 PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
3058 Split(eq, if_true, if_false, fall_through);
3059
3060 context()->Plug(if_true, if_false);
3061}
3062
3063
3064void FullCodeGenerator::EmitClassOf(CallRuntime* expr) {
3065 ZoneList<Expression*>* args = expr->arguments();
3066 DCHECK(args->length() == 1);
3067 Label done, null, function, non_function_constructor;
3068
3069 VisitForAccumulatorValue(args->at(0));
3070
3071 // If the object is not a JSReceiver, we return null.
3072 __ JumpIfSmi(r0, &null);
3073 STATIC_ASSERT(LAST_JS_RECEIVER_TYPE == LAST_TYPE);
3074 __ CompareObjectType(r0, r0, r1, FIRST_JS_RECEIVER_TYPE);
3075 // Map is now in r0.
3076 __ b(lt, &null);
3077
3078 // Return 'Function' for JSFunction objects.
3079 __ cmp(r1, Operand(JS_FUNCTION_TYPE));
3080 __ b(eq, &function);
3081
3082 // Check if the constructor in the map is a JS function.
3083 Register instance_type = r2;
3084 __ GetMapConstructor(r0, r0, r1, instance_type);
3085 __ cmp(instance_type, Operand(JS_FUNCTION_TYPE));
3086 __ b(ne, &non_function_constructor);
3087
3088 // r0 now contains the constructor function. Grab the
3089 // instance class name from there.
3090 __ ldr(r0, FieldMemOperand(r0, JSFunction::kSharedFunctionInfoOffset));
3091 __ ldr(r0, FieldMemOperand(r0, SharedFunctionInfo::kInstanceClassNameOffset));
3092 __ b(&done);
3093
3094 // Functions have class 'Function'.
3095 __ bind(&function);
3096 __ LoadRoot(r0, Heap::kFunction_stringRootIndex);
3097 __ jmp(&done);
3098
3099 // Objects with a non-function constructor have class 'Object'.
3100 __ bind(&non_function_constructor);
3101 __ LoadRoot(r0, Heap::kObject_stringRootIndex);
3102 __ jmp(&done);
3103
3104 // Non-JS objects have class null.
3105 __ bind(&null);
3106 __ LoadRoot(r0, Heap::kNullValueRootIndex);
3107
3108 // All done.
3109 __ bind(&done);
3110
3111 context()->Plug(r0);
3112}
3113
3114
3115void FullCodeGenerator::EmitValueOf(CallRuntime* expr) {
3116 ZoneList<Expression*>* args = expr->arguments();
3117 DCHECK(args->length() == 1);
3118 VisitForAccumulatorValue(args->at(0)); // Load the object.
3119
3120 Label done;
3121 // If the object is a smi return the object.
3122 __ JumpIfSmi(r0, &done);
3123 // If the object is not a value type, return the object.
3124 __ CompareObjectType(r0, r1, r1, JS_VALUE_TYPE);
3125 __ ldr(r0, FieldMemOperand(r0, JSValue::kValueOffset), eq);
3126
3127 __ bind(&done);
3128 context()->Plug(r0);
3129}
3130
3131
3132void FullCodeGenerator::EmitOneByteSeqStringSetChar(CallRuntime* expr) {
3133 ZoneList<Expression*>* args = expr->arguments();
3134 DCHECK_EQ(3, args->length());
3135
3136 Register string = r0;
3137 Register index = r1;
3138 Register value = r2;
3139
3140 VisitForStackValue(args->at(0)); // index
3141 VisitForStackValue(args->at(1)); // value
3142 VisitForAccumulatorValue(args->at(2)); // string
3143  PopOperands(index, value);
3144
3145 if (FLAG_debug_code) {
3146 __ SmiTst(value);
3147 __ Check(eq, kNonSmiValue);
3148 __ SmiTst(index);
3149 __ Check(eq, kNonSmiIndex);
3150 __ SmiUntag(index, index);
3151 static const uint32_t one_byte_seq_type = kSeqStringTag | kOneByteStringTag;
3152 __ EmitSeqStringSetCharCheck(string, index, value, one_byte_seq_type);
3153 __ SmiTag(index, index);
3154 }
3155
3156 __ SmiUntag(value, value);
3157 __ add(ip,
3158 string,
3159 Operand(SeqOneByteString::kHeaderSize - kHeapObjectTag));
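  // The index is still a smi (character index << kSmiTagSize); shifting it
  // right by kSmiTagSize while forming the address yields the byte offset of
  // the character in a one-byte string.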
3160 __ strb(value, MemOperand(ip, index, LSR, kSmiTagSize));
3161 context()->Plug(string);
3162}
3163
3164
3165void FullCodeGenerator::EmitTwoByteSeqStringSetChar(CallRuntime* expr) {
3166 ZoneList<Expression*>* args = expr->arguments();
3167 DCHECK_EQ(3, args->length());
3168
3169 Register string = r0;
3170 Register index = r1;
3171 Register value = r2;
3172
3173 VisitForStackValue(args->at(0)); // index
3174 VisitForStackValue(args->at(1)); // value
3175 VisitForAccumulatorValue(args->at(2)); // string
3176  PopOperands(index, value);
3177
3178 if (FLAG_debug_code) {
3179 __ SmiTst(value);
3180 __ Check(eq, kNonSmiValue);
3181 __ SmiTst(index);
3182 __ Check(eq, kNonSmiIndex);
3183 __ SmiUntag(index, index);
3184 static const uint32_t two_byte_seq_type = kSeqStringTag | kTwoByteStringTag;
3185 __ EmitSeqStringSetCharCheck(string, index, value, two_byte_seq_type);
3186 __ SmiTag(index, index);
3187 }
3188
3189 __ SmiUntag(value, value);
3190 __ add(ip,
3191 string,
3192 Operand(SeqTwoByteString::kHeaderSize - kHeapObjectTag));
3193 STATIC_ASSERT(kSmiTagSize == 1 && kSmiTag == 0);
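  // With kSmiTagSize == 1, a smi index is the character index scaled by two,
  // which is exactly the byte offset of a two-byte character, so the index can
  // be used here without untagging.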
3194 __ strh(value, MemOperand(ip, index));
3195 context()->Plug(string);
3196}
3197
3198
3199void FullCodeGenerator::EmitToInteger(CallRuntime* expr) {
3200 ZoneList<Expression*>* args = expr->arguments();
3201 DCHECK_EQ(1, args->length());
3202
3203 // Load the argument into r0 and convert it.
3204 VisitForAccumulatorValue(args->at(0));
3205
3206 // Convert the object to an integer.
3207 Label done_convert;
3208 __ JumpIfSmi(r0, &done_convert);
3209 __ Push(r0);
3210 __ CallRuntime(Runtime::kToInteger);
3211 __ bind(&done_convert);
3212 context()->Plug(r0);
3213}
3214
3215
3216void FullCodeGenerator::EmitStringCharFromCode(CallRuntime* expr) {
3217 ZoneList<Expression*>* args = expr->arguments();
3218 DCHECK(args->length() == 1);
3219 VisitForAccumulatorValue(args->at(0));
3220
3221 Label done;
3222 StringCharFromCodeGenerator generator(r0, r1);
3223 generator.GenerateFast(masm_);
3224 __ jmp(&done);
3225
3226 NopRuntimeCallHelper call_helper;
3227 generator.GenerateSlow(masm_, call_helper);
3228
3229 __ bind(&done);
3230 context()->Plug(r1);
3231}
3232
3233
3234void FullCodeGenerator::EmitStringCharCodeAt(CallRuntime* expr) {
3235 ZoneList<Expression*>* args = expr->arguments();
3236 DCHECK(args->length() == 2);
3237 VisitForStackValue(args->at(0));
3238 VisitForAccumulatorValue(args->at(1));
3239
3240 Register object = r1;
3241 Register index = r0;
3242 Register result = r3;
3243
3244  PopOperand(object);
3245
3246 Label need_conversion;
3247 Label index_out_of_range;
3248 Label done;
3249 StringCharCodeAtGenerator generator(object,
3250 index,
3251 result,
3252 &need_conversion,
3253 &need_conversion,
3254 &index_out_of_range,
3255 STRING_INDEX_IS_NUMBER);
3256 generator.GenerateFast(masm_);
3257 __ jmp(&done);
3258
3259 __ bind(&index_out_of_range);
3260 // When the index is out of range, the spec requires us to return
3261 // NaN.
3262 __ LoadRoot(result, Heap::kNanValueRootIndex);
3263 __ jmp(&done);
3264
3265 __ bind(&need_conversion);
3266 // Load the undefined value into the result register, which will
3267 // trigger conversion.
3268 __ LoadRoot(result, Heap::kUndefinedValueRootIndex);
3269 __ jmp(&done);
3270
3271 NopRuntimeCallHelper call_helper;
3272 generator.GenerateSlow(masm_, NOT_PART_OF_IC_HANDLER, call_helper);
3273
3274 __ bind(&done);
3275 context()->Plug(result);
3276}
3277
3278
3279void FullCodeGenerator::EmitStringCharAt(CallRuntime* expr) {
3280 ZoneList<Expression*>* args = expr->arguments();
3281 DCHECK(args->length() == 2);
3282 VisitForStackValue(args->at(0));
3283 VisitForAccumulatorValue(args->at(1));
3284
3285 Register object = r1;
3286 Register index = r0;
3287 Register scratch = r3;
3288 Register result = r0;
3289
3290  PopOperand(object);
3291
3292 Label need_conversion;
3293 Label index_out_of_range;
3294 Label done;
3295 StringCharAtGenerator generator(object,
3296 index,
3297 scratch,
3298 result,
3299 &need_conversion,
3300 &need_conversion,
3301 &index_out_of_range,
3302 STRING_INDEX_IS_NUMBER);
3303 generator.GenerateFast(masm_);
3304 __ jmp(&done);
3305
3306 __ bind(&index_out_of_range);
3307 // When the index is out of range, the spec requires us to return
3308 // the empty string.
3309 __ LoadRoot(result, Heap::kempty_stringRootIndex);
3310 __ jmp(&done);
3311
3312 __ bind(&need_conversion);
3313 // Move smi zero into the result register, which will trigger
3314 // conversion.
3315 __ mov(result, Operand(Smi::FromInt(0)));
3316 __ jmp(&done);
3317
3318 NopRuntimeCallHelper call_helper;
3319 generator.GenerateSlow(masm_, NOT_PART_OF_IC_HANDLER, call_helper);
3320
3321 __ bind(&done);
3322 context()->Plug(result);
3323}
3324
3325
3326void FullCodeGenerator::EmitCall(CallRuntime* expr) {
3327 ZoneList<Expression*>* args = expr->arguments();
3328 DCHECK_LE(2, args->length());
3329 // Push target, receiver and arguments onto the stack.
3330 for (Expression* const arg : *args) {
3331 VisitForStackValue(arg);
3332 }
3333 PrepareForBailoutForId(expr->CallId(), NO_REGISTERS);
3334 // Move target to r1.
3335 int const argc = args->length() - 2;
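  // The target was pushed first, so it sits below the receiver and the argc
  // arguments, i.e. at depth argc + 1 from the top of the stack.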
3336 __ ldr(r1, MemOperand(sp, (argc + 1) * kPointerSize));
3337 // Call the target.
3338 __ mov(r0, Operand(argc));
3339 __ Call(isolate()->builtins()->Call(), RelocInfo::CODE_TARGET);
3340  OperandStackDepthDecrement(argc + 1);
3341  // Restore context register.
3342 __ ldr(cp, MemOperand(fp, StandardFrameConstants::kContextOffset));
3343 // Discard the function left on TOS.
3344 context()->DropAndPlug(1, r0);
3345}
3346
3347
3348void FullCodeGenerator::EmitHasCachedArrayIndex(CallRuntime* expr) {
3349 ZoneList<Expression*>* args = expr->arguments();
3350 VisitForAccumulatorValue(args->at(0));
3351
3352 Label materialize_true, materialize_false;
3353 Label* if_true = NULL;
3354 Label* if_false = NULL;
3355 Label* fall_through = NULL;
3356 context()->PrepareTest(&materialize_true, &materialize_false,
3357 &if_true, &if_false, &fall_through);
3358
3359 __ ldr(r0, FieldMemOperand(r0, String::kHashFieldOffset));
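  // The hash field caches an array index only when these mask bits are all
  // clear, so the test yields eq exactly for strings with a cached index.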
3360 __ tst(r0, Operand(String::kContainsCachedArrayIndexMask));
3361 PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
3362 Split(eq, if_true, if_false, fall_through);
3363
3364 context()->Plug(if_true, if_false);
3365}
3366
3367
3368void FullCodeGenerator::EmitGetCachedArrayIndex(CallRuntime* expr) {
3369 ZoneList<Expression*>* args = expr->arguments();
3370 DCHECK(args->length() == 1);
3371 VisitForAccumulatorValue(args->at(0));
3372
3373 __ AssertString(r0);
3374
3375 __ ldr(r0, FieldMemOperand(r0, String::kHashFieldOffset));
3376 __ IndexFromHash(r0, r0);
3377
3378 context()->Plug(r0);
3379}
3380
3381
3382void FullCodeGenerator::EmitGetSuperConstructor(CallRuntime* expr) {
3383 ZoneList<Expression*>* args = expr->arguments();
3384 DCHECK_EQ(1, args->length());
3385 VisitForAccumulatorValue(args->at(0));
3386 __ AssertFunction(r0);
3387 __ ldr(r0, FieldMemOperand(r0, HeapObject::kMapOffset));
3388 __ ldr(r0, FieldMemOperand(r0, Map::kPrototypeOffset));
3389 context()->Plug(r0);
3390}
3391
3392
3393void FullCodeGenerator::EmitDebugIsActive(CallRuntime* expr) {
3394 DCHECK(expr->arguments()->length() == 0);
3395 ExternalReference debug_is_active =
3396 ExternalReference::debug_is_active_address(isolate());
3397 __ mov(ip, Operand(debug_is_active));
3398 __ ldrb(r0, MemOperand(ip));
3399 __ SmiTag(r0);
3400 context()->Plug(r0);
3401}
3402
3403
3404void FullCodeGenerator::EmitCreateIterResultObject(CallRuntime* expr) {
3405 ZoneList<Expression*>* args = expr->arguments();
3406 DCHECK_EQ(2, args->length());
3407 VisitForStackValue(args->at(0));
3408 VisitForStackValue(args->at(1));
3409
3410 Label runtime, done;
3411
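  // Try to allocate and initialize the JSIteratorResult inline; if allocation
  // fails, fall back to the runtime call below.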
3412 __ Allocate(JSIteratorResult::kSize, r0, r2, r3, &runtime, TAG_OBJECT);
3413 __ LoadNativeContextSlot(Context::ITERATOR_RESULT_MAP_INDEX, r1);
3414 __ pop(r3);
3415 __ pop(r2);
3416 __ LoadRoot(r4, Heap::kEmptyFixedArrayRootIndex);
3417 __ str(r1, FieldMemOperand(r0, HeapObject::kMapOffset));
3418 __ str(r4, FieldMemOperand(r0, JSObject::kPropertiesOffset));
3419 __ str(r4, FieldMemOperand(r0, JSObject::kElementsOffset));
3420 __ str(r2, FieldMemOperand(r0, JSIteratorResult::kValueOffset));
3421 __ str(r3, FieldMemOperand(r0, JSIteratorResult::kDoneOffset));
3422 STATIC_ASSERT(JSIteratorResult::kSize == 5 * kPointerSize);
3423 __ b(&done);
3424
3425 __ bind(&runtime);
3426  CallRuntimeWithOperands(Runtime::kCreateIterResultObject);
3427
3428 __ bind(&done);
3429 context()->Plug(r0);
3430}
3431
3432
3433void FullCodeGenerator::EmitLoadJSRuntimeFunction(CallRuntime* expr) {
3434 // Push undefined as the receiver.
3435 __ LoadRoot(r0, Heap::kUndefinedValueRootIndex);
3436  PushOperand(r0);
3437
3438 __ LoadNativeContextSlot(expr->context_index(), r0);
3439}
3440
3441
3442void FullCodeGenerator::EmitCallJSRuntimeFunction(CallRuntime* expr) {
3443 ZoneList<Expression*>* args = expr->arguments();
3444 int arg_count = args->length();
3445
3446 SetCallPosition(expr);
3447 __ ldr(r1, MemOperand(sp, (arg_count + 1) * kPointerSize));
3448 __ mov(r0, Operand(arg_count));
3449 __ Call(isolate()->builtins()->Call(ConvertReceiverMode::kNullOrUndefined),
3450 RelocInfo::CODE_TARGET);
3451  OperandStackDepthDecrement(arg_count + 1);
3452}
3453
3454
3455void FullCodeGenerator::VisitCallRuntime(CallRuntime* expr) {
3456 ZoneList<Expression*>* args = expr->arguments();
3457 int arg_count = args->length();
3458
3459 if (expr->is_jsruntime()) {
3460 Comment cmnt(masm_, "[ CallRuntime");
3461 EmitLoadJSRuntimeFunction(expr);
3462
3463 // Push the target function under the receiver.
3464 __ ldr(ip, MemOperand(sp, 0));
3465    PushOperand(ip);
3466    __ str(r0, MemOperand(sp, kPointerSize));
3467
3468 // Push the arguments ("left-to-right").
3469 for (int i = 0; i < arg_count; i++) {
3470 VisitForStackValue(args->at(i));
3471 }
3472
3473 PrepareForBailoutForId(expr->CallId(), NO_REGISTERS);
3474 EmitCallJSRuntimeFunction(expr);
3475
3476 // Restore context register.
3477 __ ldr(cp, MemOperand(fp, StandardFrameConstants::kContextOffset));
3478
3479 context()->DropAndPlug(1, r0);
3480
3481 } else {
3482 const Runtime::Function* function = expr->function();
3483 switch (function->function_id) {
3484#define CALL_INTRINSIC_GENERATOR(Name) \
3485 case Runtime::kInline##Name: { \
3486 Comment cmnt(masm_, "[ Inline" #Name); \
3487 return Emit##Name(expr); \
3488 }
3489 FOR_EACH_FULL_CODE_INTRINSIC(CALL_INTRINSIC_GENERATOR)
3490#undef CALL_INTRINSIC_GENERATOR
3491 default: {
3492 Comment cmnt(masm_, "[ CallRuntime for unhandled intrinsic");
3493 // Push the arguments ("left-to-right").
3494 for (int i = 0; i < arg_count; i++) {
3495 VisitForStackValue(args->at(i));
3496 }
3497
3498 // Call the C runtime function.
3499 PrepareForBailoutForId(expr->CallId(), NO_REGISTERS);
3500 __ CallRuntime(expr->function(), arg_count);
3501        OperandStackDepthDecrement(arg_count);
3502        context()->Plug(r0);
3503 }
3504 }
3505 }
3506}
3507
3508
3509void FullCodeGenerator::VisitUnaryOperation(UnaryOperation* expr) {
3510 switch (expr->op()) {
3511 case Token::DELETE: {
3512 Comment cmnt(masm_, "[ UnaryOperation (DELETE)");
3513 Property* property = expr->expression()->AsProperty();
3514 VariableProxy* proxy = expr->expression()->AsVariableProxy();
3515
3516 if (property != NULL) {
3517 VisitForStackValue(property->obj());
3518 VisitForStackValue(property->key());
3519        CallRuntimeWithOperands(is_strict(language_mode())
3520 ? Runtime::kDeleteProperty_Strict
3521 : Runtime::kDeleteProperty_Sloppy);
3522        context()->Plug(r0);
3523 } else if (proxy != NULL) {
3524 Variable* var = proxy->var();
3525 // Delete of an unqualified identifier is disallowed in strict mode but
3526 // "delete this" is allowed.
3527 bool is_this = var->HasThisName(isolate());
3528 DCHECK(is_sloppy(language_mode()) || is_this);
3529 if (var->IsUnallocatedOrGlobalSlot()) {
3530 __ LoadGlobalObject(r2);
3531 __ mov(r1, Operand(var->name()));
3532 __ Push(r2, r1);
3533 __ CallRuntime(Runtime::kDeleteProperty_Sloppy);
3534 context()->Plug(r0);
3535 } else if (var->IsStackAllocated() || var->IsContextSlot()) {
3536 // Result of deleting non-global, non-dynamic variables is false.
3537 // The subexpression does not have side effects.
3538 context()->Plug(is_this);
3539 } else {
3540 // Non-global variable. Call the runtime to try to delete from the
3541 // context where the variable was introduced.
3542          __ Push(var->name());
3543          __ CallRuntime(Runtime::kDeleteLookupSlot);
3544 context()->Plug(r0);
3545 }
3546 } else {
3547 // Result of deleting non-property, non-variable reference is true.
3548 // The subexpression may have side effects.
3549 VisitForEffect(expr->expression());
3550 context()->Plug(true);
3551 }
3552 break;
3553 }
3554
3555 case Token::VOID: {
3556 Comment cmnt(masm_, "[ UnaryOperation (VOID)");
3557 VisitForEffect(expr->expression());
3558 context()->Plug(Heap::kUndefinedValueRootIndex);
3559 break;
3560 }
3561
3562 case Token::NOT: {
3563 Comment cmnt(masm_, "[ UnaryOperation (NOT)");
3564 if (context()->IsEffect()) {
3565 // Unary NOT has no side effects so it's only necessary to visit the
3566 // subexpression. Match the optimizing compiler by not branching.
3567 VisitForEffect(expr->expression());
3568 } else if (context()->IsTest()) {
3569 const TestContext* test = TestContext::cast(context());
3570 // The labels are swapped for the recursive call.
3571 VisitForControl(expr->expression(),
3572 test->false_label(),
3573 test->true_label(),
3574 test->fall_through());
3575 context()->Plug(test->true_label(), test->false_label());
3576 } else {
3577 // We handle value contexts explicitly rather than simply visiting
3578 // for control and plugging the control flow into the context,
3579 // because we need to prepare a pair of extra administrative AST ids
3580 // for the optimizing compiler.
3581 DCHECK(context()->IsAccumulatorValue() || context()->IsStackValue());
3582 Label materialize_true, materialize_false, done;
3583 VisitForControl(expr->expression(),
3584 &materialize_false,
3585 &materialize_true,
3586 &materialize_true);
3587        if (!context()->IsAccumulatorValue()) OperandStackDepthIncrement(1);
3588        __ bind(&materialize_true);
3589 PrepareForBailoutForId(expr->MaterializeTrueId(), NO_REGISTERS);
3590 __ LoadRoot(r0, Heap::kTrueValueRootIndex);
3591 if (context()->IsStackValue()) __ push(r0);
3592 __ jmp(&done);
3593 __ bind(&materialize_false);
3594 PrepareForBailoutForId(expr->MaterializeFalseId(), NO_REGISTERS);
3595 __ LoadRoot(r0, Heap::kFalseValueRootIndex);
3596 if (context()->IsStackValue()) __ push(r0);
3597 __ bind(&done);
3598 }
3599 break;
3600 }
3601
3602 case Token::TYPEOF: {
3603 Comment cmnt(masm_, "[ UnaryOperation (TYPEOF)");
3604 {
3605 AccumulatorValueContext context(this);
3606 VisitForTypeofValue(expr->expression());
3607 }
3608 __ mov(r3, r0);
3609 TypeofStub typeof_stub(isolate());
3610 __ CallStub(&typeof_stub);
3611 context()->Plug(r0);
3612 break;
3613 }
3614
3615 default:
3616 UNREACHABLE();
3617 }
3618}
3619
3620
3621void FullCodeGenerator::VisitCountOperation(CountOperation* expr) {
3622 DCHECK(expr->expression()->IsValidReferenceExpressionOrThis());
3623
3624 Comment cmnt(masm_, "[ CountOperation");
3625
3626 Property* prop = expr->expression()->AsProperty();
3627 LhsKind assign_type = Property::GetAssignType(prop);
3628
3629 // Evaluate expression and get value.
3630 if (assign_type == VARIABLE) {
3631 DCHECK(expr->expression()->AsVariableProxy()->var() != NULL);
3632 AccumulatorValueContext context(this);
3633 EmitVariableLoad(expr->expression()->AsVariableProxy());
3634 } else {
3635 // Reserve space for result of postfix operation.
3636 if (expr->is_postfix() && !context()->IsEffect()) {
3637 __ mov(ip, Operand(Smi::FromInt(0)));
3638      PushOperand(ip);
3639    }
3640 switch (assign_type) {
3641 case NAMED_PROPERTY: {
3642 // Put the object both on the stack and in the register.
3643 VisitForStackValue(prop->obj());
3644 __ ldr(LoadDescriptor::ReceiverRegister(), MemOperand(sp, 0));
3645 EmitNamedPropertyLoad(prop);
3646 break;
3647 }
3648
3649 case NAMED_SUPER_PROPERTY: {
3650 VisitForStackValue(prop->obj()->AsSuperPropertyReference()->this_var());
3651 VisitForAccumulatorValue(
3652 prop->obj()->AsSuperPropertyReference()->home_object());
3653        PushOperand(result_register());
3654        const Register scratch = r1;
3655        __ ldr(scratch, MemOperand(sp, kPointerSize));
3656        PushOperand(scratch);
3657        PushOperand(result_register());
3658        EmitNamedSuperPropertyLoad(prop);
3659 break;
3660 }
3661
3662 case KEYED_SUPER_PROPERTY: {
3663 VisitForStackValue(prop->obj()->AsSuperPropertyReference()->this_var());
3664 VisitForStackValue(
3665 prop->obj()->AsSuperPropertyReference()->home_object());
3666 VisitForAccumulatorValue(prop->key());
3667        PushOperand(result_register());
3668        const Register scratch = r1;
3669        __ ldr(scratch, MemOperand(sp, 2 * kPointerSize));
3670        PushOperand(scratch);
3671        __ ldr(scratch, MemOperand(sp, 2 * kPointerSize));
3672        PushOperand(scratch);
3673        PushOperand(result_register());
3674        EmitKeyedSuperPropertyLoad(prop);
3675 break;
3676 }
3677
3678 case KEYED_PROPERTY: {
3679 VisitForStackValue(prop->obj());
3680 VisitForStackValue(prop->key());
3681 __ ldr(LoadDescriptor::ReceiverRegister(),
3682 MemOperand(sp, 1 * kPointerSize));
3683 __ ldr(LoadDescriptor::NameRegister(), MemOperand(sp, 0));
3684 EmitKeyedPropertyLoad(prop);
3685 break;
3686 }
3687
3688 case VARIABLE:
3689 UNREACHABLE();
3690 }
3691 }
3692
3693 // We need a second deoptimization point after loading the value
3694  // in case evaluating the property load may have a side effect.
3695 if (assign_type == VARIABLE) {
3696 PrepareForBailout(expr->expression(), TOS_REG);
3697 } else {
3698 PrepareForBailoutForId(prop->LoadId(), TOS_REG);
3699 }
3700
3701 // Inline smi case if we are in a loop.
3702 Label stub_call, done;
3703 JumpPatchSite patch_site(masm_);
3704
3705 int count_value = expr->op() == Token::INC ? 1 : -1;
3706 if (ShouldInlineSmiCase(expr->op())) {
3707 Label slow;
3708 patch_site.EmitJumpIfNotSmi(r0, &slow);
3709
3710 // Save result for postfix expressions.
3711 if (expr->is_postfix()) {
3712 if (!context()->IsEffect()) {
3713 // Save the result on the stack. If we have a named or keyed property
3714 // we store the result under the receiver that is currently on top
3715 // of the stack.
3716 switch (assign_type) {
3717 case VARIABLE:
3718 __ push(r0);
3719 break;
3720 case NAMED_PROPERTY:
3721 __ str(r0, MemOperand(sp, kPointerSize));
3722 break;
3723 case NAMED_SUPER_PROPERTY:
3724 __ str(r0, MemOperand(sp, 2 * kPointerSize));
3725 break;
3726 case KEYED_PROPERTY:
3727 __ str(r0, MemOperand(sp, 2 * kPointerSize));
3728 break;
3729 case KEYED_SUPER_PROPERTY:
3730 __ str(r0, MemOperand(sp, 3 * kPointerSize));
3731 break;
3732 }
3733 }
3734 }
3735
3736 __ add(r0, r0, Operand(Smi::FromInt(count_value)), SetCC);
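    // The add sets the condition flags; "vc" (no overflow) means the result is
    // still a valid smi and the stub call can be skipped.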
3737 __ b(vc, &done);
3738 // Call stub. Undo operation first.
3739 __ sub(r0, r0, Operand(Smi::FromInt(count_value)));
3740 __ jmp(&stub_call);
3741 __ bind(&slow);
3742 }
3743 if (!is_strong(language_mode())) {
3744 ToNumberStub convert_stub(isolate());
3745 __ CallStub(&convert_stub);
3746 PrepareForBailoutForId(expr->ToNumberId(), TOS_REG);
3747 }
3748
3749 // Save result for postfix expressions.
3750 if (expr->is_postfix()) {
3751 if (!context()->IsEffect()) {
3752 // Save the result on the stack. If we have a named or keyed property
3753 // we store the result under the receiver that is currently on top
3754 // of the stack.
3755 switch (assign_type) {
3756 case VARIABLE:
3757          PushOperand(r0);
3758          break;
3759 case NAMED_PROPERTY:
3760 __ str(r0, MemOperand(sp, kPointerSize));
3761 break;
3762 case NAMED_SUPER_PROPERTY:
3763 __ str(r0, MemOperand(sp, 2 * kPointerSize));
3764 break;
3765 case KEYED_PROPERTY:
3766 __ str(r0, MemOperand(sp, 2 * kPointerSize));
3767 break;
3768 case KEYED_SUPER_PROPERTY:
3769 __ str(r0, MemOperand(sp, 3 * kPointerSize));
3770 break;
3771 }
3772 }
3773 }
3774
3775
3776 __ bind(&stub_call);
3777 __ mov(r1, r0);
3778 __ mov(r0, Operand(Smi::FromInt(count_value)));
3779
3780 SetExpressionPosition(expr);
3781
3782  Handle<Code> code = CodeFactory::BinaryOpIC(isolate(), Token::ADD).code();
3783  CallIC(code, expr->CountBinOpFeedbackId());
3784 patch_site.EmitPatchInfo();
3785 __ bind(&done);
3786
3787 if (is_strong(language_mode())) {
3788 PrepareForBailoutForId(expr->ToNumberId(), TOS_REG);
3789 }
3790 // Store the value returned in r0.
3791 switch (assign_type) {
3792 case VARIABLE:
3793 if (expr->is_postfix()) {
3794 { EffectContext context(this);
3795 EmitVariableAssignment(expr->expression()->AsVariableProxy()->var(),
3796 Token::ASSIGN, expr->CountSlot());
3797 PrepareForBailoutForId(expr->AssignmentId(), TOS_REG);
3798 context.Plug(r0);
3799 }
3800        // For all contexts except EffectContext, we have the result on
3801 // top of the stack.
3802 if (!context()->IsEffect()) {
3803 context()->PlugTOS();
3804 }
3805 } else {
3806 EmitVariableAssignment(expr->expression()->AsVariableProxy()->var(),
3807 Token::ASSIGN, expr->CountSlot());
3808 PrepareForBailoutForId(expr->AssignmentId(), TOS_REG);
3809 context()->Plug(r0);
3810 }
3811 break;
3812 case NAMED_PROPERTY: {
3813 __ mov(StoreDescriptor::NameRegister(),
3814 Operand(prop->key()->AsLiteral()->value()));
3815      PopOperand(StoreDescriptor::ReceiverRegister());
3816      EmitLoadStoreICSlot(expr->CountSlot());
3817 CallStoreIC();
3818 PrepareForBailoutForId(expr->AssignmentId(), TOS_REG);
3819 if (expr->is_postfix()) {
3820 if (!context()->IsEffect()) {
3821 context()->PlugTOS();
3822 }
3823 } else {
3824 context()->Plug(r0);
3825 }
3826 break;
3827 }
3828 case NAMED_SUPER_PROPERTY: {
3829 EmitNamedSuperPropertyStore(prop);
3830 if (expr->is_postfix()) {
3831 if (!context()->IsEffect()) {
3832 context()->PlugTOS();
3833 }
3834 } else {
3835 context()->Plug(r0);
3836 }
3837 break;
3838 }
3839 case KEYED_SUPER_PROPERTY: {
3840 EmitKeyedSuperPropertyStore(prop);
3841 if (expr->is_postfix()) {
3842 if (!context()->IsEffect()) {
3843 context()->PlugTOS();
3844 }
3845 } else {
3846 context()->Plug(r0);
3847 }
3848 break;
3849 }
3850 case KEYED_PROPERTY: {
3851      PopOperands(StoreDescriptor::ReceiverRegister(),
3852                  StoreDescriptor::NameRegister());
3853      Handle<Code> ic =
3854 CodeFactory::KeyedStoreIC(isolate(), language_mode()).code();
3855 EmitLoadStoreICSlot(expr->CountSlot());
3856 CallIC(ic);
3857 PrepareForBailoutForId(expr->AssignmentId(), TOS_REG);
3858 if (expr->is_postfix()) {
3859 if (!context()->IsEffect()) {
3860 context()->PlugTOS();
3861 }
3862 } else {
3863 context()->Plug(r0);
3864 }
3865 break;
3866 }
3867 }
3868}
3869
3870
3871void FullCodeGenerator::EmitLiteralCompareTypeof(Expression* expr,
3872 Expression* sub_expr,
3873 Handle<String> check) {
3874 Label materialize_true, materialize_false;
3875 Label* if_true = NULL;
3876 Label* if_false = NULL;
3877 Label* fall_through = NULL;
3878 context()->PrepareTest(&materialize_true, &materialize_false,
3879 &if_true, &if_false, &fall_through);
3880
3881 { AccumulatorValueContext context(this);
3882 VisitForTypeofValue(sub_expr);
3883 }
3884 PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
3885
3886 Factory* factory = isolate()->factory();
3887 if (String::Equals(check, factory->number_string())) {
3888 __ JumpIfSmi(r0, if_true);
3889 __ ldr(r0, FieldMemOperand(r0, HeapObject::kMapOffset));
3890 __ LoadRoot(ip, Heap::kHeapNumberMapRootIndex);
3891 __ cmp(r0, ip);
3892 Split(eq, if_true, if_false, fall_through);
3893 } else if (String::Equals(check, factory->string_string())) {
3894 __ JumpIfSmi(r0, if_false);
3895 __ CompareObjectType(r0, r0, r1, FIRST_NONSTRING_TYPE);
3896 Split(lt, if_true, if_false, fall_through);
3897 } else if (String::Equals(check, factory->symbol_string())) {
3898 __ JumpIfSmi(r0, if_false);
3899 __ CompareObjectType(r0, r0, r1, SYMBOL_TYPE);
3900 Split(eq, if_true, if_false, fall_through);
3901 } else if (String::Equals(check, factory->boolean_string())) {
3902 __ CompareRoot(r0, Heap::kTrueValueRootIndex);
3903 __ b(eq, if_true);
3904 __ CompareRoot(r0, Heap::kFalseValueRootIndex);
3905 Split(eq, if_true, if_false, fall_through);
3906 } else if (String::Equals(check, factory->undefined_string())) {
3907    __ CompareRoot(r0, Heap::kNullValueRootIndex);
3908    __ b(eq, if_false);
3909    __ JumpIfSmi(r0, if_false);
3910 // Check for undetectable objects => true.
3911 __ ldr(r0, FieldMemOperand(r0, HeapObject::kMapOffset));
3912 __ ldrb(r1, FieldMemOperand(r0, Map::kBitFieldOffset));
3913 __ tst(r1, Operand(1 << Map::kIsUndetectable));
3914 Split(ne, if_true, if_false, fall_through);
3915
3916 } else if (String::Equals(check, factory->function_string())) {
3917 __ JumpIfSmi(r0, if_false);
3918 __ ldr(r0, FieldMemOperand(r0, HeapObject::kMapOffset));
3919 __ ldrb(r1, FieldMemOperand(r0, Map::kBitFieldOffset));
3920 __ and_(r1, r1,
3921 Operand((1 << Map::kIsCallable) | (1 << Map::kIsUndetectable)));
3922 __ cmp(r1, Operand(1 << Map::kIsCallable));
3923 Split(eq, if_true, if_false, fall_through);
3924 } else if (String::Equals(check, factory->object_string())) {
3925 __ JumpIfSmi(r0, if_false);
3926 __ CompareRoot(r0, Heap::kNullValueRootIndex);
3927 __ b(eq, if_true);
3928 STATIC_ASSERT(LAST_JS_RECEIVER_TYPE == LAST_TYPE);
3929 __ CompareObjectType(r0, r0, r1, FIRST_JS_RECEIVER_TYPE);
3930 __ b(lt, if_false);
3931 // Check for callable or undetectable objects => false.
3932 __ ldrb(r1, FieldMemOperand(r0, Map::kBitFieldOffset));
3933 __ tst(r1, Operand((1 << Map::kIsCallable) | (1 << Map::kIsUndetectable)));
3934 Split(eq, if_true, if_false, fall_through);
3935// clang-format off
3936#define SIMD128_TYPE(TYPE, Type, type, lane_count, lane_type) \
3937 } else if (String::Equals(check, factory->type##_string())) { \
3938 __ JumpIfSmi(r0, if_false); \
3939 __ ldr(r0, FieldMemOperand(r0, HeapObject::kMapOffset)); \
3940 __ CompareRoot(r0, Heap::k##Type##MapRootIndex); \
3941 Split(eq, if_true, if_false, fall_through);
3942 SIMD128_TYPES(SIMD128_TYPE)
3943#undef SIMD128_TYPE
3944 // clang-format on
3945 } else {
3946 if (if_false != fall_through) __ jmp(if_false);
3947 }
3948 context()->Plug(if_true, if_false);
3949}
3950
3951
3952void FullCodeGenerator::VisitCompareOperation(CompareOperation* expr) {
3953 Comment cmnt(masm_, "[ CompareOperation");
3954 SetExpressionPosition(expr);
3955
3956 // First we try a fast inlined version of the compare when one of
3957 // the operands is a literal.
3958 if (TryLiteralCompare(expr)) return;
3959
3960 // Always perform the comparison for its control flow. Pack the result
3961 // into the expression's context after the comparison is performed.
3962 Label materialize_true, materialize_false;
3963 Label* if_true = NULL;
3964 Label* if_false = NULL;
3965 Label* fall_through = NULL;
3966 context()->PrepareTest(&materialize_true, &materialize_false,
3967 &if_true, &if_false, &fall_through);
3968
3969 Token::Value op = expr->op();
3970 VisitForStackValue(expr->left());
3971 switch (op) {
3972 case Token::IN:
3973 VisitForStackValue(expr->right());
3974      CallRuntimeWithOperands(Runtime::kHasProperty);
3975      PrepareForBailoutBeforeSplit(expr, false, NULL, NULL);
3976 __ CompareRoot(r0, Heap::kTrueValueRootIndex);
3977 Split(eq, if_true, if_false, fall_through);
3978 break;
3979
3980 case Token::INSTANCEOF: {
3981 VisitForAccumulatorValue(expr->right());
3982      PopOperand(r1);
3983      InstanceOfStub stub(isolate());
3984 __ CallStub(&stub);
3985 PrepareForBailoutBeforeSplit(expr, false, NULL, NULL);
3986 __ CompareRoot(r0, Heap::kTrueValueRootIndex);
3987 Split(eq, if_true, if_false, fall_through);
3988 break;
3989 }
3990
3991 default: {
3992 VisitForAccumulatorValue(expr->right());
3993 Condition cond = CompareIC::ComputeCondition(op);
3994      PopOperand(r1);
3995
3996 bool inline_smi_code = ShouldInlineSmiCase(op);
3997 JumpPatchSite patch_site(masm_);
3998 if (inline_smi_code) {
3999 Label slow_case;
4000 __ orr(r2, r0, Operand(r1));
4001 patch_site.EmitJumpIfNotSmi(r2, &slow_case);
4002 __ cmp(r1, r0);
4003 Split(cond, if_true, if_false, NULL);
4004 __ bind(&slow_case);
4005 }
4006
4007      Handle<Code> ic = CodeFactory::CompareIC(isolate(), op).code();
4008      CallIC(ic, expr->CompareOperationFeedbackId());
4009 patch_site.EmitPatchInfo();
4010 PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
4011 __ cmp(r0, Operand::Zero());
4012 Split(cond, if_true, if_false, fall_through);
4013 }
4014 }
4015
4016 // Convert the result of the comparison into one expected for this
4017 // expression's context.
4018 context()->Plug(if_true, if_false);
4019}
4020
4021
4022void FullCodeGenerator::EmitLiteralCompareNil(CompareOperation* expr,
4023 Expression* sub_expr,
4024 NilValue nil) {
4025 Label materialize_true, materialize_false;
4026 Label* if_true = NULL;
4027 Label* if_false = NULL;
4028 Label* fall_through = NULL;
4029 context()->PrepareTest(&materialize_true, &materialize_false,
4030 &if_true, &if_false, &fall_through);
4031
4032 VisitForAccumulatorValue(sub_expr);
4033 PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
4034 if (expr->op() == Token::EQ_STRICT) {
4035 Heap::RootListIndex nil_value = nil == kNullValue ?
4036 Heap::kNullValueRootIndex :
4037 Heap::kUndefinedValueRootIndex;
4038 __ LoadRoot(r1, nil_value);
4039 __ cmp(r0, r1);
4040 Split(eq, if_true, if_false, fall_through);
4041 } else {
4042 Handle<Code> ic = CompareNilICStub::GetUninitialized(isolate(), nil);
4043 CallIC(ic, expr->CompareOperationFeedbackId());
4044 __ CompareRoot(r0, Heap::kTrueValueRootIndex);
4045 Split(eq, if_true, if_false, fall_through);
4046 }
4047 context()->Plug(if_true, if_false);
4048}
4049
4050
4051void FullCodeGenerator::VisitThisFunction(ThisFunction* expr) {
4052 __ ldr(r0, MemOperand(fp, JavaScriptFrameConstants::kFunctionOffset));
4053 context()->Plug(r0);
4054}
4055
4056
4057Register FullCodeGenerator::result_register() {
4058 return r0;
4059}
4060
4061
4062Register FullCodeGenerator::context_register() {
4063 return cp;
4064}
4065
4066
4067void FullCodeGenerator::StoreToFrameField(int frame_offset, Register value) {
4068 DCHECK_EQ(POINTER_SIZE_ALIGN(frame_offset), frame_offset);
4069 __ str(value, MemOperand(fp, frame_offset));
4070}
4071
4072
4073void FullCodeGenerator::LoadContextField(Register dst, int context_index) {
4074 __ ldr(dst, ContextMemOperand(cp, context_index));
4075}
4076
4077
4078void FullCodeGenerator::PushFunctionArgumentForContextAllocation() {
4079 Scope* closure_scope = scope()->ClosureScope();
4080 if (closure_scope->is_script_scope() ||
4081 closure_scope->is_module_scope()) {
4082 // Contexts nested in the native context have a canonical empty function
4083 // as their closure, not the anonymous closure containing the global
4084 // code.
4085 __ LoadNativeContextSlot(Context::CLOSURE_INDEX, ip);
4086 } else if (closure_scope->is_eval_scope()) {
4087 // Contexts created by a call to eval have the same closure as the
4088 // context calling eval, not the anonymous closure containing the eval
4089 // code. Fetch it from the context.
4090 __ ldr(ip, ContextMemOperand(cp, Context::CLOSURE_INDEX));
4091 } else {
4092 DCHECK(closure_scope->is_function_scope());
4093 __ ldr(ip, MemOperand(fp, JavaScriptFrameConstants::kFunctionOffset));
4094 }
4095  PushOperand(ip);
4096}
4097
4098
4099// ----------------------------------------------------------------------------
4100// Non-local control flow support.
4101
4102void FullCodeGenerator::EnterFinallyBlock() {
4103 DCHECK(!result_register().is(r1));
4104  // Store pending message while executing finally block.
4105 ExternalReference pending_message_obj =
4106 ExternalReference::address_of_pending_message_obj(isolate());
4107 __ mov(ip, Operand(pending_message_obj));
4108 __ ldr(r1, MemOperand(ip));
4109  PushOperand(r1);
4110
4111 ClearPendingMessage();
4112}
4113
4114
4115void FullCodeGenerator::ExitFinallyBlock() {
4116 DCHECK(!result_register().is(r1));
4117 // Restore pending message from stack.
4118  PopOperand(r1);
4119  ExternalReference pending_message_obj =
4120 ExternalReference::address_of_pending_message_obj(isolate());
4121 __ mov(ip, Operand(pending_message_obj));
4122 __ str(r1, MemOperand(ip));
4123}
4124
4125
4126void FullCodeGenerator::ClearPendingMessage() {
4127 DCHECK(!result_register().is(r1));
4128 ExternalReference pending_message_obj =
4129 ExternalReference::address_of_pending_message_obj(isolate());
4130 __ LoadRoot(r1, Heap::kTheHoleValueRootIndex);
4131 __ mov(ip, Operand(pending_message_obj));
4132 __ str(r1, MemOperand(ip));
4133}
4134
4135
4136void FullCodeGenerator::EmitLoadStoreICSlot(FeedbackVectorSlot slot) {
4137 DCHECK(!slot.IsInvalid());
4138 __ mov(VectorStoreICTrampolineDescriptor::SlotRegister(),
4139 Operand(SmiFromSlot(slot)));
4140}
4141
4142void FullCodeGenerator::DeferredCommands::EmitCommands() {
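  // Each deferred command (break, continue, return, throw) was recorded with a
  // token; the token popped here selects which command to complete now that
  // the intervening finally blocks have run.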
4143 DCHECK(!result_register().is(r1));
4144 __ Pop(result_register()); // Restore the accumulator.
4145 __ Pop(r1); // Get the token.
4146 for (DeferredCommand cmd : commands_) {
4147 Label skip;
4148 __ cmp(r1, Operand(Smi::FromInt(cmd.token)));
4149 __ b(ne, &skip);
4150 switch (cmd.command) {
4151 case kReturn:
4152 codegen_->EmitUnwindAndReturn();
4153 break;
4154 case kThrow:
4155 __ Push(result_register());
4156 __ CallRuntime(Runtime::kReThrow);
4157 break;
4158 case kContinue:
4159 codegen_->EmitContinue(cmd.target);
4160 break;
4161 case kBreak:
4162 codegen_->EmitBreak(cmd.target);
4163 break;
4164 }
4165 __ bind(&skip);
4166 }
4167}
4168
4169#undef __
4170
4171
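// Walks back from the back edge's "blx ip" call site to the instruction
// sequence that loads the call target into ip, so the patcher below can
// redirect it.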
4172static Address GetInterruptImmediateLoadAddress(Address pc) {
4173 Address load_address = pc - 2 * Assembler::kInstrSize;
4174 if (!FLAG_enable_embedded_constant_pool) {
4175 DCHECK(Assembler::IsLdrPcImmediateOffset(Memory::int32_at(load_address)));
4176 } else if (Assembler::IsLdrPpRegOffset(Memory::int32_at(load_address))) {
4177 // This is an extended constant pool lookup.
4178 if (CpuFeatures::IsSupported(ARMv7)) {
4179 load_address -= 2 * Assembler::kInstrSize;
4180 DCHECK(Assembler::IsMovW(Memory::int32_at(load_address)));
4181 DCHECK(Assembler::IsMovT(
4182 Memory::int32_at(load_address + Assembler::kInstrSize)));
4183 } else {
4184 load_address -= 4 * Assembler::kInstrSize;
4185 DCHECK(Assembler::IsMovImmed(Memory::int32_at(load_address)));
4186 DCHECK(Assembler::IsOrrImmed(
4187 Memory::int32_at(load_address + Assembler::kInstrSize)));
4188 DCHECK(Assembler::IsOrrImmed(
4189 Memory::int32_at(load_address + 2 * Assembler::kInstrSize)));
4190 DCHECK(Assembler::IsOrrImmed(
4191 Memory::int32_at(load_address + 3 * Assembler::kInstrSize)));
4192 }
4193 } else if (CpuFeatures::IsSupported(ARMv7) &&
4194 Assembler::IsMovT(Memory::int32_at(load_address))) {
4195 // This is a movw / movt immediate load.
4196 load_address -= Assembler::kInstrSize;
4197 DCHECK(Assembler::IsMovW(Memory::int32_at(load_address)));
4198 } else if (!CpuFeatures::IsSupported(ARMv7) &&
4199 Assembler::IsOrrImmed(Memory::int32_at(load_address))) {
4200 // This is a mov / orr immediate load.
4201 load_address -= 3 * Assembler::kInstrSize;
4202 DCHECK(Assembler::IsMovImmed(Memory::int32_at(load_address)));
4203 DCHECK(Assembler::IsOrrImmed(
4204 Memory::int32_at(load_address + Assembler::kInstrSize)));
4205 DCHECK(Assembler::IsOrrImmed(
4206 Memory::int32_at(load_address + 2 * Assembler::kInstrSize)));
4207 } else {
4208 // This is a small constant pool lookup.
4209 DCHECK(Assembler::IsLdrPpImmediateOffset(Memory::int32_at(load_address)));
4210 }
4211 return load_address;
4212}
4213
4214
4215void BackEdgeTable::PatchAt(Code* unoptimized_code,
4216 Address pc,
4217 BackEdgeState target_state,
4218 Code* replacement_code) {
4219 Address pc_immediate_load_address = GetInterruptImmediateLoadAddress(pc);
4220 Address branch_address = pc_immediate_load_address - Assembler::kInstrSize;
4221 Isolate* isolate = unoptimized_code->GetIsolate();
4222 CodePatcher patcher(isolate, branch_address, 1);
4223 switch (target_state) {
4224 case INTERRUPT:
4225 {
4226 // <decrement profiling counter>
4227 // bpl ok
4228 // ; load interrupt stub address into ip - either of (for ARMv7):
4229 // ; <small cp load> | <extended cp load> | <immediate load>
4230 // ldr ip, [pc/pp, #imm] | movw ip, #imm | movw ip, #imm
4231 // | movt ip, #imm | movw ip, #imm
4232 // | ldr ip, [pp, ip]
4233 // ; or (for ARMv6):
4234 // ; <small cp load> | <extended cp load> | <immediate load>
4235 // ldr ip, [pc/pp, #imm] | mov ip, #imm | mov ip, #imm
4236 // | orr ip, ip, #imm> | orr ip, ip, #imm
4237 // | orr ip, ip, #imm> | orr ip, ip, #imm
4238 // | orr ip, ip, #imm> | orr ip, ip, #imm
4239 // blx ip
4240 // <reset profiling counter>
4241 // ok-label
4242
4243 // Calculate branch offset to the ok-label - this is the difference
4244 // between the branch address and |pc| (which points at <blx ip>) plus
4245        // kProfileCounterResetSequenceLength instructions.
4246 int branch_offset = pc - Instruction::kPCReadOffset - branch_address +
4247 kProfileCounterResetSequenceLength;
4248 patcher.masm()->b(branch_offset, pl);
4249 break;
4250 }
4251 case ON_STACK_REPLACEMENT:
4252 case OSR_AFTER_STACK_CHECK:
4253 // <decrement profiling counter>
4254 // mov r0, r0 (NOP)
4255 // ; load on-stack replacement address into ip - either of (for ARMv7):
4256 // ; <small cp load> | <extended cp load> | <immediate load>
4257 // ldr ip, [pc/pp, #imm] | movw ip, #imm | movw ip, #imm
4258 // | movt ip, #imm> | movw ip, #imm
4259 // | ldr ip, [pp, ip]
4260 // ; or (for ARMv6):
4261 // ; <small cp load> | <extended cp load> | <immediate load>
4262 // ldr ip, [pc/pp, #imm] | mov ip, #imm | mov ip, #imm
4263 // | orr ip, ip, #imm> | orr ip, ip, #imm
4264 // | orr ip, ip, #imm> | orr ip, ip, #imm
4265 // | orr ip, ip, #imm> | orr ip, ip, #imm
4266 // blx ip
4267 // <reset profiling counter>
4268 // ok-label
4269 patcher.masm()->nop();
4270 break;
4271 }
4272
4273 // Replace the call address.
4274 Assembler::set_target_address_at(isolate, pc_immediate_load_address,
4275 unoptimized_code, replacement_code->entry());
4276
4277 unoptimized_code->GetHeap()->incremental_marking()->RecordCodeTargetPatch(
4278 unoptimized_code, pc_immediate_load_address, replacement_code);
4279}
4280
4281
4282BackEdgeTable::BackEdgeState BackEdgeTable::GetBackEdgeState(
4283 Isolate* isolate,
4284 Code* unoptimized_code,
4285 Address pc) {
4286 DCHECK(Assembler::IsBlxIp(Memory::int32_at(pc - Assembler::kInstrSize)));
4287
4288 Address pc_immediate_load_address = GetInterruptImmediateLoadAddress(pc);
4289 Address branch_address = pc_immediate_load_address - Assembler::kInstrSize;
4290 Address interrupt_address = Assembler::target_address_at(
4291 pc_immediate_load_address, unoptimized_code);
4292
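  // PatchAt() replaces the conditional branch to the ok-label with a nop when
  // the interrupt check is patched out, so an intact branch means the back
  // edge is still in the INTERRUPT state.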
4293 if (Assembler::IsBranch(Assembler::instr_at(branch_address))) {
4294 DCHECK(interrupt_address ==
4295 isolate->builtins()->InterruptCheck()->entry());
4296 return INTERRUPT;
4297 }
4298
4299 DCHECK(Assembler::IsNop(Assembler::instr_at(branch_address)));
4300
4301 if (interrupt_address ==
4302 isolate->builtins()->OnStackReplacement()->entry()) {
4303 return ON_STACK_REPLACEMENT;
4304 }
4305
4306 DCHECK(interrupt_address ==
4307 isolate->builtins()->OsrAfterStackCheck()->entry());
4308 return OSR_AFTER_STACK_CHECK;
4309}
4310
4311
4312} // namespace internal
4313} // namespace v8
4314
4315#endif // V8_TARGET_ARCH_ARM